code stringlengths 1 1.72M | language stringclasses 1
value |
|---|---|
#
# Copyright (C) 2008 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import sys
import xml.dom.minidom
from git_config import GitConfig
from git_config import IsId
from manifest import Manifest
from project import RemoteSpec
from project import Project
from project import MetaProject
from project import R_HEADS
from project import HEAD
from error import ManifestParseError
MANIFEST_FILE_NAME = 'manifest.xml'          # file (usually a symlink) in .repo/ naming the active manifest
LOCAL_MANIFEST_NAME = 'local_manifest.xml'   # optional user-local manifest layered on top of the main one
R_M = 'refs/remotes/m/'                      # ref namespace used to publish per-project manifest revisions
class _Default(object):
"""Project defaults within the manifest."""
revisionExpr = None
remote = None
class _XmlRemote(object):
def __init__(self,
name,
fetch=None,
review=None):
self.name = name
self.fetchUrl = fetch
self.reviewUrl = review
def ToRemoteSpec(self, projectName):
url = self.fetchUrl
while url.endswith('/'):
url = url[:-1]
url += '/%s.git' % projectName
return RemoteSpec(self.name, url, self.reviewUrl)
class XmlManifest(Manifest):
  """Manages the repo configuration file (an XML manifest).

  The manifest normally lives at .repo/manifest.xml, a symlink into the
  checked-out manifests project.  An optional .repo/local_manifest.xml is
  parsed on top of it.
  """

  def __init__(self, repodir):
    Manifest.__init__(self, repodir)
    self._manifestFile = os.path.join(repodir, MANIFEST_FILE_NAME)
    self.manifestProject = MetaProject(self, 'manifests',
      gitdir = os.path.join(repodir, 'manifests.git'),
      worktree = os.path.join(repodir, 'manifests'))
    self._Unload()

  def Override(self, name):
    """Use a different manifest, just for the current instantiation.

    Raises ManifestParseError if `name` is not a file in the manifests
    project worktree.
    """
    path = os.path.join(self.manifestProject.worktree, name)
    if not os.path.isfile(path):
      raise ManifestParseError('manifest %s not found' % name)

    old = self._manifestFile
    try:
      self._manifestFile = path
      self._Unload()
      self._Load()
    finally:
      # Restore the canonical path; the parsed state remains loaded.
      self._manifestFile = old

  def Link(self, name):
    """Update the repo metadata to use a different manifest permanently."""
    self.Override(name)

    try:
      if os.path.exists(self._manifestFile):
        os.remove(self._manifestFile)
      os.symlink('manifests/%s' % name, self._manifestFile)
    except OSError:
      raise ManifestParseError('cannot link manifest %s' % name)

  def _RemoteToXml(self, r, doc, root):
    # Serialize one _XmlRemote back into a <remote> element under root.
    e = doc.createElement('remote')
    root.appendChild(e)
    e.setAttribute('name', r.name)
    e.setAttribute('fetch', r.fetchUrl)
    if r.reviewUrl is not None:
      e.setAttribute('review', r.reviewUrl)

  def Save(self, fd, peg_rev=False):
    """Write the current manifest out to the given file descriptor.

    If peg_rev is true, each project's revision is pinned: to the commit
    the revision expression resolves to (mirrors) or to the commit
    currently checked out in the worktree.
    """
    doc = xml.dom.minidom.Document()
    root = doc.createElement('manifest')
    doc.appendChild(root)

    # Save out the notice.  There's a little bit of work here to give it the
    # right whitespace, which assumes that the notice is automatically indented
    # by 4 by minidom.
    if self.notice:
      notice_element = root.appendChild(doc.createElement('notice'))
      notice_lines = self.notice.splitlines()
      indented_notice = ('\n'.join(" "*4 + line for line in notice_lines))[4:]
      notice_element.appendChild(doc.createTextNode(indented_notice))

    d = self.default
    sort_remotes = list(self.remotes.keys())
    sort_remotes.sort()

    for r in sort_remotes:
      self._RemoteToXml(self.remotes[r], doc, root)
    if self.remotes:
      root.appendChild(doc.createTextNode(''))

    have_default = False
    e = doc.createElement('default')
    if d.remote:
      have_default = True
      e.setAttribute('remote', d.remote.name)
    if d.revisionExpr:
      have_default = True
      e.setAttribute('revision', d.revisionExpr)
    if have_default:
      root.appendChild(e)
      root.appendChild(doc.createTextNode(''))

    if self._manifest_server:
      e = doc.createElement('manifest-server')
      e.setAttribute('url', self._manifest_server)
      root.appendChild(e)
      root.appendChild(doc.createTextNode(''))

    sort_projects = list(self.projects.keys())
    sort_projects.sort()

    for p in sort_projects:
      p = self.projects[p]
      e = doc.createElement('project')
      root.appendChild(e)
      e.setAttribute('name', p.name)
      if p.relpath != p.name:
        e.setAttribute('path', p.relpath)
      if not d.remote or p.remote.name != d.remote.name:
        e.setAttribute('remote', p.remote.name)

      if peg_rev:
        if self.IsMirror:
          e.setAttribute('revision',
                         p.bare_git.rev_parse(p.revisionExpr + '^0'))
        else:
          e.setAttribute('revision',
                         p.work_git.rev_parse(HEAD + '^0'))
      elif not d.revisionExpr or p.revisionExpr != d.revisionExpr:
        e.setAttribute('revision', p.revisionExpr)

      for c in p.copyfiles:
        ce = doc.createElement('copyfile')
        ce.setAttribute('src', c.src)
        ce.setAttribute('dest', c.dest)
        e.appendChild(ce)

    # NOTE(review): addindent is two spaces so that the <notice> text node
    # lands at the 4-column indent the notice-formatting code above assumes.
    doc.writexml(fd, '', '  ', '\n', 'UTF-8')

  @property
  def projects(self):
    self._Load()
    return self._projects

  @property
  def remotes(self):
    self._Load()
    return self._remotes

  @property
  def default(self):
    self._Load()
    return self._default

  @property
  def notice(self):
    self._Load()
    return self._notice

  @property
  def manifest_server(self):
    self._Load()
    return self._manifest_server

  def InitBranch(self):
    # Ensure the manifests project is on a branch so it can be updated later.
    m = self.manifestProject
    if m.CurrentBranch is None:
      return m.StartBranch('default')
    return True

  def SetMRefs(self, project):
    # Publish the manifest branch for this project under refs/remotes/m/.
    if self.branch:
      project._InitAnyMRef(R_M + self.branch)

  def _Unload(self):
    # Drop all parsed state; property accessors re-trigger _Load().
    self._loaded = False
    self._projects = {}
    self._remotes = {}
    self._default = None
    self._notice = None
    self.branch = None
    self._manifest_server = None

  def _Load(self):
    if not self._loaded:
      m = self.manifestProject
      b = m.GetBranch(m.CurrentBranch)
      if b.remote and b.remote.name:
        m.remote.name = b.remote.name
      b = b.merge
      if b is not None and b.startswith(R_HEADS):
        b = b[len(R_HEADS):]
      self.branch = b

      self._ParseManifest(True)

      # Layer the optional local manifest over the main one.
      local = os.path.join(self.repodir, LOCAL_MANIFEST_NAME)
      if os.path.exists(local):
        try:
          real = self._manifestFile
          self._manifestFile = local
          self._ParseManifest(False)
        finally:
          self._manifestFile = real

      if self.IsMirror:
        self._AddMetaProjectMirror(self.repoProject)
        self._AddMetaProjectMirror(self.manifestProject)

      self._loaded = True

  def _ParseManifest(self, is_root_file):
    root = xml.dom.minidom.parse(self._manifestFile)
    if not root or not root.childNodes:
      raise ManifestParseError(
        "no root node in %s" % self._manifestFile)

    config = root.childNodes[0]
    if config.nodeName != 'manifest':
      raise ManifestParseError(
        "no <manifest> in %s" % self._manifestFile)

    for node in config.childNodes:
      if node.nodeName == 'remove-project':
        name = self._reqatt(node, 'name')
        try:
          del self._projects[name]
        except KeyError:
          raise ManifestParseError(
            'project %s not found' % name)

    for node in config.childNodes:
      if node.nodeName == 'remote':
        remote = self._ParseRemote(node)
        if self._remotes.get(remote.name):
          raise ManifestParseError(
            'duplicate remote %s in %s' %
            (remote.name, self._manifestFile))
        self._remotes[remote.name] = remote

    for node in config.childNodes:
      if node.nodeName == 'default':
        if self._default is not None:
          raise ManifestParseError(
            'duplicate default in %s' % self._manifestFile)
        self._default = self._ParseDefault(node)
    if self._default is None:
      self._default = _Default()

    for node in config.childNodes:
      if node.nodeName == 'notice':
        if self._notice is not None:
          # Fixed: formerly referenced self.manifestFile, which does not
          # exist (the attribute is _manifestFile) and raised AttributeError.
          raise ManifestParseError(
            'duplicate notice in %s' % self._manifestFile)
        self._notice = self._ParseNotice(node)

    for node in config.childNodes:
      if node.nodeName == 'manifest-server':
        url = self._reqatt(node, 'url')
        if self._manifest_server is not None:
          # Fixed: same self.manifestFile -> self._manifestFile typo as above.
          raise ManifestParseError(
            'duplicate manifest-server in %s' % self._manifestFile)
        self._manifest_server = url

    for node in config.childNodes:
      if node.nodeName == 'project':
        project = self._ParseProject(node)
        if self._projects.get(project.name):
          raise ManifestParseError(
            'duplicate project %s in %s' %
            (project.name, self._manifestFile))
        self._projects[project.name] = project

  def _AddMetaProjectMirror(self, m):
    # Register a meta project (repo itself, or the manifests project) as an
    # additional mirrored project, guessing its name from its fetch URL.
    name = None
    m_url = m.GetRemote(m.remote.name).url
    if m_url.endswith('/.git'):
      raise ManifestParseError('refusing to mirror %s' % m_url)

    if self._default and self._default.remote:
      url = self._default.remote.fetchUrl
      if not url.endswith('/'):
        url += '/'
      if m_url.startswith(url):
        remote = self._default.remote
        name = m_url[len(url):]

    if name is None:
      s = m_url.rindex('/') + 1
      remote = _XmlRemote('origin', m_url[:s])
      name = m_url[s:]

    if name.endswith('.git'):
      name = name[:-4]

    if name not in self._projects:
      m.PreSync()
      gitdir = os.path.join(self.topdir, '%s.git' % name)
      project = Project(manifest = self,
                        name = name,
                        remote = remote.ToRemoteSpec(name),
                        gitdir = gitdir,
                        worktree = None,
                        relpath = None,
                        revisionExpr = m.revisionExpr,
                        revisionId = None)
      self._projects[project.name] = project

  def _ParseRemote(self, node):
    """Read a <remote> element from the manifest file."""
    name = self._reqatt(node, 'name')
    fetch = self._reqatt(node, 'fetch')
    review = node.getAttribute('review')
    if review == '':
      review = None
    return _XmlRemote(name, fetch, review)

  def _ParseDefault(self, node):
    """Read a <default> element from the manifest file."""
    d = _Default()
    d.remote = self._get_remote(node)
    d.revisionExpr = node.getAttribute('revision')
    if d.revisionExpr == '':
      d.revisionExpr = None
    return d

  def _ParseNotice(self, node):
    """Read a <notice> element from the manifest file.

    The <notice> element is distinct from other tags in the XML in that the
    data is conveyed between the start and end tag (it's not an empty-element
    tag).

    The white space (carriage returns, indentation) for the notice element is
    relevant and is parsed in a way that is based on how python docstrings
    work.  In fact, the code is remarkably similar to here:
      http://www.python.org/dev/peps/pep-0257/
    """
    # Get the data out of the node...
    notice = node.childNodes[0].data

    # Figure out minimum indentation, skipping the first line (the same line
    # as the <notice> tag)...
    minIndent = sys.maxsize  # portable: sys.maxint is Python 2 only
    lines = notice.splitlines()
    for line in lines[1:]:
      lstrippedLine = line.lstrip()
      if lstrippedLine:
        indent = len(line) - len(lstrippedLine)
        minIndent = min(indent, minIndent)

    # Strip leading / trailing blank lines and also indentation.
    cleanLines = [lines[0].strip()]
    for line in lines[1:]:
      cleanLines.append(line[minIndent:].rstrip())

    # Clear completely blank lines from front and back...
    while cleanLines and not cleanLines[0]:
      del cleanLines[0]
    while cleanLines and not cleanLines[-1]:
      del cleanLines[-1]

    return '\n'.join(cleanLines)

  def _ParseProject(self, node):
    """Read a <project> element from the manifest file."""
    name = self._reqatt(node, 'name')

    remote = self._get_remote(node)
    if remote is None:
      remote = self._default.remote
    if remote is None:
      raise ManifestParseError(
        "no remote for project %s within %s" %
        (name, self._manifestFile))

    revisionExpr = node.getAttribute('revision')
    if not revisionExpr:
      revisionExpr = self._default.revisionExpr
    if not revisionExpr:
      raise ManifestParseError(
        "no revision for project %s within %s" %
        (name, self._manifestFile))

    path = node.getAttribute('path')
    if not path:
      path = name
    if path.startswith('/'):
      raise ManifestParseError(
        "project %s path cannot be absolute in %s" %
        (name, self._manifestFile))

    if self.IsMirror:
      # Mirrors have no worktree; note relpath below is still the path.
      worktree = None
      gitdir = os.path.join(self.topdir, '%s.git' % name)
    else:
      worktree = os.path.join(self.topdir, path).replace('\\', '/')
      gitdir = os.path.join(self.repodir, 'projects/%s.git' % path)

    project = Project(manifest = self,
                      name = name,
                      remote = remote.ToRemoteSpec(name),
                      gitdir = gitdir,
                      worktree = worktree,
                      relpath = path,
                      revisionExpr = revisionExpr,
                      revisionId = None)

    for n in node.childNodes:
      if n.nodeName == 'copyfile':
        self._ParseCopyFile(project, n)

    return project

  def _ParseCopyFile(self, project, node):
    src = self._reqatt(node, 'src')
    dest = self._reqatt(node, 'dest')
    if not self.IsMirror:
      # src is project relative;
      # dest is relative to the top of the tree
      project.AddCopyFile(src, dest, os.path.join(self.topdir, dest))

  def _get_remote(self, node):
    # Resolve the optional 'remote' attribute to a previously parsed remote.
    name = node.getAttribute('remote')
    if not name:
      return None

    v = self._remotes.get(name)
    if not v:
      raise ManifestParseError(
        "remote %s not defined in %s" %
        (name, self._manifestFile))
    return v

  def _reqatt(self, node, attname):
    """Read a required attribute from the node; raise if missing or empty."""
    v = node.getAttribute(attname)
    if not v:
      raise ManifestParseError(
        "no %s in <%s> within %s" %
        (attname, node.nodeName, self._manifestFile))
    return v
| Python |
#
# Copyright (C) 2009 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from manifest_submodule import SubmoduleManifest
from manifest_xml import XmlManifest
def ParseManifest(repodir, type=None):
  """Instantiate the manifest implementation for `repodir`.

  When `type` is supplied it is used directly; otherwise the
  submodule-based format is preferred when present, falling back to
  the XML format.
  """
  if type:
    return type(repodir)
  if SubmoduleManifest.Is(repodir):
    return SubmoduleManifest(repodir)
  return XmlManifest(repodir)

_manifest = None  # process-wide cached manifest instance

def GetManifest(repodir, reparse=False, type=None):
  """Return the cached manifest, parsing it on first use.

  The cache is refreshed when `reparse` is set, or when a `type` is
  requested that differs from the cached instance's class.
  """
  global _manifest
  stale = (_manifest is None
           or reparse
           or (type and _manifest.__class__ != type))
  if stale:
    _manifest = ParseManifest(repodir, type=type)
  return _manifest
| Python |
#
# Copyright (C) 2008 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
class ManifestParseError(Exception):
  """Failed to parse the manifest file."""


class ManifestInvalidRevisionError(Exception):
  """The revision value in a project is incorrect."""
class EditorError(Exception):
  """Unspecified error from the user's text editor."""

  def __init__(self, reason):
    self.reason = reason

  def __str__(self):
    return self.reason
class GitError(Exception):
  """Unspecified internal error from git."""

  def __init__(self, command):
    self.command = command

  def __str__(self):
    return self.command
class ImportError(Exception):
  """An import from a non-Git format cannot be performed.

  NOTE(review): this deliberately keeps its historical name even though it
  shadows the builtin ImportError within this module; renaming it would
  break existing callers that catch it.
  """

  def __init__(self, reason):
    self.reason = reason

  def __str__(self):
    return self.reason
class UploadError(Exception):
  """A bundle upload to Gerrit did not succeed."""

  def __init__(self, reason):
    self.reason = reason

  def __str__(self):
    return self.reason
class NoSuchProjectError(Exception):
  """A specified project does not exist in the work tree."""

  def __init__(self, name=None):
    # name is None when the failing lookup was the current directory.
    self.name = name

  def __str__(self):
    # Fixed: previously read self.Name (a non-existent attribute, so this
    # raised AttributeError whenever the message was rendered).
    if self.name is None:
      return 'in current directory'
    return self.name
class RepoChangedException(Exception):
  """Thrown if 'repo sync' results in repo updating its internal
  repo or manifest repositories.  In this special case we must
  use exec to re-execute repo with the new code and manifest.
  """

  def __init__(self, extra_args=None):
    # Fixed: the default was a shared mutable list ([]); every instance
    # created without arguments aliased the same object.
    self.extra_args = extra_args or []
| Python |
#
# Copyright (C) 2009 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
import os
import shutil
from error import GitError
from error import ManifestParseError
from git_command import GitCommand
from git_config import GitConfig
from git_config import IsId
from manifest import Manifest
from progress import Progress
from project import RemoteSpec
from project import Project
from project import MetaProject
from project import R_HEADS
from project import HEAD
from project import _lwrite
import manifest_xml
GITLINK = '160000'
def _rmdir(dir, top):
while dir != top:
try:
os.rmdir(dir)
except OSError:
break
dir = os.path.dirname(dir)
def _rmref(gitdir, ref):
os.remove(os.path.join(gitdir, ref))
log = os.path.join(gitdir, 'logs', ref)
if os.path.exists(log):
os.remove(log)
_rmdir(os.path.dirname(log), gitdir)
def _has_gitmodules(d):
return os.path.exists(os.path.join(d, '.gitmodules'))
class SubmoduleManifest(Manifest):
  """Manifest derived from a .gitmodules file."""

  @classmethod
  def Is(cls, repodir):
    # Any of these locations containing a .gitmodules marks the client
    # as using the submodule manifest format.
    return _has_gitmodules(os.path.dirname(repodir)) \
        or _has_gitmodules(os.path.join(repodir, 'manifest')) \
        or _has_gitmodules(os.path.join(repodir, 'manifests'))

  @classmethod
  def IsBare(cls, p):
    # True if the bare repository p has a .gitmodules at its revision.
    try:
      p.bare_git.cat_file('-e', '%s:.gitmodules' % p.GetRevisionId())
    except GitError:
      return False
    return True

  def __init__(self, repodir):
    Manifest.__init__(self, repodir)

    gitdir = os.path.join(repodir, 'manifest.git')
    config = GitConfig.ForRepository(gitdir = gitdir)

    if config.GetBoolean('repo.mirror'):
      worktree = os.path.join(repodir, 'manifest')
      relpath = None
    else:
      worktree = self.topdir
      relpath = '.'

    self.manifestProject = MetaProject(self, '__manifest__',
      gitdir = gitdir,
      worktree = worktree,
      relpath = relpath)
    self._modules = GitConfig(os.path.join(worktree, '.gitmodules'),
                              pickleFile = os.path.join(
                                repodir, '.repopickle_gitmodules'
                              ))
    self._review = GitConfig(os.path.join(worktree, '.review'),
                             pickleFile = os.path.join(
                               repodir, '.repopickle_review'
                             ))
    self._Unload()

  @property
  def projects(self):
    self._Load()
    return self._projects

  @property
  def notice(self):
    return self._modules.GetString('repo.notice')

  def InitBranch(self):
    # Put the manifest project on the branch named by its revision.
    m = self.manifestProject
    if m.CurrentBranch is None:
      b = m.revisionExpr
      if b.startswith(R_HEADS):
        b = b[len(R_HEADS):]
      return m.StartBranch(b)
    return True

  def SetMRefs(self, project):
    if project.revisionId is None:
      # Special project, e.g. the manifest or repo executable.
      #
      return

    # refs/remotes/m tracks the pinned commit id.
    ref = 'refs/remotes/m'
    cur = project.bare_ref.get(ref)
    exp = project.revisionId
    if cur != exp:
      msg = 'manifest set to %s' % exp
      project.bare_git.UpdateRef(ref, exp, message = msg, detach = True)

    # refs/remotes/m-revision is a symref to the tracked branch, if any.
    ref = 'refs/remotes/m-revision'
    cur = project.bare_ref.symref(ref)
    exp = project.revisionExpr
    if exp is None:
      if cur:
        _rmref(project.gitdir, ref)
    elif cur != exp:
      remote = project.GetRemote(project.remote.name)
      dst = remote.ToLocal(exp)
      msg = 'manifest set to %s (%s)' % (exp, dst)
      project.bare_git.symbolic_ref('-m', msg, ref, dst)

  def Upgrade_Local(self, old):
    """Convert an XML manifest client in place to this format."""
    if isinstance(old, manifest_xml.XmlManifest):
      self.FromXml_Local_1(old, checkout=True)
      self.FromXml_Local_2(old)
    else:
      raise ManifestParseError('cannot upgrade manifest')

  def FromXml_Local_1(self, old, checkout):
    # Phase 1: move the manifest repository and fix up its branch/remote.
    os.rename(old.manifestProject.gitdir,
              os.path.join(old.repodir, 'manifest.git'))

    oldmp = old.manifestProject
    oldBranch = oldmp.CurrentBranch
    b = oldmp.GetBranch(oldBranch).merge
    if not b:
      raise ManifestParseError('cannot upgrade manifest')
    if b.startswith(R_HEADS):
      b = b[len(R_HEADS):]

    newmp = self.manifestProject
    self._CleanOldMRefs(newmp)
    if oldBranch != b:
      newmp.bare_git.branch('-m', oldBranch, b)
      newmp.config.ClearCache()

    old_remote = newmp.GetBranch(b).remote.name
    act_remote = self._GuessRemoteName(old)
    if old_remote != act_remote:
      newmp.bare_git.remote('rename', old_remote, act_remote)
      newmp.config.ClearCache()
    newmp.remote.name = act_remote
    sys.stderr.write("Assuming remote named '%s'\n" % act_remote)

    if checkout:
      for p in old.projects.values():
        for c in p.copyfiles:
          if os.path.exists(c.abs_dest):
            os.remove(c.abs_dest)
      newmp._InitWorkTree()
    else:
      newmp._LinkWorkTree()

    _lwrite(os.path.join(newmp.worktree,'.git',HEAD),
            'ref: refs/heads/%s\n' % b)

  def _GuessRemoteName(self, old):
    # Pick the remote name used by the majority of the old projects.
    used = {}
    for p in old.projects.values():
      n = p.remote.name
      used[n] = used.get(n, 0) + 1

    remote_name = 'origin'
    remote_used = 0
    for n in used.keys():
      if remote_used < used[n]:
        remote_used = used[n]
        remote_name = n
    return remote_name

  def FromXml_Local_2(self, old):
    # Phase 2: relocate each project's git repository and worktree.
    shutil.rmtree(old.manifestProject.worktree)
    os.remove(old._manifestFile)

    my_remote = self._Remote().name
    new_base = os.path.join(self.repodir, 'projects')
    old_base = os.path.join(self.repodir, 'projects.old')
    os.rename(new_base, old_base)
    os.makedirs(new_base)

    info = []
    pm = Progress('Converting projects', len(self.projects))
    for p in self.projects.values():
      pm.update()

      old_p = old.projects.get(p.name)
      old_gitdir = os.path.join(old_base, '%s.git' % p.relpath)
      if not os.path.isdir(old_gitdir):
        continue

      parent = os.path.dirname(p.gitdir)
      if not os.path.isdir(parent):
        os.makedirs(parent)
      os.rename(old_gitdir, p.gitdir)
      _rmdir(os.path.dirname(old_gitdir), self.repodir)

      if not os.path.isdir(p.worktree):
        os.makedirs(p.worktree)

      if os.path.isdir(os.path.join(p.worktree, '.git')):
        p._LinkWorkTree(relink=True)

      self._CleanOldMRefs(p)
      if old_p and old_p.remote.name != my_remote:
        info.append("%s/: renamed remote '%s' to '%s'" \
                    % (p.relpath, old_p.remote.name, my_remote))
        p.bare_git.remote('rename', old_p.remote.name, my_remote)
        p.config.ClearCache()

      self.SetMRefs(p)
    pm.end()
    for i in info:
      sys.stderr.write('%s\n' % i)

  def _CleanOldMRefs(self, p):
    # Remove refs left behind by the XML manifest (refs/remotes/m/*).
    all_refs = p._allrefs
    for ref in all_refs.keys():
      if ref.startswith(manifest_xml.R_M):
        if p.bare_ref.symref(ref) != '':
          _rmref(p.gitdir, ref)
        else:
          p.bare_git.DeleteRef(ref, all_refs[ref])

  def FromXml_Definition(self, old):
    """Convert another manifest representation to this one."""
    mp = self.manifestProject
    gm = self._modules
    gr = self._review

    fd = open(os.path.join(mp.worktree, '.gitignore'), 'ab')
    fd.write('/.repo\n')
    fd.close()

    sort_projects = list(old.projects.keys())
    sort_projects.sort()

    b = mp.GetBranch(mp.CurrentBranch).merge
    if b.startswith(R_HEADS):
      b = b[len(R_HEADS):]

    if old.notice:
      gm.SetString('repo.notice', old.notice)

    info = []
    pm = Progress('Converting manifest', len(sort_projects))
    for p in sort_projects:
      pm.update()
      p = old.projects[p]

      gm.SetString('submodule.%s.path' % p.name, p.relpath)
      gm.SetString('submodule.%s.url' % p.name, p.remote.url)

      if gr.GetString('review.url') is None:
        gr.SetString('review.url', p.remote.review)
      elif gr.GetString('review.url') != p.remote.review:
        gr.SetString('review.%s.url' % p.name, p.remote.review)

      r = p.revisionExpr
      if r and not IsId(r):
        if r.startswith(R_HEADS):
          r = r[len(R_HEADS):]
        if r == b:
          r = '.'
        gm.SetString('submodule.%s.revision' % p.name, r)

      for c in p.copyfiles:
        info.append('Moved %s out of %s' % (c.src, p.relpath))
        c._Copy()
        p.work_git.rm(c.src)
        mp.work_git.add(c.dest)

      self.SetRevisionId(p.relpath, p.GetRevisionId())
    mp.work_git.add('.gitignore', '.gitmodules', '.review')
    pm.end()
    for i in info:
      sys.stderr.write('%s\n' % i)

  def _Unload(self):
    # Drop all parsed state; _Load() rebuilds it on demand.
    self._loaded = False
    self._projects = {}
    self._revisionIds = None
    self.branch = None

  def _Load(self):
    if not self._loaded:
      f = os.path.join(self.repodir, manifest_xml.LOCAL_MANIFEST_NAME)
      if os.path.exists(f):
        # local_manifest.xml has no meaning in the submodule format.
        sys.stderr.write('warning: ignoring %s\n' % f)

      m = self.manifestProject
      b = m.CurrentBranch
      if not b:
        raise ManifestParseError('manifest cannot be on detached HEAD')
      b = m.GetBranch(b).merge
      if b.startswith(R_HEADS):
        b = b[len(R_HEADS):]
      self.branch = b
      m.remote.name = self._Remote().name

      self._ParseModules()

      if self.IsMirror:
        self._AddMetaProjectMirror(self.repoProject)
        self._AddMetaProjectMirror(self.manifestProject)

      self._loaded = True

  def _ParseModules(self):
    byPath = dict()
    for name in self._modules.GetSubSections('submodule'):
      p = self._ParseProject(name)
      if self._projects.get(p.name):
        raise ManifestParseError('duplicate project "%s"' % p.name)
      if byPath.get(p.relpath):
        raise ManifestParseError('duplicate path "%s"' % p.relpath)
      self._projects[p.name] = p
      byPath[p.relpath] = p

    # Every gitlink in the index must be described in .gitmodules.
    for relpath in self._allRevisionIds.keys():
      if relpath not in byPath:
        raise ManifestParseError(
          'project "%s" not in .gitmodules' % relpath)

  def _Remote(self):
    m = self.manifestProject
    b = m.GetBranch(m.CurrentBranch)
    return b.remote

  def _ResolveUrl(self, url):
    """Resolve a ./ or ../ submodule URL against the manifest remote URL."""
    if url.startswith('./') or url.startswith('../'):
      base = self._Remote().url
      try:
        base = base[:base.rindex('/')+1]
      except ValueError:
        base = base[:base.rindex(':')+1]
      if url.startswith('./'):
        url = url[2:]
      while '/' in base and url.startswith('../'):
        # Fixed: ascend one path component per '../'.  The old code
        # recomputed base[:base.rindex('/')+1] on a string already ending
        # in '/', which was a no-op, so '../' never shortened the base.
        base = base[:base.rindex('/')]
        base = base[:base.rindex('/')+1]
        url = url[3:]
      return base + url
    return url

  def _GetRevisionId(self, path):
    return self._allRevisionIds.get(path)

  @property
  def _allRevisionIds(self):
    # Map of submodule path -> pinned commit id, from the gitlink entries
    # of the manifest project index.
    if self._revisionIds is None:
      a = dict()
      p = GitCommand(self.manifestProject,
                     ['ls-files','-z','--stage'],
                     capture_stdout = True)
      for line in p.process.stdout.read().split('\0')[:-1]:
        # ls-files --stage: "<mode> <id> <stage>\t<path>".  Fixed: split
        # on the first tab only; maxsplit=2 raised ValueError for any
        # path containing a tab character.
        l_info, l_path = line.split('\t', 1)
        l_mode, l_id, l_stage = l_info.split(' ', 2)
        if l_mode == GITLINK and l_stage == '0':
          a[l_path] = l_id
      p.Wait()
      self._revisionIds = a
    return self._revisionIds

  def SetRevisionId(self, path, id):
    self.manifestProject.work_git.update_index(
      '--add','--cacheinfo', GITLINK, id, path)

  def _ParseProject(self, name):
    """Build a Project from one [submodule "<name>"] section."""
    gm = self._modules
    gr = self._review

    path = gm.GetString('submodule.%s.path' % name)
    if not path:
      path = name

    revId = self._GetRevisionId(path)
    if not revId:
      raise ManifestParseError(
        'submodule "%s" has no revision at "%s"' \
        % (name, path))

    url = gm.GetString('submodule.%s.url' % name)
    if not url:
      url = name
    url = self._ResolveUrl(url)

    review = gr.GetString('review.%s.url' % name)
    if not review:
      review = gr.GetString('review.url')
    if not review:
      review = self._Remote().review
    remote = RemoteSpec(self._Remote().name, url, review)

    revExpr = gm.GetString('submodule.%s.revision' % name)
    if revExpr == '.':
      revExpr = self.branch

    if self.IsMirror:
      # Mirrors have no worktree; relpath passed below is still the path.
      worktree = None
      gitdir = os.path.join(self.topdir, '%s.git' % name)
    else:
      worktree = os.path.join(self.topdir, path)
      gitdir = os.path.join(self.repodir, 'projects/%s.git' % name)

    return Project(manifest = self,
                   name = name,
                   remote = remote,
                   gitdir = gitdir,
                   worktree = worktree,
                   relpath = path,
                   revisionExpr = revExpr,
                   revisionId = revId)

  def _AddMetaProjectMirror(self, m):
    m_url = m.GetRemote(m.remote.name).url
    if m_url.endswith('/.git'):
      raise ManifestParseError('refusing to mirror %s' % m_url)

    name = self._GuessMetaName(m_url)
    if name.endswith('.git'):
      name = name[:-4]

    if name not in self._projects:
      m.PreSync()
      gitdir = os.path.join(self.topdir, '%s.git' % name)
      project = Project(manifest = self,
                        name = name,
                        remote = RemoteSpec(self._Remote().name, m_url),
                        gitdir = gitdir,
                        worktree = None,
                        relpath = None,
                        revisionExpr = m.revisionExpr,
                        revisionId = None)
      self._projects[project.name] = project

  def _GuessMetaName(self, m_url):
    # Find the longest URL prefix shared with an existing project and
    # reuse that project's name prefix for the meta project.
    parts = m_url.split('/')
    name = parts[-1]
    parts = parts[0:-1]
    s = len(parts) - 1
    while s > 0:
      l = '/'.join(parts[0:s]) + '/'
      r = '/'.join(parts[s:]) + '/'
      for p in self._projects.values():
        if p.name.startswith(r) and p.remote.url.startswith(l):
          return r + name
      s -= 1
    return m_url[m_url.rindex('/') + 1:]
| Python |
#
# Copyright (C) 2008 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import select
import sys
active = False  # True in the child process once output is piped to a pager


def RunPager(globalConfig):
  """Re-plumb this process's output through a pager when on a tty.

  Returns in the forked child (with stdout/stderr redirected into the
  pager's pipe); the original process becomes the pager and never
  returns.  Does nothing when stdin/stdout are not ttys or when the
  configured pager is empty or 'cat'.
  """
  global active

  if not os.isatty(0) or not os.isatty(1):
    return
  pager = _SelectPager(globalConfig)
  if pager == '' or pager == 'cat':
    return

  # This process turns into the pager; a child it forks will
  # do the real processing and output back to the pager. This
  # is necessary to keep the pager in control of the tty.
  #
  try:
    r, w = os.pipe()
    pid = os.fork()
    if not pid:
      # Child: send stdout/stderr into the pipe and resume normal work.
      os.dup2(w, 1)
      os.dup2(w, 2)
      os.close(r)
      os.close(w)
      active = True
      return

    # Parent: read from the pipe and become the pager.
    os.dup2(r, 0)
    os.close(r)
    os.close(w)

    _BecomePager(pager)
  except Exception:
    sys.stderr.write("fatal: cannot start pager '%s'\n" % pager)
    # Fixed: was os.exit(255), which does not exist (AttributeError);
    # sys.exit terminates with the intended status code.
    sys.exit(255)
def _SelectPager(globalConfig):
try:
return os.environ['GIT_PAGER']
except KeyError:
pass
pager = globalConfig.GetString('core.pager')
if pager:
return pager
try:
return os.environ['PAGER']
except KeyError:
pass
return 'less'
def _BecomePager(pager):
  """Replace the current process with the pager (never returns)."""
  # Delaying execution of the pager until we have output
  # ready works around a long-standing bug in popularly
  # available versions of 'less', a better 'more'.
  #
  select.select([0], [], [0])

  # Sensible default behavior for 'less' unless the user overrides it.
  os.environ['LESS'] = 'FRSX'

  try:
    os.execvp(pager, [pager])
  except OSError:
    # Not a plain executable on PATH; let the shell interpret it.
    os.execv('/bin/sh', ['sh', '-c', pager])
| Python |
#
# Copyright (C) 2009 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
from optparse import SUPPRESS_HELP
from color import Coloring
from command import PagedCommand
from git_command import git_require, GitCommand
class GrepColoring(Coloring):
  """Color settings for 'repo grep' output (config section 'grep')."""
  def __init__(self, config):
    Coloring.__init__(self, config, 'grep')
    # Printer for project headers; bold when coloring is enabled.
    self.project = self.printer('project', attr='bold')
class Grep(PagedCommand):
  """Run 'git grep' in each selected project and aggregate the results."""

  common = True
  helpSummary = "Print lines matching a pattern"
  helpUsage = """
%prog {pattern | -e pattern} [<project>...]
"""
  helpDescription = """
Search for the specified patterns in all project files.
Boolean Options
---------------
The following options can appear as often as necessary to express
the pattern to locate:
-e PATTERN
--and, --or, --not, -(, -)
Further, the -r/--revision option may be specified multiple times
in order to scan multiple trees. If the same file matches in more
than one tree, only the first result is reported, prefixed by the
revision name it was found under.
Examples
-------
Look for a line that has '#define' and either 'MAX_PATH or 'PATH_MAX':
repo grep -e '#define' --and -\( -e MAX_PATH -e PATH_MAX \)
Look for a line that has 'NODE' or 'Unexpected' in files that
contain a line that matches both expressions:
repo grep --all-match -e NODE -e Unexpected
"""

  def _Options(self, p):
    # Most flags are not interpreted by repo itself.  The 'carry'
    # callback records each option (and its value, if any) in
    # parser.values.cmd_argv, preserving command-line order, so they
    # can be replayed verbatim to 'git grep' in Execute().
    def carry(option,
              opt_str,
              value,
              parser):
      pt = getattr(parser.values, 'cmd_argv', None)
      if pt is None:
        pt = []
        setattr(parser.values, 'cmd_argv', pt)

      # optparse sees '-(' / '-)'; git grep expects bare '(' / ')'.
      if opt_str == '-(':
        pt.append('(')
      elif opt_str == '-)':
        pt.append(')')
      else:
        pt.append(opt_str)

      if value is not None:
        pt.append(value)

    g = p.add_option_group('Sources')
    g.add_option('--cached',
                 action='callback', callback=carry,
                 help='Search the index, instead of the work tree')
    g.add_option('-r','--revision',
                 dest='revision', action='append', metavar='TREEish',
                 help='Search TREEish, instead of the work tree')

    g = p.add_option_group('Pattern')
    g.add_option('-e',
                 action='callback', callback=carry,
                 metavar='PATTERN', type='str',
                 help='Pattern to search for')
    g.add_option('-i', '--ignore-case',
                 action='callback', callback=carry,
                 help='Ignore case differences')
    g.add_option('-a','--text',
                 action='callback', callback=carry,
                 help="Process binary files as if they were text")
    g.add_option('-I',
                 action='callback', callback=carry,
                 help="Don't match the pattern in binary files")
    g.add_option('-w', '--word-regexp',
                 action='callback', callback=carry,
                 help='Match the pattern only at word boundaries')
    g.add_option('-v', '--invert-match',
                 action='callback', callback=carry,
                 help='Select non-matching lines')
    g.add_option('-G', '--basic-regexp',
                 action='callback', callback=carry,
                 help='Use POSIX basic regexp for patterns (default)')
    g.add_option('-E', '--extended-regexp',
                 action='callback', callback=carry,
                 help='Use POSIX extended regexp for patterns')
    g.add_option('-F', '--fixed-strings',
                 action='callback', callback=carry,
                 help='Use fixed strings (not regexp) for pattern')

    g = p.add_option_group('Pattern Grouping')
    g.add_option('--all-match',
                 action='callback', callback=carry,
                 help='Limit match to lines that have all patterns')
    g.add_option('--and', '--or', '--not',
                 action='callback', callback=carry,
                 help='Boolean operators to combine patterns')
    g.add_option('-(','-)',
                 action='callback', callback=carry,
                 help='Boolean operator grouping')

    g = p.add_option_group('Output')
    g.add_option('-n',
                 action='callback', callback=carry,
                 help='Prefix the line number to matching lines')
    g.add_option('-C',
                 action='callback', callback=carry,
                 metavar='CONTEXT', type='str',
                 help='Show CONTEXT lines around match')
    g.add_option('-B',
                 action='callback', callback=carry,
                 metavar='CONTEXT', type='str',
                 help='Show CONTEXT lines before match')
    g.add_option('-A',
                 action='callback', callback=carry,
                 metavar='CONTEXT', type='str',
                 help='Show CONTEXT lines after match')
    g.add_option('-l','--name-only','--files-with-matches',
                 action='callback', callback=carry,
                 help='Show only file names containing matching lines')
    g.add_option('-L','--files-without-match',
                 action='callback', callback=carry,
                 help='Show only file names not containing matching lines')

  def Execute(self, opt, args):
    """Replay the collected grep options against every selected project."""
    out = GrepColoring(self.manifest.manifestProject.config)

    cmd_argv = ['grep']
    # --color needs git >= 1.6.3 and an output that wants color.
    if out.is_on and git_require((1,6,3)):
      cmd_argv.append('--color')
    cmd_argv.extend(getattr(opt,'cmd_argv',[]))

    if '-e' not in cmd_argv:
      # No -e given: the first positional argument is the pattern.
      if not args:
        self.Usage()
      cmd_argv.append('-e')
      cmd_argv.append(args[0])
      args = args[1:]

    projects = self.GetProjects(args)

    # With multiple projects, ask git for repository-relative paths so
    # each match can be prefixed with its project's path.
    full_name = False
    if len(projects) > 1:
      cmd_argv.append('--full-name')
      full_name = True

    have_rev = False
    if opt.revision:
      if '--cached' in cmd_argv:
        print >>sys.stderr,\
          'fatal: cannot combine --cached and --revision'
        sys.exit(1)
      have_rev = True
      cmd_argv.extend(opt.revision)
    cmd_argv.append('--')

    bad_rev = False
    have_match = False

    for project in projects:
      p = GitCommand(project,
                     cmd_argv,
                     bare = False,
                     capture_stdout = True,
                     capture_stderr = True)
      if p.Wait() != 0:
        # no results
        #
        if p.stderr:
          # A bad revision produces git's 'ambiguous argument' error;
          # remember it so a single message is shown at the end.
          if have_rev and 'fatal: ambiguous argument' in p.stderr:
            bad_rev = True
          else:
            out.project('--- project %s ---' % project.relpath)
            out.nl()
            out.write("%s", p.stderr)
            out.nl()
        continue
      have_match = True

      # We cut the last element, to avoid a blank line.
      #
      r = p.stdout.split('\n')
      r = r[0:-1]

      if have_rev and full_name:
        # Output lines look like 'rev:path:match'; re-emit them with
        # the project path spliced in after the revision.
        for line in r:
          rev, line = line.split(':', 1)
          out.write("%s", rev)
          out.write(':')
          out.project(project.relpath)
          out.write('/')
          out.write("%s", line)
          out.nl()
      elif full_name:
        for line in r:
          out.project(project.relpath)
          out.write('/')
          out.write("%s", line)
          out.nl()
      else:
        for line in r:
          print line

    # Exit status mirrors grep: 0 on any match, 1 otherwise.
    if have_match:
      sys.exit(0)
    elif have_rev and bad_rev:
      for r in opt.revision:
        print >>sys.stderr, "error: can't search revision %s" % r
      sys.exit(1)
    else:
      sys.exit(1)
| Python |
#
# Copyright (C) 2009 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
from color import Coloring
from command import Command
class BranchColoring(Coloring):
  """Coloring scheme for 'repo branches' (git config section 'branch')."""
  def __init__(self, config):
    Coloring.__init__(self, config, 'branch')
    self.current = self.printer('current', fg='green')  # checked-out branch
    self.local = self.printer('local')                  # ordinary branch
    self.notinproject = self.printer('notinproject', fg='red')  # 'not in' lists
class BranchInfo(object):
  """Aggregated state of one branch name across many projects.

  Counts how many projects have the branch checked out, how many have
  published it, and how many are fully merged into the published ref.
  """

  def __init__(self, name):
    self.name = name              # topic branch name
    self.current = 0              # projects with this branch checked out
    self.published = 0            # projects where the branch was uploaded
    self.published_equal = 0      # projects whose revision equals published
    self.projects = []            # per-project branch objects

  def add(self, b):
    """Fold one project's branch object into the running totals."""
    if b.current:
      self.current += 1
    if b.published:
      self.published += 1
    if b.revision == b.published:
      self.published_equal += 1
    self.projects.append(b)

  @property
  def IsCurrent(self):
    # True when at least one project has it checked out.
    return self.current > 0

  @property
  def IsPublished(self):
    # True when at least one project uploaded it.
    return self.published > 0

  @property
  def IsPublishedEqual(self):
    # True only when every tracked project is fully published.
    return self.published_equal == len(self.projects)
class Branches(Command):
  """Summarize topic branches across all selected projects."""

  common = True
  helpSummary = "View current topic branches"
  helpUsage = """
%prog [<project>...]
Summarizes the currently available topic branches.
Branch Display
--------------
The branch display output by this command is organized into four
columns of information; for example:
*P nocolor | in repo
repo2 |
The first column contains a * if the branch is the currently
checked out branch in any of the specified projects, or a blank
if no project has the branch checked out.
The second column contains either blank, p or P, depending upon
the upload status of the branch.
(blank): branch not yet published by repo upload
P: all commits were published by repo upload
p: only some commits were published by repo upload
The third column contains the branch name.
The fourth column (after the | separator) lists the projects that
the branch appears in, or does not appear in. If no project list
is shown, then the branch appears in all projects.
"""

  def Execute(self, opt, args):
    projects = self.GetProjects(args)
    out = BranchColoring(self.manifest.manifestProject.config)
    all = {}  # branch name -> BranchInfo aggregated over all projects
    project_cnt = len(projects)

    # Merge every project's branches into per-name BranchInfo records.
    for project in projects:
      for name, b in project.GetBranches().iteritems():
        b.project = project
        if name not in all:
          all[name] = BranchInfo(name)
        all[name].add(b)

    names = all.keys()
    names.sort()

    if not names:
      print >>sys.stderr, ' (no branches)'
      return

    # Branch-name column width: at least 25, widened to the longest name.
    width = 25
    for name in names:
      if width < len(name):
        width = len(name)

    for name in names:
      i = all[name]
      in_cnt = len(i.projects)

      # Column 1: '*' marks a branch checked out somewhere.
      if i.IsCurrent:
        current = '*'
        hdr = out.current
      else:
        current = ' '
        hdr = out.local

      # Column 2: 'P' fully published, 'p' partially, blank otherwise.
      if i.IsPublishedEqual:
        published = 'P'
      elif i.IsPublished:
        published = 'p'
      else:
        published = ' '

      hdr('%c%c %-*s' % (current, published, width, name))
      out.write(' |')

      if in_cnt < project_cnt:
        fmt = out.write
        paths = []
        if in_cnt < project_cnt - in_cnt:
          # Fewer projects have it than not: list where it IS.
          type = 'in'
          for b in i.projects:
            paths.append(b.project.relpath)
        else:
          # Shorter to list where it is NOT; shown in red.
          fmt = out.notinproject
          type = 'not in'
          have = set()
          for b in i.projects:
            have.add(b.project)
          for p in projects:
            if not p in have:
              paths.append(p.relpath)

        s = ' %s %s' % (type, ', '.join(paths))
        if width + 7 + len(s) < 80:
          fmt(s)
        else:
          # Line would exceed 80 columns; print one project per line.
          fmt(' %s:' % type)
          for p in paths:
            out.nl()
            fmt(width*' ' + ' %s' % p)
      else:
        out.write(' in all projects')
      out.nl()
| Python |
#
# Copyright (C) 2008 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from command import PagedCommand
class Diff(PagedCommand):
common = True
helpSummary = "Show changes between commit and working tree"
helpUsage = """
%prog [<project>...]
"""
def Execute(self, opt, args):
for project in self.GetProjects(args):
project.PrintWorkTreeDiff()
| Python |
#
# Copyright (C) 2008 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
from command import Command
from git_command import git
from progress import Progress
class Abandon(Command):
  """Delete a topic branch (git branch -D) across the named projects."""

  common = True
  helpSummary = "Permanently abandon a development branch"
  helpUsage = """
%prog <branchname> [<project>...]
This subcommand permanently abandons a development branch by
deleting it (and all its history) from your local repository.
It is equivalent to "git branch -D <branchname>".
"""

  def Execute(self, opt, args):
    if not args:
      self.Usage()

    nb = args[0]
    if not git.check_ref_format('heads/%s' % nb):
      print >>sys.stderr, "error: '%s' is not a valid name" % nb
      sys.exit(1)

    # NOTE: the original assigned nb = args[0] a second time here;
    # the redundant assignment has been removed.
    err = []
    # Remaining arguments select projects (renamed from 'all' to
    # avoid shadowing the builtin).
    all_projects = self.GetProjects(args[1:])

    pm = Progress('Abandon %s' % nb, len(all_projects))
    for project in all_projects:
      pm.update()
      if not project.AbandonBranch(nb):
        err.append(project)
    pm.end()

    if err:
      if len(err) == len(all_projects):
        print >>sys.stderr, 'error: no project has branch %s' % nb
      else:
        for p in err:
          print >>sys.stderr,\
            "error: %s/: cannot abandon %s" \
            % (p.relpath, nb)
      sys.exit(1)
| Python |
#
# Copyright (C) 2008 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
from command import Command
from git_command import git
from progress import Progress
class Start(Command):
  """Create a new topic branch in the selected projects."""

  common = True
  helpSummary = "Start a new branch for development"
  helpUsage = """
%prog <newbranchname> [--all | <project>...]
"""
  helpDescription = """
'%prog' begins a new branch of development, starting from the
revision specified in the manifest.
"""

  def _Options(self, p):
    p.add_option('--all',
                 dest='all', action='store_true',
                 help='begin branch in all projects')

  def Execute(self, opt, args):
    if not args:
      self.Usage()

    nb = args[0]
    if not git.check_ref_format('heads/%s' % nb):
      print >>sys.stderr, "error: '%s' is not a valid name" % nb
      sys.exit(1)

    err = []
    projects = []
    if not opt.all:
      # Without --all, at least one explicit project is required.
      projects = args[1:]
      if len(projects) < 1:
        print >>sys.stderr, "error: at least one project must be specified"
        sys.exit(1)

    # An empty project list (--all) selects every project.
    # Renamed from 'all' to avoid shadowing the builtin.
    all_projects = self.GetProjects(projects)

    pm = Progress('Starting %s' % nb, len(all_projects))
    for project in all_projects:
      pm.update()
      if not project.StartBranch(nb):
        err.append(project)
    pm.end()

    if err:
      for p in err:
        print >>sys.stderr,\
          "error: %s/: cannot start %s" \
          % (p.relpath, nb)
      sys.exit(1)
| Python |
#
# Copyright (C) 2008 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import sys
from color import Coloring
from command import InteractiveCommand, MirrorSafeCommand
from error import ManifestParseError
from project import SyncBuffer
from git_command import git_require, MIN_GIT_VERSION
from manifest_submodule import SubmoduleManifest
from manifest_xml import XmlManifest
from subcmds.sync import _ReloadManifest
class Init(InteractiveCommand, MirrorSafeCommand):
common = True
helpSummary = "Initialize repo in the current directory"
helpUsage = """
%prog [options]
"""
helpDescription = """
The '%prog' command is run once to install and initialize repo.
The latest repo source code and manifest collection is downloaded
from the server and is installed in the .repo/ directory in the
current working directory.
The optional -b argument can be used to select the manifest branch
to checkout and use. If no branch is specified, master is assumed.
The optional -m argument can be used to specify an alternate manifest
to be used. If no manifest is specified, the manifest default.xml
will be used.
The --reference option can be used to point to a directory that
has the content of a --mirror sync. This will make the working
directory use as much data as possible from the local reference
directory when fetching from the server. This will make the sync
go a lot faster by reducing data traffic on the network.
Switching Manifest Branches
---------------------------
To switch to another manifest branch, `repo init -b otherbranch`
may be used in an existing client. However, as this only updates the
manifest, a subsequent `repo sync` (or `repo sync -d`) is necessary
to update the working directory files.
"""
def _Options(self, p):
# Logging
g = p.add_option_group('Logging options')
g.add_option('-q', '--quiet',
dest="quiet", action="store_true", default=False,
help="be quiet")
# Manifest
g = p.add_option_group('Manifest options')
g.add_option('-u', '--manifest-url',
dest='manifest_url',
help='manifest repository location', metavar='URL')
g.add_option('-b', '--manifest-branch',
dest='manifest_branch',
help='manifest branch or revision', metavar='REVISION')
g.add_option('-o', '--origin',
dest='manifest_origin',
help="use REMOTE instead of 'origin' to track upstream",
metavar='REMOTE')
if isinstance(self.manifest, XmlManifest) \
or not self.manifest.manifestProject.Exists:
g.add_option('-m', '--manifest-name',
dest='manifest_name', default='default.xml',
help='initial manifest file', metavar='NAME.xml')
g.add_option('--mirror',
dest='mirror', action='store_true',
help='mirror the forrest')
g.add_option('--reference',
dest='reference',
help='location of mirror directory', metavar='DIR')
# Tool
g = p.add_option_group('repo Version options')
g.add_option('--repo-url',
dest='repo_url',
help='repo repository location', metavar='URL')
g.add_option('--repo-branch',
dest='repo_branch',
help='repo branch or revision', metavar='REVISION')
g.add_option('--no-repo-verify',
dest='no_repo_verify', action='store_true',
help='do not verify repo source code')
def _ApplyOptions(self, opt, is_new):
m = self.manifest.manifestProject
if is_new:
if opt.manifest_origin:
m.remote.name = opt.manifest_origin
if opt.manifest_branch:
m.revisionExpr = opt.manifest_branch
else:
m.revisionExpr = 'refs/heads/master'
else:
if opt.manifest_origin:
print >>sys.stderr, 'fatal: cannot change origin name'
sys.exit(1)
if opt.manifest_branch:
m.revisionExpr = opt.manifest_branch
else:
m.PreSync()
def _SyncManifest(self, opt):
m = self.manifest.manifestProject
is_new = not m.Exists
if is_new:
if not opt.manifest_url:
print >>sys.stderr, 'fatal: manifest url (-u) is required.'
sys.exit(1)
if not opt.quiet:
print >>sys.stderr, 'Getting manifest ...'
print >>sys.stderr, ' from %s' % opt.manifest_url
m._InitGitDir()
self._ApplyOptions(opt, is_new)
if opt.manifest_url:
r = m.GetRemote(m.remote.name)
r.url = opt.manifest_url
r.ResetFetch()
r.Save()
if opt.reference:
m.config.SetString('repo.reference', opt.reference)
if opt.mirror:
if is_new:
m.config.SetString('repo.mirror', 'true')
m.config.ClearCache()
else:
print >>sys.stderr, 'fatal: --mirror not supported on existing client'
sys.exit(1)
if not m.Sync_NetworkHalf():
r = m.GetRemote(m.remote.name)
print >>sys.stderr, 'fatal: cannot obtain manifest %s' % r.url
sys.exit(1)
if is_new and SubmoduleManifest.IsBare(m):
new = self.GetManifest(reparse=True, type=SubmoduleManifest)
if m.gitdir != new.manifestProject.gitdir:
os.rename(m.gitdir, new.manifestProject.gitdir)
new = self.GetManifest(reparse=True, type=SubmoduleManifest)
m = new.manifestProject
self._ApplyOptions(opt, is_new)
if not is_new:
# Force the manifest to load if it exists, the old graph
# may be needed inside of _ReloadManifest().
#
self.manifest.projects
syncbuf = SyncBuffer(m.config)
m.Sync_LocalHalf(syncbuf)
syncbuf.Finish()
if isinstance(self.manifest, XmlManifest):
self._LinkManifest(opt.manifest_name)
_ReloadManifest(self)
self._ApplyOptions(opt, is_new)
if not self.manifest.InitBranch():
print >>sys.stderr, 'fatal: cannot create branch in manifest'
sys.exit(1)
def _LinkManifest(self, name):
if not name:
print >>sys.stderr, 'fatal: manifest name (-m) is required.'
sys.exit(1)
try:
self.manifest.Link(name)
except ManifestParseError, e:
print >>sys.stderr, "fatal: manifest '%s' not available" % name
print >>sys.stderr, 'fatal: %s' % str(e)
sys.exit(1)
def _Prompt(self, prompt, value):
mp = self.manifest.manifestProject
sys.stdout.write('%-10s [%s]: ' % (prompt, value))
a = sys.stdin.readline().strip()
if a == '':
return value
return a
def _ConfigureUser(self):
mp = self.manifest.manifestProject
while True:
print ''
name = self._Prompt('Your Name', mp.UserName)
email = self._Prompt('Your Email', mp.UserEmail)
print ''
print 'Your identity is: %s <%s>' % (name, email)
sys.stdout.write('is this correct [y/n]? ')
a = sys.stdin.readline().strip()
if a in ('yes', 'y', 't', 'true'):
break
if name != mp.UserName:
mp.config.SetString('user.name', name)
if email != mp.UserEmail:
mp.config.SetString('user.email', email)
def _HasColorSet(self, gc):
for n in ['ui', 'diff', 'status']:
if gc.Has('color.%s' % n):
return True
return False
def _ConfigureColor(self):
gc = self.manifest.globalConfig
if self._HasColorSet(gc):
return
class _Test(Coloring):
def __init__(self):
Coloring.__init__(self, gc, 'test color display')
self._on = True
out = _Test()
print ''
print "Testing colorized output (for 'repo diff', 'repo status'):"
for c in ['black','red','green','yellow','blue','magenta','cyan']:
out.write(' ')
out.printer(fg=c)(' %-6s ', c)
out.write(' ')
out.printer(fg='white', bg='black')(' %s ' % 'white')
out.nl()
for c in ['bold','dim','ul','reverse']:
out.write(' ')
out.printer(fg='black', attr=c)(' %-6s ', c)
out.nl()
sys.stdout.write('Enable color display in this user account (y/n)? ')
a = sys.stdin.readline().strip().lower()
if a in ('y', 'yes', 't', 'true', 'on'):
gc.SetString('color.ui', 'auto')
def Execute(self, opt, args):
git_require(MIN_GIT_VERSION, fail=True)
self._SyncManifest(opt)
if os.isatty(0) and os.isatty(1) and not self.manifest.IsMirror:
self._ConfigureUser()
self._ConfigureColor()
if self.manifest.IsMirror:
type = 'mirror '
else:
type = ''
print ''
print 'repo %sinitialized in %s' % (type, self.manifest.topdir)
| Python |
#
# Copyright (C) 2008 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from command import PagedCommand
class Status(PagedCommand):
  """Print a three-column status summary for each selected project."""

  common = True
  helpSummary = "Show the working tree status"
  helpUsage = """
%prog [<project>...]
"""
  helpDescription = """
'%prog' compares the working tree to the staging area (aka index),
and the most recent commit on this branch (HEAD), in each project
specified. A summary is displayed, one line per file where there
is a difference between these three states.
Status Display
--------------
The status display is organized into three columns of information,
for example if the file 'subcmds/status.py' is modified in the
project 'repo' on branch 'devwork':
project repo/ branch devwork
-m subcmds/status.py
The first column explains how the staging area (index) differs from
the last commit (HEAD). Its values are always displayed in upper
case and have the following meanings:
-: no difference
A: added (not in HEAD, in index )
M: modified ( in HEAD, in index, different content )
D: deleted ( in HEAD, not in index )
R: renamed (not in HEAD, in index, path changed )
C: copied (not in HEAD, in index, copied from another)
T: mode changed ( in HEAD, in index, same content )
U: unmerged; conflict resolution required
The second column explains how the working directory differs from
the index. Its values are always displayed in lower case and have
the following meanings:
-: new / unknown (not in index, in work tree )
m: modified ( in index, in work tree, modified )
d: deleted ( in index, not in work tree )
"""

  def Execute(self, opt, args):
    all = self.GetProjects(args)
    clean = 0  # number of projects reporting CLEAN

    # Group projects by the branch they currently have checked out,
    # so one '# on branch' header is printed per branch name.
    on = {}
    for project in all:
      cb = project.CurrentBranch
      if cb:
        if cb not in on:
          on[cb] = []
        on[cb].append(project)

    branch_names = list(on.keys())
    branch_names.sort()
    for cb in branch_names:
      print '# on branch %s' % cb

    for project in all:
      state = project.PrintWorkTreeStatus()
      if state == 'CLEAN':
        clean += 1
    # Only say 'nothing to commit' when every project was clean.
    if len(all) == clean:
      print 'nothing to commit (working directory clean)'
| Python |
#
# Copyright (C) 2009 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from optparse import SUPPRESS_HELP
import sys
from command import Command, MirrorSafeCommand
from subcmds.sync import _PostRepoUpgrade
from subcmds.sync import _PostRepoFetch
class Selfupdate(Command, MirrorSafeCommand):
  """Fetch and install a newer version of the repo tool itself."""

  common = False
  helpSummary = "Update repo to the latest version"
  helpUsage = """
%prog
"""
  helpDescription = """
The '%prog' command upgrades repo to the latest version, if a
newer version is available.
Normally this is done automatically by 'repo sync' and does not
need to be performed by an end-user.
"""

  def _Options(self, p):
    g = p.add_option_group('repo Version options')
    g.add_option('--no-repo-verify',
                 dest='no_repo_verify', action='store_true',
                 help='do not verify repo source code')
    # Hidden flag: set internally when re-invoked after an upgrade.
    g.add_option('--repo-upgraded',
                 dest='repo_upgraded', action='store_true',
                 help=SUPPRESS_HELP)

  def Execute(self, opt, args):
    rp = self.manifest.repoProject
    rp.PreSync()

    if opt.repo_upgraded:
      # We are the freshly installed copy; finish post-upgrade work.
      _PostRepoUpgrade(self.manifest)
    else:
      if not rp.Sync_NetworkHalf():
        print >>sys.stderr, "error: can't update repo"
        sys.exit(1)

      rp.bare_git.gc('--auto')
      _PostRepoFetch(rp,
                     no_repo_verify = opt.no_repo_verify,
                     verbose = True)
| Python |
#
# Copyright (C) 2009 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
from command import Command
from progress import Progress
class Checkout(Command):
  """Check out an existing topic branch in each selected project."""

  common = True
  helpSummary = "Checkout a branch for development"
  helpUsage = """
%prog <branchname> [<project>...]
"""
  helpDescription = """
The '%prog' command checks out an existing branch that was previously
created by 'repo start'.
The command is equivalent to:
repo forall [<project>...] -c git checkout <branchname>
"""

  def Execute(self, opt, args):
    if not args:
      self.Usage()

    nb = args[0]
    err = []
    # Renamed from 'all' to avoid shadowing the builtin.
    all_projects = self.GetProjects(args[1:])

    pm = Progress('Checkout %s' % nb, len(all_projects))
    for project in all_projects:
      pm.update()
      if not project.CheckoutBranch(nb):
        err.append(project)
    pm.end()

    if err:
      if len(err) == len(all_projects):
        print >>sys.stderr, 'error: no project has branch %s' % nb
      else:
        for p in err:
          print >>sys.stderr,\
            "error: %s/: cannot checkout %s" \
            % (p.relpath, nb)
      sys.exit(1)
| Python |
#
# Copyright (C) 2010 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
from command import Command
from git_command import GitCommand
from git_refs import GitRefs, HEAD, R_HEADS, R_TAGS, R_PUB
from error import GitError
class Rebase(Command):
  """Rebase each project's current topic branch onto its upstream."""

  common = True
  helpSummary = "Rebase local branches on upstream branch"
  helpUsage = """
%prog {[<project>...] | -i <project>...}
"""
  helpDescription = """
'%prog' uses git rebase to move local changes in the current topic branch to
the HEAD of the upstream history, useful when you have made commits in a topic
branch but need to incorporate new upstream changes "underneath" them.
"""

  def _Options(self, p):
    p.add_option('-i', '--interactive',
                 dest="interactive", action="store_true",
                 help="interactive rebase (single project only)")
    p.add_option('-f', '--force-rebase',
                 dest='force_rebase', action='store_true',
                 help='Pass --force-rebase to git rebase')
    p.add_option('--no-ff',
                 dest='no_ff', action='store_true',
                 help='Pass --no-ff to git rebase')
    p.add_option('-q', '--quiet',
                 dest='quiet', action='store_true',
                 help='Pass --quiet to git rebase')
    p.add_option('--autosquash',
                 dest='autosquash', action='store_true',
                 help='Pass --autosquash to git rebase')
    p.add_option('--whitespace',
                 dest='whitespace', action='store', metavar='WS',
                 help='Pass --whitespace to git rebase')

  def Execute(self, opt, args):
    # Renamed from 'all' to avoid shadowing the builtin.
    all_projects = self.GetProjects(args)
    one_project = len(all_projects) == 1

    if opt.interactive and not one_project:
      print >>sys.stderr, 'error: interactive rebase not supported with multiple projects'
      return -1

    for project in all_projects:
      cb = project.CurrentBranch
      if not cb:
        if one_project:
          # Typo fix: was "detatched".
          print >>sys.stderr, "error: project %s has a detached HEAD" % project.relpath
          return -1
        # ignore branches with detached HEADs
        continue

      upbranch = project.GetBranch(cb)
      if not upbranch.LocalMerge:
        if one_project:
          print >>sys.stderr, "error: project %s does not track any remote branches" % project.relpath
          return -1
        # ignore branches without remotes
        continue

      # Build the git command line from the pass-through options.
      # (Local renamed from 'args' to stop shadowing the parameter.)
      rb_args = ["rebase"]

      if opt.whitespace:
        rb_args.append('--whitespace=%s' % opt.whitespace)

      if opt.quiet:
        rb_args.append('--quiet')

      if opt.force_rebase:
        rb_args.append('--force-rebase')

      if opt.no_ff:
        rb_args.append('--no-ff')

      if opt.autosquash:
        rb_args.append('--autosquash')

      if opt.interactive:
        rb_args.append("-i")

      rb_args.append(upbranch.LocalMerge)

      print >>sys.stderr, '# %s: rebasing %s -> %s' % \
        (project.relpath, cb, upbranch.LocalMerge)

      if GitCommand(project, rb_args).Wait() != 0:
        return -1
| Python |
#
# Copyright (C) 2009 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
from command import Command, MirrorSafeCommand
from git_command import git
from project import HEAD
class Version(Command, MirrorSafeCommand):
  """Report the versions of repo, git and Python in use."""

  common = False
  helpSummary = "Display the version of repo"
  helpUsage = """
%prog
"""

  def Execute(self, opt, args):
    rp = self.manifest.repoProject
    rem = rp.GetRemote(rp.remote.name)

    # Version of the repo source checkout itself, plus where it came from.
    print 'repo version %s' % rp.work_git.describe(HEAD)
    print ' (from %s)' % rem.url
    # Versions of the underlying tools.
    print git.version().strip()
    print 'Python %s' % sys.version
| Python |
#
# Copyright (C) 2008 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import re
import sys
from command import Command
CHANGE_RE = re.compile(r'^([1-9][0-9]*)(?:[/\.-]([1-9][0-9]*))?$')
class Download(Command):
  """Fetch a review-system change into a project and check it out."""

  common = True
  helpSummary = "Download and checkout a change"
  helpUsage = """
%prog {project change[/patchset]}...
"""
  helpDescription = """
The '%prog' command downloads a change from the review system and
makes it available in your project's local working directory.
"""

  def _Options(self, p):
    pass

  def _ParseChangeIds(self, args):
    """Pair change[/patchset] arguments with the most recent project name.

    Returns a list of (project, change_id, patchset_id) tuples; the
    patchset defaults to 1 when not given. An argument that does not
    match CHANGE_RE selects the project for the changes that follow it.
    """
    if not args:
      self.Usage()

    to_get = []
    project = None

    for a in args:
      m = CHANGE_RE.match(a)
      if m:
        # A change number before any project name is a usage error.
        if not project:
          self.Usage()
        chg_id = int(m.group(1))
        if m.group(2):
          ps_id = int(m.group(2))
        else:
          ps_id = 1
        to_get.append((project, chg_id, ps_id))
      else:
        project = self.GetProjects([a])[0]
    return to_get

  def Execute(self, opt, args):
    for project, change_id, ps_id in self._ParseChangeIds(args):
      dl = project.DownloadPatchSet(change_id, ps_id)
      if not dl:
        print >>sys.stderr, \
          '[%s] change %d/%d not found' \
          % (project.name, change_id, ps_id)
        sys.exit(1)

      if not dl.commits:
        # Nothing new to check out; skip this change.
        print >>sys.stderr, \
          '[%s] change %d/%d has already been merged' \
          % (project.name, change_id, ps_id)
        continue

      if len(dl.commits) > 1:
        # Warn (but continue) when the change pulls in extra commits.
        print >>sys.stderr, \
          '[%s] %d/%d depends on %d unmerged changes:' \
          % (project.name, change_id, ps_id, len(dl.commits))
        for c in dl.commits:
          print >>sys.stderr, ' %s' % (c)
      project._Checkout(dl.commit)
| Python |
#
# Copyright (C) 2008 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from color import Coloring
from command import PagedCommand
class Prune(PagedCommand):
common = True
helpSummary = "Prune (delete) already merged topics"
helpUsage = """
%prog [<project>...]
"""
def Execute(self, opt, args):
all = []
for project in self.GetProjects(args):
all.extend(project.PruneHeads())
if not all:
return
class Report(Coloring):
def __init__(self, config):
Coloring.__init__(self, config, 'status')
self.project = self.printer('header', attr='bold')
out = Report(all[0].project.config)
out.project('Pending Branches')
out.nl()
project = None
for branch in all:
if project != branch.project:
project = branch.project
out.nl()
out.project('project %s/' % project.relpath)
out.nl()
commits = branch.commits
date = branch.date
print '%s %-33s (%2d commit%s, %s)' % (
branch.name == project.CurrentBranch and '*' or ' ',
branch.name,
len(commits),
len(commits) != 1 and 's' or ' ',
date)
| Python |
#
# Copyright (C) 2008 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import re
import sys
from formatter import AbstractFormatter, DumbWriter
from color import Coloring
from command import PagedCommand, MirrorSafeCommand
class Help(PagedCommand, MirrorSafeCommand):
  """Implements `repo help`: command listings and per-command help."""
  common = False
  helpSummary = "Display detailed help on a command"
  helpUsage = """
%prog [--all|command]
"""
  helpDescription = """
Displays detailed usage information about a command.
"""
  def _PrintAllCommands(self):
    # List every registered command, common or not.
    print 'usage: repo COMMAND [ARGS]'
    print """
The complete list of recognized repo commands are:
"""
    commandNames = self.commands.keys()
    commandNames.sort()
    # Width of the widest command name, so summaries line up.
    maxlen = 0
    for name in commandNames:
      maxlen = max(maxlen, len(name))
    fmt = ' %%-%ds %%s' % maxlen
    for name in commandNames:
      command = self.commands[name]
      # Commands without a helpSummary are still listed, just blank.
      try:
        summary = command.helpSummary.strip()
      except AttributeError:
        summary = ''
      print fmt % (name, summary)
    print """
See 'repo help <command>' for more information on a specific command.
"""
  def _PrintCommonCommands(self):
    # Same layout as _PrintAllCommands, restricted to common commands.
    print 'usage: repo COMMAND [ARGS]'
    print """
The most commonly used repo commands are:
"""
    commandNames = [name
                    for name in self.commands.keys()
                    if self.commands[name].common]
    commandNames.sort()
    maxlen = 0
    for name in commandNames:
      maxlen = max(maxlen, len(name))
    fmt = ' %%-%ds %%s' % maxlen
    for name in commandNames:
      command = self.commands[name]
      try:
        summary = command.helpSummary.strip()
      except AttributeError:
        summary = ''
      print fmt % (name, summary)
    print """
See 'repo help <command>' for more information on a specific command.
See 'repo help --all' for a complete list of recognized commands.
"""
  def _PrintCommandHelp(self, cmd):
    """Render Summary, option help and Description sections for cmd."""
    class _Out(Coloring):
      def __init__(self, gc):
        Coloring.__init__(self, gc, 'help')
        self.heading = self.printer('heading', attr='bold')
        # DumbWriter reflows flowing paragraphs to the output width.
        self.wrap = AbstractFormatter(DumbWriter())
      def _PrintSection(self, heading, bodyAttr):
        """Print one titled section taken from cmd.<bodyAttr>."""
        try:
          body = getattr(cmd, bodyAttr)
        except AttributeError:
          return
        if body == '' or body is None:
          return
        self.nl()
        self.heading('%s', heading)
        self.nl()
        self.heading('%s', ''.ljust(len(heading), '-'))
        self.nl()
        me = 'repo %s' % cmd.NAME
        body = body.strip()
        body = body.replace('%prog', me)
        # Matches an asciidoc style title followed by an underline
        # made of '=', '~' or '-' characters.
        asciidoc_hdr = re.compile(r'^\n?([^\n]{1,})\n([=~-]{2,})$')
        for para in body.split("\n\n"):
          if para.startswith(' '):
            # Indented paragraphs are preformatted; emit verbatim.
            self.write('%s', para)
            self.nl()
            self.nl()
            continue
          m = asciidoc_hdr.match(para)
          if m:
            title = m.group(1)
            type = m.group(2)
            if type[0] in ('=', '-'):
              # Top-level headings print flush left in heading color.
              p = self.heading
            else:
              # Subsection headings get a small leading indent.
              def _p(fmt, *args):
                self.write(' ')
                self.heading(fmt, *args)
              p = _p
            p('%s', title)
            self.nl()
            p('%s', ''.ljust(len(title),type[0]))
            self.nl()
            continue
          # Ordinary prose: reflow through the formatter.
          self.wrap.add_flowing_data(para)
          self.wrap.end_paragraph(1)
        self.wrap.end_paragraph(0)
    out = _Out(self.manifest.globalConfig)
    out._PrintSection('Summary', 'helpSummary')
    cmd.OptionParser.print_help()
    out._PrintSection('Description', 'helpDescription')
  def _Options(self, p):
    p.add_option('-a', '--all',
                 dest='show_all', action='store_true',
                 help='show the complete list of commands')
  def Execute(self, opt, args):
    if len(args) == 0:
      if opt.show_all:
        self._PrintAllCommands()
      else:
        self._PrintCommonCommands()
    elif len(args) == 1:
      name = args[0]
      try:
        cmd = self.commands[name]
      except KeyError:
        print >>sys.stderr, "repo: '%s' is not a repo command." % name
        sys.exit(1)
      # The command needs a repodir before its help can be formatted.
      cmd.repodir = self.repodir
      self._PrintCommandHelp(cmd)
    else:
      # More than one argument: show help about help itself.
      self._PrintCommandHelp(self)
| Python |
#
# Copyright (C) 2008 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
# Public table of command line name -> command instance, built by
# scanning this package directory for modules at import time.
# NOTE: 'all' shadows the builtin, but it is this module's public API
# and cannot be renamed without breaking importers.
all = {}
my_dir = os.path.dirname(__file__)
for py in os.listdir(my_dir):
  if py == '__init__.py':
    continue
  if py.endswith('.py'):
    name = py[:-3]
    # Derive the expected class name from the module name:
    # capitalize and remove underscores, camel-casing each word
    # (e.g. 'foo_bar' -> 'FooBar').
    clsn = name.capitalize()
    while clsn.find('_') > 0:
      h = clsn.index('_')
      clsn = clsn[0:h] + clsn[h + 1:].capitalize()
    # Import the submodule, then pull the module object off the
    # package (py2 __import__ returns the package itself here).
    mod = __import__(__name__,
                     globals(),
                     locals(),
                     ['%s' % name])
    mod = getattr(mod, name)
    try:
      cmd = getattr(mod, clsn)()
    except AttributeError:
      raise SyntaxError, '%s/%s does not define class %s' % (
             __name__, py, clsn)
    # Command line names use '-' where the file name used '_'.
    name = name.replace('_', '-')
    cmd.NAME = name
    all[name] = cmd
if 'help' in all:
  # The help command needs the full table to list other commands.
  all['help'].commands = all
| Python |
#
# Copyright (C) 2008 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import fcntl
import re
import os
import select
import sys
import subprocess
from color import Coloring
from command import Command, MirrorSafeCommand
# git subcommands that accept a --color option; used below to force
# color on when output is captured into the forall pipe (git would
# otherwise detect the pipe and disable coloring).
_CAN_COLOR = [
  'branch',
  'diff',
  'grep',
  'log',
]
class ForallColoring(Coloring):
  """Coloring scheme for forall output; 'project' renders headers bold."""
  def __init__(self, config):
    Coloring.__init__(self, config, 'forall')
    self.project = self.printer('project', attr='bold')
class Forall(Command, MirrorSafeCommand):
  """Implements `repo forall`: run a shell command in every project."""
  common = False
  helpSummary = "Run a shell command in each project"
  helpUsage = """
%prog [<project>...] -c <command> [<arg>...]
"""
  helpDescription = """
Executes the same shell command in each project.
Output Formatting
-----------------
The -p option causes '%prog' to bind pipes to the command's stdin,
stdout and stderr streams, and pipe all output into a continuous
stream that is displayed in a single pager session. Project headings
are inserted before the output of each command is displayed. If the
command produces no output in a project, no heading is displayed.
The formatting convention used by -p is very suitable for some
types of searching, e.g. `repo forall -p -c git log -SFoo` will
print all commits that add or remove references to Foo.
The -v option causes '%prog' to display stderr messages if a
command produces output only on stderr. Normally the -p option
causes command output to be suppressed until the command produces
at least one byte of output on stdout.
Environment
-----------
pwd is the project's working directory. If the current client is
a mirror client, then pwd is the Git repository.
REPO_PROJECT is set to the unique name of the project.
REPO_PATH is the path relative the the root of the client.
REPO_REMOTE is the name of the remote system from the manifest.
REPO_LREV is the name of the revision from the manifest, translated
to a local tracking branch. If you need to pass the manifest
revision to a locally executed git command, use REPO_LREV.
REPO_RREV is the name of the revision from the manifest, exactly
as written in the manifest.
shell positional arguments ($1, $2, .., $#) are set to any arguments
following <command>.
Unless -p is used, stdin, stdout, stderr are inherited from the
terminal and are not redirected.
"""
  def _Options(self, p):
    def cmd(option, opt_str, value, parser):
      # Consume every remaining argument as part of -c's command,
      # so users need not quote the whole command line.
      setattr(parser.values, option.dest, list(parser.rargs))
      while parser.rargs:
        del parser.rargs[0]
    p.add_option('-c', '--command',
                 help='Command (and arguments) to execute',
                 dest='command',
                 action='callback',
                 callback=cmd)
    g = p.add_option_group('Output')
    g.add_option('-p',
                 dest='project_header', action='store_true',
                 help='Show project headers before output')
    g.add_option('-v', '--verbose',
                 dest='verbose', action='store_true',
                 help='Show command error messages')
  def WantPager(self, opt):
    # Only the combined -p output stream goes through the pager.
    return opt.project_header
  def Execute(self, opt, args):
    if not opt.command:
      self.Usage()
    cmd = [opt.command[0]]
    # Simple command words run directly; anything with shell
    # metacharacters is handed to /bin/sh with $0 bound below.
    shell = True
    if re.compile(r'^[a-z0-9A-Z_/\.-]+$').match(cmd[0]):
      shell = False
    if shell:
      cmd.append(cmd[0])
    cmd.extend(opt.command[1:])
    if opt.project_header \
    and not shell \
    and cmd[0] == 'git':
      # If this is a direct git command that can enable colorized
      # output and the user prefers coloring, add --color into the
      # command line because we are going to wrap the command into
      # a pipe and git won't know coloring should activate.
      #
      for cn in cmd[1:]:
        if not cn.startswith('-'):
          break
      if cn in _CAN_COLOR:
        class ColorCmd(Coloring):
          def __init__(self, config, cmd):
            Coloring.__init__(self, config, cmd)
        if ColorCmd(self.manifest.manifestProject.config, cn).is_on:
          cmd.insert(cmd.index(cn) + 1, '--color')
    mirror = self.manifest.IsMirror
    out = ForallColoring(self.manifest.manifestProject.config)
    out.redirect(sys.stdout)
    rc = 0
    first = True
    for project in self.GetProjects(args):
      env = os.environ.copy()
      def setenv(name, val):
        if val is None:
          val = ''
        env[name] = val.encode()
      setenv('REPO_PROJECT', project.name)
      setenv('REPO_PATH', project.relpath)
      setenv('REPO_REMOTE', project.remote.name)
      setenv('REPO_LREV', project.GetRevisionId())
      setenv('REPO_RREV', project.revisionExpr)
      if mirror:
        setenv('GIT_DIR', project.gitdir)
        cwd = project.gitdir
      else:
        cwd = project.worktree
      if not os.path.exists(cwd):
        # Missing worktree: warn unless -p without -v would have
        # suppressed the message anyway.
        if (opt.project_header and opt.verbose) \
        or not opt.project_header:
          print >>sys.stderr, 'skipping %s/' % project.relpath
        continue
      if opt.project_header:
        stdin = subprocess.PIPE
        stdout = subprocess.PIPE
        stderr = subprocess.PIPE
      else:
        stdin = None
        stdout = None
        stderr = None
      p = subprocess.Popen(cmd,
                           cwd = cwd,
                           shell = shell,
                           env = env,
                           stdin = stdin,
                           stdout = stdout,
                           stderr = stderr)
      if opt.project_header:
        # Pump the child's stdout/stderr through select() so the
        # project heading is printed only once output appears.
        class sfd(object):
          def __init__(self, fd, dest):
            self.fd = fd
            self.dest = dest
          def fileno(self):
            return self.fd.fileno()
        empty = True
        didout = False
        errbuf = ''
        p.stdin.close()
        s_in = [sfd(p.stdout, sys.stdout),
                sfd(p.stderr, sys.stderr)]
        for s in s_in:
          # Non-blocking reads so one stream cannot stall the other.
          flags = fcntl.fcntl(s.fd, fcntl.F_GETFL)
          fcntl.fcntl(s.fd, fcntl.F_SETFL, flags | os.O_NONBLOCK)
        while s_in:
          in_ready, out_ready, err_ready = select.select(s_in, [], [])
          for s in in_ready:
            buf = s.fd.read(4096)
            if not buf:
              # EOF on this stream; stop watching it.
              s.fd.close()
              s_in.remove(s)
              continue
            if not opt.verbose:
              # Without -v, hold stderr until stdout produces data.
              if s.fd == p.stdout:
                didout = True
              else:
                errbuf += buf
                continue
            if empty:
              # First visible output: emit the project heading and
              # flush any stderr buffered before it.
              if first:
                first = False
              else:
                out.nl()
              out.project('project %s/', project.relpath)
              out.nl()
              out.flush()
              if errbuf:
                sys.stderr.write(errbuf)
                sys.stderr.flush()
                errbuf = ''
              empty = False
            s.dest.write(buf)
            s.dest.flush()
      r = p.wait()
      # Remember the first non-zero exit status seen.
      if r != 0 and r != rc:
        rc = r
    if rc != 0:
      sys.exit(rc)
| Python |
#
# Copyright (C) 2008 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
from color import Coloring
from command import InteractiveCommand
from git_command import GitCommand
class _ProjectList(Coloring):
  """Coloring scheme for the interactive project selection menu."""
  def __init__(self, gc):
    Coloring.__init__(self, gc, 'interactive')
    self.prompt = self.printer('prompt', fg='blue', attr='bold')
    self.header = self.printer('header', attr='bold')
    self.help = self.printer('help', fg='red', attr='bold')
class Stage(InteractiveCommand):
  """Implements `repo stage`: interactively stage files for commit."""
  common = True
  helpSummary = "Stage file(s) for commit"
  helpUsage = """
%prog -i [<project>...]
"""
  helpDescription = """
The '%prog' command stages files to prepare the next commit.
"""
  def _Options(self, p):
    p.add_option('-i', '--interactive',
                 dest='interactive', action='store_true',
                 help='use interactive staging')
  def Execute(self, opt, args):
    if opt.interactive:
      self._Interactive(opt, args)
    else:
      # Only the interactive mode is implemented.
      self.Usage()
  def _Interactive(self, opt, args):
    """Loop: show dirty projects, run `git add -i` on the selection."""
    # Only projects with uncommitted modifications are offered.
    all = filter(lambda x: x.IsDirty(), self.GetProjects(args))
    if not all:
      print >>sys.stderr,'no projects have uncommitted modifications'
      return
    out = _ProjectList(self.manifest.manifestProject.config)
    while True:
      # Menu: one numbered line per dirty project, 0/q to quit.
      out.header(' %s', 'project')
      out.nl()
      for i in xrange(0, len(all)):
        p = all[i]
        out.write('%3d: %s', i + 1, p.relpath + '/')
        out.nl()
      out.nl()
      out.write('%3d: (', 0)
      out.prompt('q')
      out.write('uit)')
      out.nl()
      out.prompt('project> ')
      try:
        a = sys.stdin.readline()
      except KeyboardInterrupt:
        out.nl()
        break
      if a == '':
        # EOF on stdin also exits the loop.
        out.nl()
        break
      a = a.strip()
      if a.lower() in ('q', 'quit', 'exit'):
        break
      if not a:
        continue
      try:
        a_index = int(a)
      except ValueError:
        a_index = None
      if a_index is not None:
        if a_index == 0:
          break
        # 1-based menu index into the project list.
        if 0 < a_index and a_index <= len(all):
          _AddI(all[a_index - 1])
          continue
      # Not a number: try to match a project name or path exactly.
      p = filter(lambda x: x.name == a or x.relpath == a, all)
      if len(p) == 1:
        _AddI(p[0])
        continue
    print 'Bye.'
def _AddI(project):
  """Run `git add --interactive` inside the project's working tree."""
  cmd = GitCommand(project, ['add', '--interactive'], bare=False)
  cmd.Wait()
| Python |
#
# Copyright (C) 2010 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from sync import Sync
class Smartsync(Sync):
  """Implements `repo smartsync`, an alias for `repo sync -s`."""
  common = True
  helpSummary = "Update working tree to the latest known good revision"
  helpUsage = """
%prog [<project>...]
"""
  helpDescription = """
The '%prog' command is a shortcut for sync -s.
"""
  def _Options(self, p):
    # Hide the -s flag from the option set; smartsync implies it.
    Sync._Options(self, p, show_smart=False)
  def Execute(self, opt, args):
    # Force smart-sync mode and defer everything else to Sync.
    opt.smart_sync = True
    Sync.Execute(self, opt, args)
| Python |
#
# Copyright (C) 2008 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from optparse import SUPPRESS_HELP
import os
import re
import shutil
import socket
import subprocess
import sys
import time
import xmlrpclib
try:
import threading as _threading
except ImportError:
import dummy_threading as _threading
from git_command import GIT
from git_refs import R_HEADS
from project import HEAD
from project import Project
from project import RemoteSpec
from command import Command, MirrorSafeCommand
from error import RepoChangedException, GitError
from project import R_HEADS
from project import SyncBuffer
from progress import Progress
class Sync(Command, MirrorSafeCommand):
  """Implements `repo sync`: fetch and merge all manifest projects."""
  # Default number of parallel fetch jobs; overridden by -j.
  jobs = 1
  common = True
  helpSummary = "Update working tree to the latest revision"
  helpUsage = """
%prog [<project>...]
"""
  helpDescription = """
The '%prog' command synchronizes local project directories
with the remote repositories specified in the manifest. If a local
project does not yet exist, it will clone a new local directory from
the remote repository and set up tracking branches as specified in
the manifest. If the local project already exists, '%prog'
will update the remote branches and rebase any new local changes
on top of the new remote changes.
'%prog' will synchronize all projects listed at the command
line. Projects can be specified either by name, or by a relative
or absolute path to the project's local directory. If no projects
are specified, '%prog' will synchronize all projects listed in
the manifest.
The -d/--detach option can be used to switch specified projects
back to the manifest revision. This option is especially helpful
if the project is currently on a topic branch, but the manifest
revision is temporarily needed.
The -s/--smart-sync option can be used to sync to a known good
build as specified by the manifest-server element in the current
manifest.
The -f/--force-broken option can be used to proceed with syncing
other projects if a project sync fails.
SSH Connections
---------------
If at least one project remote URL uses an SSH connection (ssh://,
git+ssh://, or user@host:path syntax) repo will automatically
enable the SSH ControlMaster option when connecting to that host.
This feature permits other projects in the same '%prog' session to
reuse the same SSH tunnel, saving connection setup overheads.
To disable this behavior on UNIX platforms, set the GIT_SSH
environment variable to 'ssh'. For example:
  export GIT_SSH=ssh
  %prog
Compatibility
~~~~~~~~~~~~~
This feature is automatically disabled on Windows, due to the lack
of UNIX domain socket support.
This feature is not compatible with url.insteadof rewrites in the
user's ~/.gitconfig. '%prog' is currently not able to perform the
rewrite early enough to establish the ControlMaster tunnel.
If the remote SSH daemon is Gerrit Code Review, version 2.0.10 or
later is required to fix a server side protocol bug.
"""
  def _Options(self, p, show_smart=True):
    # show_smart=False lets Smartsync reuse these options minus -s.
    p.add_option('-f', '--force-broken',
                 dest='force_broken', action='store_true',
                 help="continue sync even if a project fails to sync")
    p.add_option('-l','--local-only',
                 dest='local_only', action='store_true',
                 help="only update working tree, don't fetch")
    p.add_option('-n','--network-only',
                 dest='network_only', action='store_true',
                 help="fetch only, don't update working tree")
    p.add_option('-d','--detach',
                 dest='detach_head', action='store_true',
                 help='detach projects back to manifest revision')
    p.add_option('-q','--quiet',
                 dest='quiet', action='store_true',
                 help='be more quiet')
    p.add_option('-j','--jobs',
                 dest='jobs', action='store', type='int',
                 help="number of projects to fetch simultaneously")
    if show_smart:
      p.add_option('-s', '--smart-sync',
                   dest='smart_sync', action='store_true',
                   help='smart sync using manifest from a known good build')
    g = p.add_option_group('repo Version options')
    g.add_option('--no-repo-verify',
                 dest='no_repo_verify', action='store_true',
                 help='do not verify repo source code')
    g.add_option('--repo-upgraded',
                 dest='repo_upgraded', action='store_true',
                 help=SUPPRESS_HELP)
  def _FetchHelper(self, opt, project, lock, fetched, pm, sem):
    """Worker for one threaded fetch; updates shared state under lock.

    sem bounds concurrency to self.jobs and must be released on every
    exit path; fetched/pm are shared, guarded by lock.
    """
    if not project.Sync_NetworkHalf(quiet=opt.quiet):
      print >>sys.stderr, 'error: Cannot fetch %s' % project.name
      if opt.force_broken:
        print >>sys.stderr, 'warn: --force-broken, continuing to sync'
      else:
        # Release the semaphore before killing the process so other
        # workers are not left blocked.
        sem.release()
        sys.exit(1)
    lock.acquire()
    fetched.add(project.gitdir)
    pm.update()
    lock.release()
    sem.release()
  def _Fetch(self, projects, opt):
    """Fetch each project (serially or threaded); return gitdirs fetched."""
    fetched = set()
    pm = Progress('Fetching projects', len(projects))
    if self.jobs == 1:
      for project in projects:
        pm.update()
        if project.Sync_NetworkHalf(quiet=opt.quiet):
          fetched.add(project.gitdir)
        else:
          print >>sys.stderr, 'error: Cannot fetch %s' % project.name
          if opt.force_broken:
            print >>sys.stderr, 'warn: --force-broken, continuing to sync'
          else:
            sys.exit(1)
    else:
      threads = set()
      lock = _threading.Lock()
      # Semaphore caps the number of concurrently running fetches.
      sem = _threading.Semaphore(self.jobs)
      for project in projects:
        sem.acquire()
        t = _threading.Thread(target = self._FetchHelper,
                              args = (opt,
                                      project,
                                      lock,
                                      fetched,
                                      pm,
                                      sem))
        threads.add(t)
        t.start()
      for t in threads:
        t.join()
    pm.end()
    # Housekeeping after the fetch; cheap no-op when not needed.
    for project in projects:
      project.bare_git.gc('--auto')
    return fetched
  def UpdateProjectList(self):
    """Rewrite .repo/project.list; delete worktrees no longer listed.

    Returns 0 on success, -1 if a removed project has uncommitted
    changes (in which case nothing is deleted).
    """
    new_project_paths = []
    for project in self.manifest.projects.values():
      if project.relpath:
        new_project_paths.append(project.relpath)
    file_name = 'project.list'
    file_path = os.path.join(self.manifest.repodir, file_name)
    old_project_paths = []
    if os.path.exists(file_path):
      fd = open(file_path, 'r')
      try:
        old_project_paths = fd.read().split('\n')
      finally:
        fd.close()
      for path in old_project_paths:
        if not path:
          continue
        if path not in new_project_paths:
          """If the path has already been deleted, we don't need to do it
          """
          if os.path.exists(self.manifest.topdir + '/' + path):
            # Build a temporary Project so we can use its helpers to
            # test for local modifications before deleting anything.
            project = Project(
                           manifest = self.manifest,
                           name = path,
                           remote = RemoteSpec('origin'),
                           gitdir = os.path.join(self.manifest.topdir,
                                                 path, '.git'),
                           worktree = os.path.join(self.manifest.topdir, path),
                           relpath = path,
                           revisionExpr = 'HEAD',
                           revisionId = None)
            if project.IsDirty():
              print >>sys.stderr, 'error: Cannot remove project "%s": \
uncommitted changes are present' % project.relpath
              print >>sys.stderr, ' commit changes, then run sync again'
              return -1
            else:
              print >>sys.stderr, 'Deleting obsolete path %s' % project.worktree
              shutil.rmtree(project.worktree)
              # Try deleting parent subdirs if they are empty
              dir = os.path.dirname(project.worktree)
              while dir != self.manifest.topdir:
                try:
                  os.rmdir(dir)
                except OSError:
                  break
                dir = os.path.dirname(dir)
    new_project_paths.sort()
    fd = open(file_path, 'w')
    try:
      fd.write('\n'.join(new_project_paths))
      fd.write('\n')
    finally:
      fd.close()
    return 0
  def Execute(self, opt, args):
    if opt.jobs:
      self.jobs = opt.jobs
    if opt.network_only and opt.detach_head:
      print >>sys.stderr, 'error: cannot combine -n and -d'
      sys.exit(1)
    if opt.network_only and opt.local_only:
      print >>sys.stderr, 'error: cannot combine -n and -l'
      sys.exit(1)
    if opt.smart_sync:
      # Ask the manifest server for a known-good manifest and swap
      # it in via Override() before syncing.
      if not self.manifest.manifest_server:
        print >>sys.stderr, \
            'error: cannot smart sync: no manifest server defined in manifest'
        sys.exit(1)
      try:
        server = xmlrpclib.Server(self.manifest.manifest_server)
        p = self.manifest.manifestProject
        b = p.GetBranch(p.CurrentBranch)
        branch = b.merge
        if branch.startswith(R_HEADS):
          branch = branch[len(R_HEADS):]
        env = os.environ.copy()
        # Android build environment variables, when present, select
        # a target-specific approved manifest.
        if (env.has_key('TARGET_PRODUCT') and
            env.has_key('TARGET_BUILD_VARIANT')):
          target = '%s-%s' % (env['TARGET_PRODUCT'],
                              env['TARGET_BUILD_VARIANT'])
          [success, manifest_str] = server.GetApprovedManifest(branch, target)
        else:
          [success, manifest_str] = server.GetApprovedManifest(branch)
        if success:
          manifest_name = "smart_sync_override.xml"
          manifest_path = os.path.join(self.manifest.manifestProject.worktree,
                                       manifest_name)
          try:
            f = open(manifest_path, 'w')
            try:
              f.write(manifest_str)
            finally:
              f.close()
          except IOError:
            print >>sys.stderr, 'error: cannot write manifest to %s' % \
                manifest_path
            sys.exit(1)
          self.manifest.Override(manifest_name)
        else:
          print >>sys.stderr, 'error: %s' % manifest_str
          sys.exit(1)
      except socket.error:
        print >>sys.stderr, 'error: cannot connect to manifest server %s' % (
            self.manifest.manifest_server)
        sys.exit(1)
    rp = self.manifest.repoProject
    rp.PreSync()
    mp = self.manifest.manifestProject
    mp.PreSync()
    if opt.repo_upgraded:
      _PostRepoUpgrade(self.manifest)
    if not opt.local_only:
      mp.Sync_NetworkHalf(quiet=opt.quiet)
    if mp.HasChanges:
      # The manifest itself changed; apply it and reparse before
      # deciding which projects to sync.
      syncbuf = SyncBuffer(mp.config)
      mp.Sync_LocalHalf(syncbuf)
      if not syncbuf.Finish():
        sys.exit(1)
      self.manifest._Unload()
    all = self.GetProjects(args, missing_ok=True)
    if not opt.local_only:
      to_fetch = []
      now = time.time()
      # Refresh the repo source itself at most once a day.
      if (24 * 60 * 60) <= (now - rp.LastFetch):
        to_fetch.append(rp)
      to_fetch.extend(all)
      fetched = self._Fetch(to_fetch, opt)
      _PostRepoFetch(rp, opt.no_repo_verify)
      if opt.network_only:
        # bail out now; the rest touches the working tree
        return
      if mp.HasChanges:
        # The fetch brought in a newer manifest: apply it, reload,
        # and fetch any projects the new manifest added.
        syncbuf = SyncBuffer(mp.config)
        mp.Sync_LocalHalf(syncbuf)
        if not syncbuf.Finish():
          sys.exit(1)
        _ReloadManifest(self)
        mp = self.manifest.manifestProject
        all = self.GetProjects(args, missing_ok=True)
        missing = []
        for project in all:
          if project.gitdir not in fetched:
            missing.append(project)
        self._Fetch(missing, opt)
    if self.manifest.IsMirror:
      # bail out now, we have no working tree
      return
    if self.UpdateProjectList():
      sys.exit(1)
    syncbuf = SyncBuffer(mp.config,
                         detach_head = opt.detach_head)
    pm = Progress('Syncing work tree', len(all))
    for project in all:
      pm.update()
      if project.worktree:
        project.Sync_LocalHalf(syncbuf)
    pm.end()
    print >>sys.stderr
    if not syncbuf.Finish():
      sys.exit(1)
def _ReloadManifest(cmd):
  """Reparse the manifest after it may have changed on disk."""
  old = cmd.manifest
  new = cmd.GetManifest(reparse=True)
  if old.__class__ != new.__class__:
    # The manifest file format changed; migrate local state.
    print >>sys.stderr, 'NOTICE: manifest format has changed ***'
    new.Upgrade_Local(old)
  else:
    if new.notice:
      print new.notice
def _PostRepoUpgrade(manifest):
for project in manifest.projects.values():
if project.Exists:
project.PostRepoUpgrade()
def _PostRepoFetch(rp, no_repo_verify=False, verbose=False):
  """Upgrade the repo source itself if the fetch brought a new version.

  Raises RepoChangedException so main can re-exec the new version.
  """
  if rp.HasChanges:
    print >>sys.stderr, 'info: A new version of repo is available'
    print >>sys.stderr, ''
    if no_repo_verify or _VerifyTag(rp):
      syncbuf = SyncBuffer(rp.config)
      rp.Sync_LocalHalf(syncbuf)
      if not syncbuf.Finish():
        sys.exit(1)
      print >>sys.stderr, 'info: Restarting repo with latest version'
      raise RepoChangedException(['--repo-upgraded'])
    else:
      # Signature check failed; keep running the current version.
      print >>sys.stderr, 'warning: Skipped upgrade to unverified version'
  else:
    if verbose:
      print >>sys.stderr, 'repo version %s is current' % rp.work_git.describe(HEAD)
def _VerifyTag(project):
  """GPG-verify the signed tag the project's revision resolves to.

  Returns True when verification passes (or cannot be attempted
  because GnuPG was unavailable at init time), False otherwise.
  """
  gpg_dir = os.path.expanduser('~/.repoconfig/gnupg')
  if not os.path.exists(gpg_dir):
    # No keyring was set up by `repo init`; accept without checking.
    print >>sys.stderr,\
"""warning: GnuPG was not available during last "repo init"
warning: Cannot automatically authenticate repo."""
    return True
  try:
    cur = project.bare_git.describe(project.GetRevisionId())
  except GitError:
    cur = None
  if not cur \
     or re.compile(r'^.*-[0-9]{1,}-g[0-9a-f]{1,}$').match(cur):
    # Revision is not exactly a tag (describe added -N-g<sha>),
    # so there is no signature to check.
    rev = project.revisionExpr
    if rev.startswith(R_HEADS):
      rev = rev[len(R_HEADS):]
    print >>sys.stderr
    print >>sys.stderr,\
      "warning: project '%s' branch '%s' is not signed" \
      % (project.name, rev)
    return False
  env = os.environ.copy()
  env['GIT_DIR'] = project.gitdir.encode()
  env['GNUPGHOME'] = gpg_dir.encode()
  # Let git itself perform the signature verification.
  cmd = [GIT, 'tag', '-v', cur]
  proc = subprocess.Popen(cmd,
                          stdout = subprocess.PIPE,
                          stderr = subprocess.PIPE,
                          env = env)
  out = proc.stdout.read()
  proc.stdout.close()
  err = proc.stderr.read()
  proc.stderr.close()
  if proc.wait() != 0:
    # Show git's output so the user can see why it failed.
    print >>sys.stderr
    print >>sys.stderr, out
    print >>sys.stderr, err
    print >>sys.stderr
    return False
  return True
| Python |
#
# Copyright (C) 2008 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import copy
import re
import sys
from command import InteractiveCommand
from editor import Editor
from error import UploadError
# Uploading more commits than this on one branch triggers a
# confirmation prompt (a likely sign of an accidental rebase
# across branches).
UNUSUAL_COMMIT_THRESHOLD = 5
def _ConfirmManyUploads(multiple_branches=False):
  """Warn about an unusually large upload; return True if user typed 'yes'."""
  if multiple_branches:
    print "ATTENTION: One or more branches has an unusually high number of commits."
  else:
    print "ATTENTION: You are uploading an unusually high number of commits."
  print "YOU PROBABLY DO NOT MEAN TO DO THIS. (Did you rebase across branches?)"
  # Require the full word 'yes'; any other input aborts.
  answer = raw_input("If you are sure you intend to do this, type 'yes': ").strip()
  return answer == "yes"
def _die(fmt, *args):
msg = fmt % args
print >>sys.stderr, 'error: %s' % msg
sys.exit(1)
def _SplitEmails(values):
result = []
for str in values:
result.extend([s.strip() for s in str.split(',')])
return result
class Upload(InteractiveCommand):
common = True
helpSummary = "Upload changes for code review"
helpUsage="""
%prog [--re --cc] [<project>]...
"""
helpDescription = """
The '%prog' command is used to send changes to the Gerrit Code
Review system. It searches for topic branches in local projects
that have not yet been published for review. If multiple topic
branches are found, '%prog' opens an editor to allow the user to
select which branches to upload.
'%prog' searches for uploadable changes in all projects listed at
the command line. Projects can be specified either by name, or by
a relative or absolute path to the project's local directory. If no
projects are specified, '%prog' will search for uploadable changes
in all projects listed in the manifest.
If the --reviewers or --cc options are passed, those emails are
added to the respective list of users, and emails are sent to any
new users. Users passed as --reviewers must already be registered
with the code review system, or the upload will fail.
Configuration
-------------
review.URL.autoupload:
To disable the "Upload ... (y/n)?" prompt, you can set a per-project
or global Git configuration option. If review.URL.autoupload is set
to "true" then repo will assume you always answer "y" at the prompt,
and will not prompt you further. If it is set to "false" then repo
will assume you always answer "n", and will abort.
review.URL.autocopy:
To automatically copy a user or mailing list to all uploaded reviews,
you can set a per-project or global Git option to do so. Specifically,
review.URL.autocopy can be set to a comma separated list of reviewers
who you always want copied on all uploads with a non-empty --re
argument.
review.URL.username:
Override the username used to connect to Gerrit Code Review.
By default the local part of the email address is used.
The URL must match the review URL listed in the manifest XML file,
or in the .git/config within the project. For example:
[remote "origin"]
url = git://git.example.com/project.git
review = http://review.example.com/
[review "http://review.example.com/"]
autoupload = true
autocopy = johndoe@company.com,my-team-alias@company.com
References
----------
Gerrit Code Review: http://code.google.com/p/gerrit/
"""
def _Options(self, p):
p.add_option('-t',
dest='auto_topic', action='store_true',
help='Send local branch name to Gerrit Code Review')
p.add_option('--re', '--reviewers',
type='string', action='append', dest='reviewers',
help='Request reviews from these people.')
p.add_option('--cc',
type='string', action='append', dest='cc',
help='Also send email to these email addresses.')
  def _SingleBranch(self, opt, branch, people):
    """Confirm and upload a single reviewable branch.

    Honors the per-project review.URL.autoupload setting:
    False blocks the upload, None (unset) prompts the user,
    True proceeds without prompting.
    """
    project = branch.project
    name = branch.name
    remote = project.GetBranch(name).remote

    key = 'review.%s.autoupload' % remote.review
    answer = project.config.GetBoolean(key)

    if answer is False:
      _die("upload blocked by %s = false" % key)

    if answer is None:
      date = branch.date
      list = branch.commits  # NOTE: shadows the builtin 'list'

      print 'Upload project %s/:' % project.relpath
      print '  branch %s (%2d commit%s, %s):' % (
                    name,
                    len(list),
                    len(list) != 1 and 's' or '',
                    date)
      for commit in list:
        print '         %s' % commit

      sys.stdout.write('to %s (y/n)? ' % remote.review)
      answer = sys.stdin.readline().strip()
      answer = answer in ('y', 'Y', 'yes', '1', 'true', 't')

    if answer:
      # A surprisingly long commit list usually means the branch was
      # forked from the wrong point; ask once more before uploading.
      if len(branch.commits) > UNUSUAL_COMMIT_THRESHOLD:
        answer = _ConfirmManyUploads()

      if answer:
        self._UploadAndReport(opt, [branch], people)
      else:
        _die("upload aborted by user")
    else:
      _die("upload aborted by user")
  def _MultipleBranches(self, opt, pending, people):
    """Let the user select branches to upload by editing a script.

    A fully commented 'script' listing every uploadable branch is
    opened in the user's editor; the lines the user uncomments select
    the branches that will be uploaded.
    """
    projects = {}   # relpath -> project, for resolving edited lines
    branches = {}   # project name -> {branch name -> ReviewableBranch}

    # Build the commented script shown to the user.
    script = []
    script.append('# Uncomment the branches to upload:')
    for project, avail in pending:
      script.append('#')
      script.append('# project %s/:' % project.relpath)

      b = {}
      for branch in avail:
        name = branch.name
        date = branch.date
        list = branch.commits  # NOTE: shadows the builtin 'list'

        if b:
          script.append('#')
        script.append('#  branch %s (%2d commit%s, %s):' % (
                      name,
                      len(list),
                      len(list) != 1 and 's' or '',
                      date))
        for commit in list:
          script.append('#         %s' % commit)
        b[name] = branch

      projects[project.relpath] = project
      branches[project.name] = b
    script.append('')

    script = Editor.EditString("\n".join(script)).split("\n")

    # project lines may remain commented; branch lines must be
    # uncommented to be selected.
    project_re = re.compile(r'^#?\s*project\s*([^\s]+)/:$')
    branch_re = re.compile(r'^\s*branch\s*([^\s(]+)\s*\(.*')

    project = None
    todo = []

    for line in script:
      m = project_re.match(line)
      if m:
        name = m.group(1)
        project = projects.get(name)
        if not project:
          _die('project %s not available for upload', name)
        continue

      m = branch_re.match(line)
      if m:
        name = m.group(1)
        if not project:
          _die('project for branch %s not in script', name)
        branch = branches[project.name].get(name)
        if not branch:
          _die('branch %s not in %s', name, project.relpath)
        todo.append(branch)
    if not todo:
      _die("nothing uncommented for upload")

    # Re-confirm once if any selected branch has an unusually long
    # commit list (likely forked from the wrong point).
    many_commits = False
    for branch in todo:
      if len(branch.commits) > UNUSUAL_COMMIT_THRESHOLD:
        many_commits = True
        break
    if many_commits:
      if not _ConfirmManyUploads(multiple_branches=True):
        _die("upload aborted by user")

    self._UploadAndReport(opt, todo, people)
def _AppendAutoCcList(self, branch, people):
"""
Appends the list of users in the CC list in the git project's config if a
non-empty reviewer list was found.
"""
name = branch.name
project = branch.project
key = 'review.%s.autocopy' % project.GetBranch(name).remote.review
raw_list = project.config.GetString(key)
if not raw_list is None and len(people[0]) > 0:
people[1].extend([entry.strip() for entry in raw_list.split(',')])
def _FindGerritChange(self, branch):
last_pub = branch.project.WasPublished(branch.name)
if last_pub is None:
return ""
refs = branch.GetPublishedRefs()
try:
# refs/changes/XYZ/N --> XYZ
return refs.get(last_pub).split('/')[-2]
except:
return ""
  def _UploadAndReport(self, opt, todo, original_people):
    """Upload each branch in |todo| and print a per-branch report.

    Exits non-zero if any branch failed to upload.
    """
    have_errors = False
    for branch in todo:
      try:
        # Deep copy so per-branch auto-CC additions do not leak into
        # the people lists used for the next branch.
        people = copy.deepcopy(original_people)
        self._AppendAutoCcList(branch, people)

        # Check if there are local changes that may have been forgotten
        if branch.project.HasChanges():
            key = 'review.%s.autoupload' % branch.project.remote.review
            answer = branch.project.config.GetBoolean(key)

            # if they want to auto upload, let's not ask because it could be automated
            if answer is None:
                sys.stdout.write('Uncommitted changes in ' + branch.project.name + ' (did you forget to amend?). Continue uploading? (y/n) ')
                a = sys.stdin.readline().strip().lower()
                if a not in ('y', 'yes', 't', 'true', 'on'):
                    print >>sys.stderr, "skipping upload"
                    branch.uploaded = False
                    branch.error = 'User aborted'
                    continue

        branch.UploadForReview(people, auto_topic=opt.auto_topic)
        branch.uploaded = True
      except UploadError, e:
        branch.error = e
        branch.uploaded = False
        have_errors = True

    print >>sys.stderr, ''
    print >>sys.stderr, '----------------------------------------------------------------------'

    if have_errors:
      for branch in todo:
        if not branch.uploaded:
          # Short error text fits on the same line; longer text is
          # wrapped onto its own indented line.
          if len(str(branch.error)) <= 30:
            fmt = ' (%s)'
          else:
            fmt = '\n       (%s)'
          print >>sys.stderr, ('[FAILED] %-15s %-15s' + fmt) % (
                 branch.project.relpath + '/', \
                 branch.name, \
                 str(branch.error))
      print >>sys.stderr, ''

    for branch in todo:
        if branch.uploaded:
          print >>sys.stderr, '[OK    ] %-15s %s' % (
                 branch.project.relpath + '/',
                 branch.name)

    if have_errors:
      sys.exit(1)
  def Execute(self, opt, args):
    """Gather uploadable branches across projects and dispatch.

    A single pending branch goes through the simple confirmation
    flow; anything more uses the editor-script selection flow.
    """
    project_list = self.GetProjects(args)
    pending = []
    reviewers = []
    cc = []

    if opt.reviewers:
      reviewers = _SplitEmails(opt.reviewers)
    if opt.cc:
      cc = _SplitEmails(opt.cc)
    people = (reviewers,cc)

    for project in project_list:
      avail = project.GetUploadableBranches()
      if avail:
        pending.append((project, avail))

    if not pending:
      print >>sys.stdout, "no branches ready for upload"
    elif len(pending) == 1 and len(pending[0][1]) == 1:
      self._SingleBranch(opt, pending[0][1][0], people)
    else:
      self._MultipleBranches(opt, pending, people)
| Python |
#
# Copyright (C) 2009 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import sys
from command import PagedCommand
from manifest_submodule import SubmoduleManifest
from manifest_xml import XmlManifest
def _doc(name):
  """Return the contents of docs/<name> under the repo install root."""
  docs_root = os.path.dirname(os.path.dirname(__file__))
  fd = open(os.path.join(docs_root, 'docs', name))
  try:
    return fd.read()
  finally:
    fd.close()
class Manifest(PagedCommand):
  """The 'repo manifest' command: inspect, export or upgrade manifests."""

  common = False
  helpSummary = "Manifest inspection utility"
  helpUsage = """
%prog [options]
"""
  _xmlHelp = """
With the -o option, exports the current manifest for inspection.
The manifest and (if present) local_manifest.xml are combined
together to produce a single manifest file.  This file can be stored
in a Git repository for use during future 'repo init' invocations.
"""

  @property
  def helpDescription(self):
    # Help text depends on the manifest flavor actually in use.
    help = ''
    if isinstance(self.manifest, XmlManifest):
      help += self._xmlHelp + '\n' + _doc('manifest_xml.txt')
    if isinstance(self.manifest, SubmoduleManifest):
      help += _doc('manifest_submodule.txt')
    return help

  def _Options(self, p):
    # XML-specific options only make sense for an XmlManifest.
    if isinstance(self.manifest, XmlManifest):
      p.add_option('--upgrade',
                   dest='upgrade', action='store_true',
                   help='Upgrade XML manifest to submodule')
      p.add_option('-r', '--revision-as-HEAD',
                   dest='peg_rev', action='store_true',
                   help='Save revisions as current HEAD')
      p.add_option('-o', '--output-file',
                   dest='output_file',
                   help='File to save the manifest to',
                   metavar='-|NAME.xml')

  def WantPager(self, opt):
    # The upgrade path prints progress; never page it.
    if isinstance(self.manifest, XmlManifest) and opt.upgrade:
      return False
    return True

  def _Output(self, opt):
    """Write the merged manifest to a file or stdout ('-')."""
    if opt.output_file == '-':
      fd = sys.stdout
    else:
      fd = open(opt.output_file, 'w')
    self.manifest.Save(fd,
                       peg_rev = opt.peg_rev)
    fd.close()
    if opt.output_file != '-':
      print >>sys.stderr, 'Saved manifest to %s' % opt.output_file

  def _Upgrade(self):
    """Convert an XML manifest client into a submodule manifest client."""
    old = self.manifest

    if isinstance(old, SubmoduleManifest):
      print >>sys.stderr, 'error: already upgraded'
      sys.exit(1)
    old._Load()

    # Every project must exist locally before conversion can proceed.
    for p in old.projects.values():
      if not os.path.exists(p.gitdir) \
         or not os.path.exists(p.worktree):
        print >>sys.stderr, 'fatal: project "%s" missing' % p.relpath
        sys.exit(1)

    new = SubmoduleManifest(old.repodir)
    new.FromXml_Local_1(old, checkout=False)
    new.FromXml_Definition(old)
    new.FromXml_Local_2(old)
    print >>sys.stderr, 'upgraded manifest; commit result manually'

  def Execute(self, opt, args):
    if args:
      self.Usage()

    if isinstance(self.manifest, XmlManifest):
      if opt.upgrade:
        self._Upgrade()
        return

      if opt.output_file is not None:
        self._Output(opt)
        return

    print >>sys.stderr, 'error: no operation to perform'
    print >>sys.stderr, 'error: see repo help manifest'
    sys.exit(1)
| Python |
#
# Copyright (C) 2008 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import sys
import pager
from git_config import GitConfig
# Color names accepted in color.* configuration values, mapped to their
# ANSI color number.  -1 means "no color requested".
COLORS = {
  None: -1,
  'normal': -1,
  'black': 0,
  'red': 1,
  'green': 2,
  'yellow': 3,
  'blue': 4,
  'magenta': 5,
  'cyan': 6,
  'white': 7,
}

# Text attribute names mapped to their ANSI SGR codes.
ATTRS = {
  None: -1,
  'bold': 1,
  'dim': 2,
  'ul': 4,
  'blink': 5,
  'reverse': 7,
}

# SGR sequence restoring the terminal's default rendition.
RESET = "\033[m"


def is_color(s):
  """True if |s| names a supported foreground/background color."""
  return s in COLORS


def is_attr(s):
  """True if |s| names a supported text attribute."""
  return s in ATTRS


def _Color(fg = None, bg = None, attr = None):
  """Build the ANSI SGR escape sequence for the requested rendition.

  Returns the empty string when neither a color nor an attribute
  was requested.
  """
  fg_code = COLORS[fg]
  bg_code = COLORS[bg]
  attr_code = ATTRS[attr]

  if attr_code < 0 and fg_code < 0 and bg_code < 0:
    return ''

  parts = []
  if attr_code >= 0:
    parts.append(chr(ord('0') + attr_code))
  if fg_code >= 0:
    if fg_code < 8:
      parts.append('3%c' % (ord('0') + fg_code))
    else:
      parts.append('38;5;%d' % fg_code)
  if bg_code >= 0:
    if bg_code < 8:
      parts.append('4%c' % (ord('0') + bg_code))
    else:
      parts.append('48;5;%d' % bg_code)
  return "\033[" + ';'.join(parts) + 'm'
class Coloring(object):
  """Colorized output helper bound to one color.<type> config section.

  Whether color is emitted at all is decided once at construction from
  color.<type> (falling back to color.ui): 'auto' enables color only
  when writing to a pager or a tty; 'true'/'always' force it on;
  anything else disables it.
  """

  def __init__(self, config, type):
    self._section = 'color.%s' % type
    self._config = config
    self._out = sys.stdout

    on = self._config.GetString(self._section)
    if on is None:
      on = self._config.GetString('color.ui')

    if on == 'auto':
      if pager.active or os.isatty(1):
        self._on = True
      else:
        self._on = False
    elif on in ('true', 'always'):
      self._on = True
    else:
      self._on = False

  def redirect(self, out):
    """Send subsequent output to |out| instead of stdout."""
    self._out = out

  @property
  def is_on(self):
    """True if escape sequences will be emitted."""
    return self._on

  def write(self, fmt, *args):
    self._out.write(fmt % args)

  def flush(self):
    self._out.flush()

  def nl(self):
    self._out.write('\n')

  def printer(self, opt=None, fg=None, bg=None, attr=None):
    """Like colorer(), but the returned function writes to the stream."""
    s = self
    c = self.colorer(opt, fg, bg, attr)

    def f(fmt, *args):
      s._out.write(c(fmt, *args))
    return f

  def colorer(self, opt=None, fg=None, bg=None, attr=None):
    """Return a fmt%%args formatter, wrapping output in color codes.

    When color is disabled the formatter performs plain formatting.
    """
    if self._on:
      c = self._parse(opt, fg, bg, attr)

      def f(fmt, *args):
        str = fmt % args
        return ''.join([c, str, RESET])
      return f
    else:
      def f(fmt, *args):
        return fmt % args
      return f

  def _parse(self, opt, fg, bg, attr):
    """Resolve <section>.<opt> configuration into an escape prefix.

    The configured value may name an attribute and up to two colors:
    the first color sets the foreground, the second the background.
    Configured values override the caller-supplied defaults.
    """
    if not opt:
      return _Color(fg, bg, attr)

    v = self._config.GetString('%s.%s' % (self._section, opt))
    if v is None:
      return _Color(fg, bg, attr)

    v = v.strip().lower()
    if v == "reset":
      return RESET
    elif v == '':
      return _Color(fg, bg, attr)

    have_fg = False
    for a in v.split(' '):
      if is_color(a):
        if have_fg:
          bg = a
        else:
          fg = a
          # Mark the foreground slot taken so a second color name is
          # assigned to the background.  Previously have_fg was never
          # set, making the bg branch unreachable.
          have_fg = True
      elif is_attr(a):
        attr = a
    return _Color(fg, bg, attr)
| Python |
#
# Copyright (C) 2009 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import sys
from trace import Trace
HEAD = 'HEAD'              # symbolic ref naming the current checkout
R_HEADS = 'refs/heads/'    # namespace for local branches
R_TAGS = 'refs/tags/'      # namespace for tags
R_PUB = 'refs/published/'  # repo-private namespace tracking uploaded branches
class GitRefs(object):
  """Cached read-only view of the refs in one git directory.

  Refs are read from packed-refs, the loose refs/ tree and HEAD.
  File modification times are remembered so the cache is reloaded
  only when something on disk changed.
  """

  def __init__(self, gitdir):
    self._gitdir = gitdir
    self._phyref = None   # name -> SHA-1, after symref resolution
    self._symref = None   # name -> destination ref name
    self._mtime = {}      # path (relative to gitdir) -> mtime at load

  @property
  def all(self):
    """Dict of every known ref name to its SHA-1."""
    self._EnsureLoaded()
    return self._phyref

  def get(self, name):
    """SHA-1 of |name|, or the empty string if unknown."""
    try:
      return self.all[name]
    except KeyError:
      return ''

  def deleted(self, name):
    """Forget |name|; callers invoke this after deleting a ref."""
    if self._phyref is not None:
      if name in self._phyref:
        del self._phyref[name]

      if name in self._symref:
        del self._symref[name]

      if name in self._mtime:
        del self._mtime[name]

  def symref(self, name):
    """Destination of symbolic ref |name|, or '' if not symbolic."""
    try:
      self._EnsureLoaded()
      return self._symref[name]
    except KeyError:
      return ''

  def _EnsureLoaded(self):
    # Reload when never loaded, or when any watched file changed.
    if self._phyref is None or self._NeedUpdate():
      self._LoadAll()

  def _NeedUpdate(self):
    """True if any file read at load time has a different mtime now."""
    Trace(': scan refs %s', self._gitdir)

    for name, mtime in self._mtime.iteritems():
      try:
        if mtime != os.path.getmtime(os.path.join(self._gitdir, name)):
          return True
      except OSError:
        # File vanished; force a reload.
        return True
    return False

  def _LoadAll(self):
    """Re-read every ref source and resolve symbolic refs."""
    Trace(': load refs %s', self._gitdir)

    self._phyref = {}
    self._symref = {}
    self._mtime = {}

    self._ReadPackedRefs()
    self._ReadLoose('refs/')
    self._ReadLoose1(os.path.join(self._gitdir, HEAD), HEAD)

    # Resolve chains of symbolic refs to SHA-1s; bounded at 5 hops to
    # avoid spinning on broken circular symrefs.
    scan = self._symref
    attempts = 0
    while scan and attempts < 5:
      scan_next = {}
      for name, dest in scan.iteritems():
        if dest in self._phyref:
          self._phyref[name] = self._phyref[dest]
        else:
          scan_next[name] = dest
      scan = scan_next
      attempts += 1

  def _ReadPackedRefs(self):
    """Parse $GIT_DIR/packed-refs, skipping comments and peel lines."""
    path = os.path.join(self._gitdir, 'packed-refs')
    try:
      fd = open(path, 'rb')
      mtime = os.path.getmtime(path)
    except IOError:
      return
    except OSError:
      return
    try:
      for line in fd:
        if line[0] == '#':
          continue
        if line[0] == '^':
          # '^<sha>' lines carry the peeled value of the preceding
          # annotated tag; not needed here.
          continue

        line = line[:-1]
        p = line.split(' ')
        id = p[0]
        name = p[1]

        self._phyref[name] = id
    finally:
      fd.close()
    self._mtime['packed-refs'] = mtime

  def _ReadLoose(self, prefix):
    """Recursively read loose refs under gitdir/<prefix>."""
    base = os.path.join(self._gitdir, prefix)
    for name in os.listdir(base):
      p = os.path.join(base, name)
      if os.path.isdir(p):
        # Watch the directory itself so added/removed refs trigger
        # a reload.
        self._mtime[prefix] = os.path.getmtime(base)
        self._ReadLoose(prefix + name + '/')
      elif name.endswith('.lock'):
        pass
      else:
        self._ReadLoose1(p, prefix + name)

  def _ReadLoose1(self, path, name):
    """Read one loose ref file; record it as symbolic or physical."""
    try:
      fd = open(path, 'rb')
      mtime = os.path.getmtime(path)
    except OSError:
      return
    except IOError:
      return
    try:
      id = fd.readline()
    finally:
      fd.close()

    if not id:
      return
    id = id[:-1]

    if id.startswith('ref: '):
      self._symref[name] = id[5:]
    else:
      self._phyref[name] = id
    self._mtime[name] = mtime
| Python |
#
# Copyright (C) 2008 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import re
import sys
import subprocess
import tempfile
from error import EditorError
class Editor(object):
  """Manages the user's preferred text editor."""

  _editor = None       # cached editor command, resolved on first use
  globalConfig = None  # set externally to the user's global GitConfig

  @classmethod
  def _GetEditor(cls):
    if cls._editor is None:
      cls._editor = cls._SelectEditor()
    return cls._editor

  @classmethod
  def _SelectEditor(cls):
    """Pick the editor: GIT_EDITOR, core.editor, VISUAL, EDITOR, vi."""
    e = os.getenv('GIT_EDITOR')
    if e:
      return e

    if cls.globalConfig:
      e = cls.globalConfig.GetString('core.editor')
      if e:
        return e

    e = os.getenv('VISUAL')
    if e:
      return e

    e = os.getenv('EDITOR')
    if e:
      return e

    if os.getenv('TERM') == 'dumb':
      print >>sys.stderr,\
"""No editor specified in GIT_EDITOR, core.editor, VISUAL or EDITOR.
Tried to fall back to vi but terminal is dumb.  Please configure at
least one of these before using this command."""
      sys.exit(1)

    return 'vi'

  @classmethod
  def EditString(cls, data):
    """Opens an editor to edit the given content.

    Args:
      data : the text to edit

    Returns:
      new value of edited text; None if editing did not succeed

    Raises:
      EditorError: the editor failed to start or exited non-zero.
    """
    editor = cls._GetEditor()
    if editor == ':':
      # ':' is the conventional no-op editor; return data unchanged.
      return data

    fd, path = tempfile.mkstemp()
    try:
      os.write(fd, data)
      os.close(fd)
      fd = None

      # Editors containing shell metacharacters/spaces must run
      # through a shell; plain commands are invoked directly.
      if re.compile("^.*[$ \t'].*$").match(editor):
        args = [editor + ' "$@"', 'sh']
        shell = True
      else:
        args = [editor]
        shell = False
      args.append(path)

      try:
        rc = subprocess.Popen(args, shell=shell).wait()
      except OSError, e:
        raise EditorError('editor failed, %s: %s %s'
          % (str(e), editor, path))
      if rc != 0:
        raise EditorError('editor failed with exit status %d: %s %s'
          % (rc, editor, path))

      fd2 = open(path)
      try:
        return fd2.read()
      finally:
        fd2.close()
    finally:
      if fd:
        os.close(fd)
      os.remove(path)
| Python |
#
# Copyright (C) 2008 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import sys
import subprocess
import tempfile
from signal import SIGTERM
from error import GitError
from trace import REPO_TRACE, IsTrace, Trace
GIT = 'git'                  # name of the git executable invoked
MIN_GIT_VERSION = (1, 5, 4)  # oldest git version supported by repo
GIT_DIR = 'GIT_DIR'          # environment variable naming the git dir

# Last values shown in trace output, to avoid repeating cd/export lines.
LAST_GITDIR = None
LAST_CWD = None
_ssh_proxy_path = None  # cached path of the git_ssh wrapper script
_ssh_sock_path = None   # cached ssh ControlPath template
_ssh_clients = []       # ssh master processes we may need to terminate


def ssh_sock(create=True):
  """Return the ssh control-socket path template, creating it on demand.

  When |create| is False and no socket directory exists yet, None is
  returned instead of allocating one.
  """
  global _ssh_sock_path
  if _ssh_sock_path is None:
    if not create:
      return None
    base = '/tmp'
    if not os.path.exists(base):
      base = tempfile.gettempdir()
    _ssh_sock_path = os.path.join(
      tempfile.mkdtemp('', 'ssh-', base),
      'master-%r@%h:%p')
  return _ssh_sock_path


def _ssh_proxy():
  """Return the path of the bundled git_ssh proxy script."""
  global _ssh_proxy_path
  if _ssh_proxy_path is None:
    _ssh_proxy_path = os.path.join(
      os.path.dirname(__file__),
      'git_ssh')
  return _ssh_proxy_path


def _add_ssh_client(p):
  """Track subprocess |p| so terminate_ssh_clients() can reap it."""
  _ssh_clients.append(p)


def _remove_ssh_client(p):
  """Stop tracking subprocess |p|; ignores processes not tracked."""
  try:
    _ssh_clients.remove(p)
  except ValueError:
    pass


def terminate_ssh_clients():
  """SIGTERM and reap every tracked ssh client process."""
  global _ssh_clients
  for p in _ssh_clients:
    try:
      os.kill(p.pid, SIGTERM)
      p.wait()
    except OSError:
      # Already exited; nothing to clean up.
      pass
  _ssh_clients = []
class _GitCall(object):
  """Invokes git subcommands via attribute access.

  git.foo_bar('x') runs 'git foo-bar x' and returns True on exit 0.
  """

  def version(self):
    """Return 'git --version' output, or None on failure."""
    proc = GitCommand(None, ['--version'], capture_stdout=True)
    if proc.Wait() == 0:
      return proc.stdout
    return None

  def __getattr__(self, name):
    subcmd = name.replace('_', '-')

    def runner(*cmdv):
      argv = [subcmd]
      argv.extend(cmdv)
      return GitCommand(None, argv).Wait() == 0
    return runner
git = _GitCall()
_git_version = None  # parsed (major, minor, patch) of the local git


def git_require(min_version, fail=False):
  """Check that the installed git is at least |min_version|.

  Returns True when the requirement is met.  When |fail| is True an
  unmet requirement terminates the process instead of returning False.
  """
  global _git_version

  if _git_version is None:
    ver_str = git.version()
    if ver_str.startswith('git version '):
      # Keep only the first three numeric components.
      _git_version = tuple(
        map(lambda x: int(x),
          ver_str[len('git version '):].strip().split('.')[0:3]
        ))
    else:
      print >>sys.stderr, 'fatal: "%s" unsupported' % ver_str
      sys.exit(1)

  if min_version <= _git_version:
    return True
  if fail:
    need = '.'.join(map(lambda x: str(x), min_version))
    print >>sys.stderr, 'fatal: git %s or later required' % need
    sys.exit(1)
  return False
def _setenv(env, name, value):
  # Store name=value in env, encoding the value to a byte string.
  env[name] = value.encode()
class GitCommand(object):
  """One git subprocess invocation, optionally within a project.

  The constructor starts the process; call Wait() to collect output
  (if captured) and the exit status.
  """

  def __init__(self,
               project,
               cmdv,
               bare = False,
               provide_stdin = False,
               capture_stdout = False,
               capture_stderr = False,
               disable_editor = False,
               ssh_proxy = False,
               cwd = None,
               gitdir = None):
    env = os.environ.copy()

    # Strip inherited variables that would redirect git away from the
    # repository we are about to select explicitly.
    for e in [REPO_TRACE,
              GIT_DIR,
              'GIT_ALTERNATE_OBJECT_DIRECTORIES',
              'GIT_OBJECT_DIRECTORY',
              'GIT_WORK_TREE',
              'GIT_GRAFT_FILE',
              'GIT_INDEX_FILE']:
      if e in env:
        del env[e]

    if disable_editor:
      # ':' is the conventional no-op editor.
      _setenv(env, 'GIT_EDITOR', ':')
    if ssh_proxy:
      _setenv(env, 'REPO_SSH_SOCK', ssh_sock())
      _setenv(env, 'GIT_SSH', _ssh_proxy())

    if project:
      if not cwd:
        cwd = project.worktree
      if not gitdir:
        gitdir = project.gitdir

    command = [GIT]
    if bare:
      # Bare invocations address the git dir directly and do not need
      # (or want) a working directory.
      if gitdir:
        _setenv(env, GIT_DIR, gitdir)
      cwd = None
    command.extend(cmdv)

    if provide_stdin:
      stdin = subprocess.PIPE
    else:
      stdin = None

    if capture_stdout:
      stdout = subprocess.PIPE
    else:
      stdout = None

    if capture_stderr:
      stderr = subprocess.PIPE
    else:
      stderr = None

    if IsTrace():
      global LAST_CWD
      global LAST_GITDIR

      dbg = ''

      # Only show cd/export lines when they changed since last trace.
      if cwd and LAST_CWD != cwd:
        if LAST_GITDIR or LAST_CWD:
          dbg += '\n'
        dbg += ': cd %s\n' % cwd
        LAST_CWD = cwd

      if GIT_DIR in env and LAST_GITDIR != env[GIT_DIR]:
        if LAST_GITDIR or LAST_CWD:
          dbg += '\n'
        dbg += ': export GIT_DIR=%s\n' % env[GIT_DIR]
        LAST_GITDIR = env[GIT_DIR]

      dbg += ': '
      dbg += ' '.join(command)
      if stdin == subprocess.PIPE:
        dbg += ' 0<|'
      if stdout == subprocess.PIPE:
        dbg += ' 1>|'
      if stderr == subprocess.PIPE:
        dbg += ' 2>|'
      Trace('%s', dbg)

    try:
      p = subprocess.Popen(command,
                           cwd = cwd,
                           env = env,
                           stdin = stdin,
                           stdout = stdout,
                           stderr = stderr)
    except Exception, e:
      raise GitError('%s: %s' % (command[1], e))

    if ssh_proxy:
      _add_ssh_client(p)

    self.process = p
    self.stdin = p.stdin

  def Wait(self):
    """Close stdin, drain captured output, and return the exit status."""
    p = self.process

    if p.stdin:
      p.stdin.close()
      self.stdin = None

    if p.stdout:
      self.stdout = p.stdout.read()
      p.stdout.close()
    else:
      p.stdout = None

    if p.stderr:
      self.stderr = p.stderr.read()
      p.stderr.close()
    else:
      p.stderr = None

    try:
      rc = p.wait()
    finally:
      _remove_ssh_client(p)
    return rc
| Python |
# Copyright (C) 2008 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import errno
import filecmp
import os
import re
import shutil
import stat
import sys
import urllib2
from color import Coloring
from git_command import GitCommand
from git_config import GitConfig, IsId
from error import GitError, ImportError, UploadError
from error import ManifestInvalidRevisionError
from git_refs import GitRefs, HEAD, R_HEADS, R_TAGS, R_PUB
def _lwrite(path, content):
  """Atomically replace |path| with |content|.

  The data is first written to |path|.lock and then renamed into
  place; if the rename fails the lock file is removed and the
  error re-raised.
  """
  lock_path = path + '.lock'
  lock_file = open(lock_path, 'wb')
  try:
    lock_file.write(content)
  finally:
    lock_file.close()

  try:
    os.rename(lock_path, path)
  except OSError:
    os.remove(lock_path)
    raise
def _error(fmt, *args):
  # Print a formatted message to stderr with the standard 'error:' prefix.
  msg = fmt % args
  print >>sys.stderr, 'error: %s' % msg
def not_rev(r):
  """Return |r| prefixed with '^', excluding it from a rev-list walk."""
  return '^%s' % r
def sq(r):
  """Single-quote |r| for safe embedding in a POSIX shell command.

  Each embedded single quote is replaced by the sequence '\\''
  (close quote, escaped quote, reopen quote).  The previous
  replacement "'\''" was parsed by Python as three bare quote
  characters, yielding a string the shell cannot parse whenever
  |r| contains a single quote.
  """
  return "'" + r.replace("'", "'\\''") + "'"
hook_list = None  # cached list of hook script paths


def repo_hooks():
  """Return absolute paths of the hook scripts shipped with repo.

  The hooks directory is listed once and the result cached.
  """
  global hook_list
  if hook_list is None:
    hooks_dir = os.path.join(os.path.abspath(os.path.dirname(__file__)),
                             'hooks')
    hook_list = [os.path.join(hooks_dir, name)
                 for name in os.listdir(hooks_dir)]
  return hook_list
def relpath(dst, src):
  """Return a relative path leading from file |src| to |dst|."""
  src_dir = os.path.dirname(src)

  # Trim the (character-wise) common prefix back to a directory
  # boundary.
  common = os.path.commonprefix([dst, src_dir])
  if common.endswith('/'):
    common = common[:-1]
  else:
    common = os.path.dirname(common)

  # Climb from src_dir up to the common ancestor, one '../' per level.
  up = ''
  walk = src_dir
  while walk != common:
    up += '../'
    walk = os.path.dirname(walk)
  return up + dst[len(common) + 1:]
class DownloadedChange(object):
  """A Gerrit change that has been fetched into the local repository."""

  _commit_cache = None  # lazily computed commit summary lines

  def __init__(self, project, base, change_id, ps_id, commit):
    self.project = project
    self.base = base
    self.change_id = change_id
    self.ps_id = ps_id
    self.commit = commit

  @property
  def commits(self):
    """One-line summaries of commits in the change but not in base."""
    if self._commit_cache is None:
      argv = ['--abbrev=8',
              '--abbrev-commit',
              '--pretty=oneline',
              '--reverse',
              '--date-order',
              not_rev(self.base),
              self.commit,
              '--']
      self._commit_cache = self.project.bare_git.rev_list(*argv)
    return self._commit_cache
class ReviewableBranch(object):
  """A local branch carrying commits not yet published for review."""

  _commit_cache = None  # lazily computed commit summary lines

  def __init__(self, project, branch, base):
    self.project = project
    self.branch = branch
    self.base = base

  @property
  def name(self):
    return self.branch.name

  @property
  def commits(self):
    """One-line summaries of commits on the branch but not in base."""
    if self._commit_cache is None:
      argv = ['--abbrev=8',
              '--abbrev-commit',
              '--pretty=oneline',
              '--reverse',
              '--date-order',
              not_rev(self.base),
              R_HEADS + self.name,
              '--']
      self._commit_cache = self.project.bare_git.rev_list(*argv)
    return self._commit_cache

  @property
  def unabbrev_commits(self):
    """Map abbreviated (8 char) SHA-1 -> full SHA-1 for each commit."""
    result = dict()
    full_ids = self.project.bare_git.rev_list(
      not_rev(self.base),
      R_HEADS + self.name,
      '--')
    for commit in full_ids:
      result[commit[0:8]] = commit
    return result

  @property
  def date(self):
    """Commit date of the branch tip."""
    return self.project.bare_git.log(
      '--pretty=format:%cd',
      '-n', '1',
      R_HEADS + self.name,
      '--')

  def UploadForReview(self, people, auto_topic=False):
    self.project.UploadForReview(self.name,
                                 people,
                                 auto_topic=auto_topic)

  def GetPublishedRefs(self):
    """Map SHA-1 -> ref name for refs/changes/* on the review server."""
    refs = {}
    output = self.project.bare_git.ls_remote(
      self.branch.remote.SshReviewUrl(self.project.UserEmail),
      'refs/changes/*')
    for line in output.split('\n'):
      try:
        (sha, ref) = line.split()
      except ValueError:
        # Skip malformed/empty lines.
        pass
      else:
        refs[sha] = ref
    return refs
class StatusColoring(Coloring):
  """Coloring policy for 'repo status' output."""

  def __init__(self, config):
    Coloring.__init__(self, config, 'status')
    # One printer per line category in the status report.
    self.project = self.printer('header', attr = 'bold')
    self.branch = self.printer('header', attr = 'bold')
    self.nobranch = self.printer('nobranch', fg = 'red')
    self.important = self.printer('important', fg = 'red')

    self.added = self.printer('added', fg = 'green')
    self.changed = self.printer('changed', fg = 'red')
    self.untracked = self.printer('untracked', fg = 'red')
class DiffColoring(Coloring):
  """Coloring policy for 'repo diff' output."""

  def __init__(self, config):
    Coloring.__init__(self, config, 'diff')
    self.project = self.printer('header', attr = 'bold')
class _CopyFile:
  """A <copyfile> manifest directive: mirror one file into the tree."""

  def __init__(self, src, dest, abssrc, absdest):
    self.src = src
    self.dest = dest
    self.abs_src = abssrc
    self.abs_dest = absdest

  def _Copy(self):
    src = self.abs_src
    dest = self.abs_dest

    # Nothing to do when the destination already matches the source.
    if os.path.exists(dest) and filecmp.cmp(src, dest):
      return
    try:
      if os.path.exists(dest):
        # The old copy may be read-only; remove before rewriting.
        os.remove(dest)
      else:
        # First copy: make sure the destination directory exists.
        parent = os.path.dirname(dest)
        if not os.path.isdir(parent):
          os.makedirs(parent)
      shutil.copy(src, dest)
      # Write-protect the copy so users edit the original instead.
      mode = os.stat(dest)[stat.ST_MODE]
      mode = mode & ~(stat.S_IWUSR | stat.S_IWGRP | stat.S_IWOTH)
      os.chmod(dest, mode)
    except IOError:
      _error('Cannot copy file %s to %s', src, dest)
class RemoteSpec(object):
  """How a project reaches its remote: name, fetch URL, review URL."""

  def __init__(self,
               name,
               url = None,
               review = None):
    self.name = name
    self.url = url
    self.review = review
class Project(object):
  def __init__(self,
               manifest,
               name,
               remote,
               gitdir,
               worktree,
               relpath,
               revisionExpr,
               revisionId):
    """Bind a manifest project entry to its on-disk state.

    gitdir/worktree paths are normalized to forward slashes; the
    revision id is derived from revisionExpr when that expression is
    already a full SHA-1.
    """
    self.manifest = manifest
    self.name = name
    self.remote = remote
    self.gitdir = gitdir.replace('\\', '/')
    if worktree:
      self.worktree = worktree.replace('\\', '/')
    else:
      self.worktree = None
    self.relpath = relpath
    self.revisionExpr = revisionExpr

    if revisionId is None \
       and revisionExpr \
       and IsId(revisionExpr):
      self.revisionId = revisionExpr
    else:
      self.revisionId = revisionId

    self.snapshots = {}
    self.copyfiles = []
    self.config = GitConfig.ForRepository(
                    gitdir = self.gitdir,
                    defaults = self.manifest.globalConfig)

    # work_git operates in the working tree; bare_git in the git dir.
    if self.worktree:
      self.work_git = self._GitGetByExec(self, bare=False)
    else:
      self.work_git = None
    self.bare_git = self._GitGetByExec(self, bare=True)
    self.bare_ref = GitRefs(gitdir)
  @property
  def Exists(self):
    # True if the project's git directory exists on disk.
    return os.path.isdir(self.gitdir)
@property
def CurrentBranch(self):
"""Obtain the name of the currently checked out branch.
The branch name omits the 'refs/heads/' prefix.
None is returned if the project is on a detached HEAD.
"""
b = self.work_git.GetHead()
if b.startswith(R_HEADS):
return b[len(R_HEADS):]
return None
def IsRebaseInProgress(self):
w = self.worktree
g = os.path.join(w, '.git')
return os.path.exists(os.path.join(g, 'rebase-apply')) \
or os.path.exists(os.path.join(g, 'rebase-merge')) \
or os.path.exists(os.path.join(w, '.dotest'))
def IsDirty(self, consider_untracked=True):
"""Is the working directory modified in some way?
"""
self.work_git.update_index('-q',
'--unmerged',
'--ignore-missing',
'--refresh')
if self.work_git.DiffZ('diff-index','-M','--cached',HEAD):
return True
if self.work_git.DiffZ('diff-files'):
return True
if consider_untracked and self.work_git.LsOthers():
return True
return False
  # Cached committer identity, parsed lazily from GIT_COMMITTER_IDENT.
  _userident_name = None
  _userident_email = None

  @property
  def UserName(self):
    """Obtain the user's personal name.
    """
    if self._userident_name is None:
      self._LoadUserIdentity()
    return self._userident_name
  @property
  def UserEmail(self):
    """Obtain the user's email address.  This is very likely
       to be their Gerrit login.
    """
    if self._userident_email is None:
      self._LoadUserIdentity()
    return self._userident_email
def _LoadUserIdentity(self):
u = self.bare_git.var('GIT_COMMITTER_IDENT')
m = re.compile("^(.*) <([^>]*)> ").match(u)
if m:
self._userident_name = m.group(1)
self._userident_email = m.group(2)
else:
self._userident_name = ''
self._userident_email = ''
  def GetRemote(self, name):
    """Get the configuration for a single remote.
    """
    return self.config.GetRemote(name)
  def GetBranch(self, name):
    """Get the configuration for a single branch.
    """
    return self.config.GetBranch(name)
  def GetBranches(self):
    """Get all existing local branches.

    Returns a dict of branch name -> branch config, with .current,
    .published and .revision filled in from the local refs.
    """
    current = self.CurrentBranch
    all = self._allrefs  # NOTE: shadows the builtin 'all'
    heads = {}
    pubd = {}

    for name, id in all.iteritems():
      if name.startswith(R_HEADS):
        name = name[len(R_HEADS):]
        b = self.GetBranch(name)
        b.current = name == current
        b.published = None
        b.revision = id
        heads[name] = b

    # Attach the last-published SHA-1 to any branch that has one.
    for name, id in all.iteritems():
      if name.startswith(R_PUB):
        name = name[len(R_PUB):]
        b = heads.get(name)
        if b:
          b.published = id

    return heads
## Status Display ##
def HasChanges(self):
"""Returns true if there are uncommitted changes.
"""
self.work_git.update_index('-q',
'--unmerged',
'--ignore-missing',
'--refresh')
if self.IsRebaseInProgress():
return True
if self.work_git.DiffZ('diff-index', '--cached', HEAD):
return True
if self.work_git.DiffZ('diff-files'):
return True
if self.work_git.LsOthers():
return True
return False
  def PrintWorkTreeStatus(self):
    """Prints the status of the repository to stdout.

    Returns 'CLEAN' when there is nothing to report, 'DIRTY'
    after printing details, or None if the worktree is missing.
    """
    if not os.path.isdir(self.worktree):
      print ''
      print 'project %s/' % self.relpath
      print '  missing (run "repo sync")'
      return

    self.work_git.update_index('-q',
                               '--unmerged',
                               '--ignore-missing',
                               '--refresh')
    rb = self.IsRebaseInProgress()
    di = self.work_git.DiffZ('diff-index', '-M', '--cached', HEAD)
    df = self.work_git.DiffZ('diff-files')
    do = self.work_git.LsOthers()
    if not rb and not di and not df and not do:
      return 'CLEAN'

    out = StatusColoring(self.config)
    out.project('project %-40s', self.relpath + '/')

    branch = self.CurrentBranch
    if branch is None:
      out.nobranch('(*** NO BRANCH ***)')
    else:
      out.branch('branch %s', branch)
    out.nl()

    if rb:
      out.important('prior sync failed; rebase still in progress')
      out.nl()

    # Merge staged, unstaged and untracked paths into one sorted list.
    paths = list()
    paths.extend(di.keys())
    paths.extend(df.keys())
    paths.extend(do)

    paths = list(set(paths))
    paths.sort()

    for p in paths:
      try: i = di[p]
      except KeyError: i = None

      try: f = df[p]
      except KeyError: f = None

      # Two status letters: staged (upper case) then unstaged (lower).
      if i: i_status = i.status.upper()
      else: i_status = '-'

      if f: f_status = f.status.lower()
      else: f_status = '-'

      if i and i.src_path:
        # Rename/copy: show old path, new path and similarity level.
        line = ' %s%s\t%s => %s (%s%%)' % (i_status, f_status,
                                        i.src_path, p, i.level)
      else:
        line = ' %s%s\t%s' % (i_status, f_status, p)

      if i and not f:
        out.added('%s', line)
      elif (i and f) or (not i and f):
        out.changed('%s', line)
      elif not i and not f:
        out.untracked('%s', line)
      else:
        out.write('%s', line)
      out.nl()
    return 'DIRTY'
  def PrintWorkTreeDiff(self):
    """Prints the diff of the working tree against HEAD to stdout.

    The project header is only printed when the diff is non-empty.
    """
    out = DiffColoring(self.config)
    cmd = ['diff']
    if out.is_on:
      cmd.append('--color')
    cmd.append(HEAD)
    cmd.append('--')
    p = GitCommand(self,
                   cmd,
                   capture_stdout = True,
                   capture_stderr = True)
    has_diff = False
    # Stream the diff line by line so the header can be emitted lazily.
    for line in p.process.stdout:
      if not has_diff:
        out.nl()
        out.project('project %s/' % self.relpath)
        out.nl()
        has_diff = True
      print line[:-1]
    p.Wait()
## Publish / Upload ##
def WasPublished(self, branch, all=None):
"""Was the branch published (uploaded) for code review?
If so, returns the SHA-1 hash of the last published
state for the branch.
"""
key = R_PUB + branch
if all is None:
try:
return self.bare_git.rev_parse(key)
except GitError:
return None
else:
try:
return all[key]
except KeyError:
return None
  def CleanPublishedCache(self, all=None):
    """Prunes any stale published refs.

    A refs/published/<name> entry is stale once the matching
    refs/heads/<name> branch no longer exists.
    """
    if all is None:
      all = self._allrefs  # NOTE: shadows the builtin 'all'
    heads = set()
    canrm = {}
    for name, id in all.iteritems():
      if name.startswith(R_HEADS):
        heads.add(name)
      elif name.startswith(R_PUB):
        canrm[name] = id

    for name, id in canrm.iteritems():
      n = name[len(R_PUB):]
      if R_HEADS + n not in heads:
        self.bare_git.DeleteRef(name, id)
def GetUploadableBranches(self):
"""List any branches which can be uploaded for review.
"""
heads = {}
pubed = {}
for name, id in self._allrefs.iteritems():
if name.startswith(R_HEADS):
heads[name[len(R_HEADS):]] = id
elif name.startswith(R_PUB):
pubed[name[len(R_PUB):]] = id
ready = []
for branch, id in heads.iteritems():
if branch in pubed and pubed[branch] == id:
continue
rb = self.GetUploadableBranch(branch)
if rb:
ready.append(rb)
return ready
def GetUploadableBranch(self, branch_name):
"""Get a single uploadable branch, or None.
"""
branch = self.GetBranch(branch_name)
base = branch.LocalMerge
if branch.LocalMerge:
rb = ReviewableBranch(self, branch, base)
if rb.commits:
return rb
return None
  def UploadForReview(self, branch=None,
                      people=([],[]),
                      auto_topic=False):
    """Uploads the named branch for code review.

    Args:
      branch:     branch name to upload; defaults to the current branch.
      people:     tuple of (reviewer emails, cc emails) passed to the
                  Gerrit receive-pack.
      auto_topic: append the branch name as a topic to the push refspec.

    Raises:
      GitError:    not on a branch, branch has no upstream, or the
                   remote has no review URL configured.
      UploadError: the push failed or the review protocol is not ssh.
    """
    if branch is None:
      branch = self.CurrentBranch
    if branch is None:
      raise GitError('not currently on a branch')
    branch = self.GetBranch(branch)
    if not branch.LocalMerge:
      raise GitError('branch %s does not track a remote' % branch.name)
    if not branch.remote.review:
      raise GitError('remote %s has no review url' % branch.remote.name)
    # Destination on the server is the tracked branch, as a full ref.
    dest_branch = branch.merge
    if not dest_branch.startswith(R_HEADS):
      dest_branch = R_HEADS + dest_branch
    if not branch.remote.projectname:
      # Remember the server-side project name for later uploads.
      branch.remote.projectname = self.name
      branch.remote.Save()
    if branch.remote.ReviewProtocol == 'ssh':
      if dest_branch.startswith(R_HEADS):
        # refs/for/ refspecs use the short branch name.
        dest_branch = dest_branch[len(R_HEADS):]
      # Build the custom receive-pack invocation carrying reviewer/cc flags.
      rp = ['gerrit receive-pack']
      for e in people[0]:
        rp.append('--reviewer=%s' % sq(e))
      for e in people[1]:
        rp.append('--cc=%s' % sq(e))
      ref_spec = '%s:refs/for/%s' % (R_HEADS + branch.name, dest_branch)
      if auto_topic:
        ref_spec = ref_spec + '/' + branch.name
      cmd = ['push']
      cmd.append('--receive-pack=%s' % " ".join(rp))
      cmd.append(branch.remote.SshReviewUrl(self.UserEmail))
      cmd.append(ref_spec)
      if GitCommand(self, cmd, bare = True).Wait() != 0:
        raise UploadError('Upload failed')
    else:
      raise UploadError('Unsupported protocol %s' \
        % branch.remote.review)
    # Record the uploaded state so future syncs know what was published.
    msg = "posted to %s for %s" % (branch.remote.review, dest_branch)
    self.bare_git.UpdateRef(R_PUB + branch.name,
                            R_HEADS + branch.name,
                            message = msg)
## Sync ##
  def Sync_NetworkHalf(self, quiet=False):
    """Perform only the network IO portion of the sync process.
       Local working directory/branch state is not affected.

    Returns True on success; False if the fetch failed.
    """
    is_new = not self.Exists
    if is_new:
      if not quiet:
        print >>sys.stderr
        print >>sys.stderr, 'Initializing project %s ...' % self.name
      self._InitGitDir()
    self._InitRemote()
    if not self._RemoteFetch(initial=is_new, quiet=quiet):
      return False
    # Check that the requested ref was actually obtained by the fetch;
    # if it is missing and names a tag, retry fetching that one tag
    # directly as a last resort.
    #
    try:
      self.GetRevisionId()
    except ManifestInvalidRevisionError:
      rev = self.revisionExpr
      if rev.startswith(R_TAGS):
        self._RemoteFetch(None, rev[len(R_TAGS):], quiet=quiet)
    if self.worktree:
      self.manifest.SetMRefs(self)
    else:
      # Mirror repository: point HEAD at the manifest revision and
      # drop the now-stale FETCH_HEAD (best effort).
      self._InitMirrorHead()
      try:
        os.remove(os.path.join(self.gitdir, 'FETCH_HEAD'))
      except OSError:
        pass
    return True
  def PostRepoUpgrade(self):
    """Hook run after repo itself is upgraded; refresh installed hooks."""
    self._InitHooks()
def _CopyFiles(self):
for file in self.copyfiles:
file._Copy()
  def GetRevisionId(self, all=None):
    """Resolve the manifest revision of this project to a commit SHA-1.

    A pinned revisionId wins outright.  Otherwise revisionExpr is mapped
    through the remote's ToLocal() and resolved, preferring the supplied
    ref dictionary *all* over invoking git.

    Raises:
      ManifestInvalidRevisionError: the revision cannot be resolved.
    """
    if self.revisionId:
      return self.revisionId
    rem = self.GetRemote(self.remote.name)
    rev = rem.ToLocal(self.revisionExpr)
    if all is not None and rev in all:
      return all[rev]
    try:
      # ^0 forces the result to be a commit (peels tags).
      return self.bare_git.rev_parse('--verify', '%s^0' % rev)
    except GitError:
      raise ManifestInvalidRevisionError(
        'revision %s in %s not found' % (self.revisionExpr,
                                         self.name))
  def Sync_LocalHalf(self, syncbuf):
    """Perform only the local IO portion of the sync process.
       Network access is not required.

    Updates the work tree to the manifest revision, choosing between
    checkout, fast-forward, rebase or hard reset depending on the
    current branch state.  Outcomes and deferred actions are reported
    through *syncbuf* rather than raised.
    """
    self._InitWorkTree()
    all = self.bare_ref.all
    self.CleanPublishedCache(all)
    revid = self.GetRevisionId(all)
    head = self.work_git.GetHead()
    if head.startswith(R_HEADS):
      # On a local branch: remember its name and resolve HEAD to a SHA-1.
      branch = head[len(R_HEADS):]
      try:
        head = all[head]
      except KeyError:
        head = None
    else:
      branch = None
    if branch is None or syncbuf.detach_head:
      # Currently on a detached HEAD.  The user is assumed to
      # not have any local modifications worth worrying about.
      #
      if self.IsRebaseInProgress():
        syncbuf.fail(self, _PriorSyncFailedError())
        return
      if head == revid:
        # No changes; don't do anything further.
        #
        return
      lost = self._revlist(not_rev(revid), HEAD)
      if lost:
        syncbuf.info(self, "discarding %d commits", len(lost))
      try:
        self._Checkout(revid, quiet=True)
      except GitError, e:
        syncbuf.fail(self, e)
        return
      self._CopyFiles()
      return
    if head == revid:
      # No changes; don't do anything further.
      #
      return
    branch = self.GetBranch(branch)
    if not branch.LocalMerge:
      # The current branch has no tracking configuration.
      # Jump off it to a detached HEAD.
      #
      syncbuf.info(self,
                   "leaving %s; does not track upstream",
                   branch.name)
      try:
        self._Checkout(revid, quiet=True)
      except GitError, e:
        syncbuf.fail(self, e)
        return
      self._CopyFiles()
      return
    upstream_gain = self._revlist(not_rev(HEAD), revid)
    pub = self.WasPublished(branch.name, all)
    if pub:
      not_merged = self._revlist(not_rev(revid), pub)
      if not_merged:
        if upstream_gain:
          # The user has published this branch and some of those
          # commits are not yet merged upstream. We do not want
          # to rewrite the published commits so we punt.
          #
          syncbuf.fail(self,
                       "branch %s is published (but not merged) and is now %d commits behind"
                       % (branch.name, len(upstream_gain)))
        return
      elif pub == head:
        # All published commits are merged, and thus we are a
        # strict subset. We can fast-forward safely.
        #
        def _doff():
          self._FastForward(revid)
          self._CopyFiles()
        syncbuf.later1(self, _doff)
        return
    # Examine the local commits not in the remote. Find the
    # last one attributed to this user, if any.
    #
    local_changes = self._revlist(not_rev(revid), HEAD, format='%H %ce')
    last_mine = None
    cnt_mine = 0
    for commit in local_changes:
      commit_id, committer_email = commit.split(' ', 1)
      if committer_email == self.UserEmail:
        last_mine = commit_id
        cnt_mine += 1
    if not upstream_gain and cnt_mine == len(local_changes):
      # Everything local is ours and upstream has nothing new: done.
      return
    if self.IsDirty(consider_untracked=False):
      # Cannot safely rewrite the work tree with uncommitted changes.
      syncbuf.fail(self, _DirtyError())
      return
    # If the upstream switched on us, warn the user.
    #
    if branch.merge != self.revisionExpr:
      if branch.merge and self.revisionExpr:
        syncbuf.info(self,
                     'manifest switched %s...%s',
                     branch.merge,
                     self.revisionExpr)
      elif branch.merge:
        syncbuf.info(self,
                     'manifest no longer tracks %s',
                     branch.merge)
    if cnt_mine < len(local_changes):
      # Upstream rebased. Not everything in HEAD
      # was created by this user.
      #
      syncbuf.info(self,
                   "discarding %d commits removed from upstream",
                   len(local_changes) - cnt_mine)
    # Re-point the branch's tracking configuration at the new upstream.
    branch.remote = self.GetRemote(self.remote.name)
    branch.merge = self.revisionExpr
    branch.Save()
    if cnt_mine > 0:
      # Rebase the user's own commits onto the new upstream (deferred).
      def _dorebase():
        self._Rebase(upstream = '%s^1' % last_mine, onto = revid)
        self._CopyFiles()
      syncbuf.later2(self, _dorebase)
    elif local_changes:
      # Only foreign commits locally; discard them with a hard reset.
      try:
        self._ResetHard(revid)
        self._CopyFiles()
      except GitError, e:
        syncbuf.fail(self, e)
        return
    else:
      # No local commits at all; plain fast-forward (deferred).
      def _doff():
        self._FastForward(revid)
        self._CopyFiles()
      syncbuf.later1(self, _doff)
  def AddCopyFile(self, src, dest, absdest):
    """Register a copy-file rule from manifest parsing.

    dest should already be an absolute path, but src is project
    relative; make src an absolute path before recording the rule.
    """
    abssrc = os.path.join(self.worktree, src)
    self.copyfiles.append(_CopyFile(src, dest, abssrc, absdest))
def DownloadPatchSet(self, change_id, patch_id):
"""Download a single patch set of a single change to FETCH_HEAD.
"""
remote = self.GetRemote(self.remote.name)
cmd = ['fetch', remote.name]
cmd.append('refs/changes/%2.2d/%d/%d' \
% (change_id % 100, change_id, patch_id))
cmd.extend(map(lambda x: str(x), remote.fetch))
if GitCommand(self, cmd, bare=True).Wait() != 0:
return None
return DownloadedChange(self,
self.GetRevisionId(),
change_id,
patch_id,
self.bare_git.rev_parse('FETCH_HEAD'))
## Branch Management ##
  def StartBranch(self, name):
    """Create a new branch off the manifest's revision.

    Returns True on success.  If the branch already exists it is
    simply checked out instead.
    """
    head = self.work_git.GetHead()
    if head == (R_HEADS + name):
      # Already on the requested branch.
      return True
    all = self.bare_ref.all
    if (R_HEADS + name) in all:
      # Branch exists; just switch to it.
      return GitCommand(self,
                        ['checkout', name, '--'],
                        capture_stdout = True,
                        capture_stderr = True).Wait() == 0
    branch = self.GetBranch(name)
    branch.remote = self.GetRemote(self.remote.name)
    branch.merge = self.revisionExpr
    revid = self.GetRevisionId(all)
    if head.startswith(R_HEADS):
      try:
        head = all[head]
      except KeyError:
        head = None
    if revid and head and revid == head:
      # HEAD is already at the target revision; create the branch by
      # writing the ref files directly, avoiding a git process.
      ref = os.path.join(self.gitdir, R_HEADS + name)
      try:
        os.makedirs(os.path.dirname(ref))
      except OSError:
        pass
      _lwrite(ref, '%s\n' % revid)
      _lwrite(os.path.join(self.worktree, '.git', HEAD),
              'ref: %s%s\n' % (R_HEADS, name))
      branch.Save()
      return True
    # Slow path: let git create the branch and update the work tree.
    if GitCommand(self,
                  ['checkout', '-b', branch.name, revid],
                  capture_stdout = True,
                  capture_stderr = True).Wait() == 0:
      branch.Save()
      return True
    return False
  def CheckoutBranch(self, name):
    """Checkout a local topic branch.

    Returns True on success; False if the branch does not exist or
    the checkout failed.
    """
    rev = R_HEADS + name
    head = self.work_git.GetHead()
    if head == rev:
      # Already on the branch
      #
      return True
    all = self.bare_ref.all
    try:
      revid = all[rev]
    except KeyError:
      # Branch does not exist in this project
      #
      return False
    if head.startswith(R_HEADS):
      try:
        head = all[head]
      except KeyError:
        head = None
    if head == revid:
      # Same revision; just update HEAD to point to the new
      # target branch, but otherwise take no other action.
      #
      _lwrite(os.path.join(self.worktree, '.git', HEAD),
              'ref: %s%s\n' % (R_HEADS, name))
      return True
    # Work tree content differs; let git perform the full checkout.
    return GitCommand(self,
                      ['checkout', name, '--'],
                      capture_stdout = True,
                      capture_stderr = True).Wait() == 0
  def AbandonBranch(self, name):
    """Destroy a local topic branch.

    Returns True on success (including when the branch did not exist).
    """
    rev = R_HEADS + name
    all = self.bare_ref.all
    if rev not in all:
      # Doesn't exist; assume already abandoned.
      #
      return True
    head = self.work_git.GetHead()
    if head == rev:
      # We can't destroy the branch while we are sitting
      # on it. Switch to a detached HEAD.
      #
      head = all[head]
      revid = self.GetRevisionId(all)
      if head == revid:
        # Work tree already matches; detach by writing HEAD directly.
        _lwrite(os.path.join(self.worktree, '.git', HEAD),
                '%s\n' % revid)
      else:
        self._Checkout(revid, quiet=True)
    return GitCommand(self,
                      ['branch', '-D', name],
                      capture_stdout = True,
                      capture_stderr = True).Wait() == 0
  def PruneHeads(self):
    """Prune any topic branches already merged into upstream.

    Returns the list of ReviewableBranch objects that could NOT be
    deleted (branches still carrying unmerged work).
    """
    cb = self.CurrentBranch
    kill = []
    left = self._allrefs
    for name in left.keys():
      if name.startswith(R_HEADS):
        name = name[len(R_HEADS):]
        if cb is None or name != cb:
          kill.append(name)
    rev = self.GetRevisionId(left)
    if cb is not None \
       and not self._revlist(HEAD + '...' + rev) \
       and not self.IsDirty(consider_untracked = False):
      # The current branch is fully merged and the tree is clean:
      # detach so it can be deleted along with the rest.
      self.work_git.DetachHead(HEAD)
      kill.append(cb)
    if kill:
      # Temporarily detach the bare HEAD so 'branch -d' can delete
      # any branch, then restore it afterwards.
      old = self.bare_git.GetHead()
      if old is None:
        old = 'refs/heads/please_never_use_this_as_a_branch_name'
      try:
        self.bare_git.DetachHead(rev)
        b = ['branch', '-d']
        b.extend(kill)
        b = GitCommand(self, b, bare=True,
                       capture_stdout=True,
                       capture_stderr=True)
        b.Wait()
      finally:
        self.bare_git.SetHead(old)
        left = self._allrefs
      # If anything was actually deleted, published refs may be stale.
      for branch in kill:
        if (R_HEADS + branch) not in left:
          self.CleanPublishedCache()
          break
    if cb and cb not in kill:
      kill.append(cb)
    kill.sort()
    kept = []
    for branch in kill:
      if (R_HEADS + branch) in left:
        # git refused to delete it (unmerged work); report it back.
        branch = self.GetBranch(branch)
        base = branch.LocalMerge
        if not base:
          base = rev
        kept.append(ReviewableBranch(self, branch, base))
    return kept
## Direct Git Commands ##
  def _RemoteFetch(self, name=None, tag=None,
                   initial=False,
                   quiet=False):
    """Run 'git fetch' against the named remote.

    When *initial* is True and a reference repository is configured
    (objects/info/alternates), its refs are temporarily published in
    packed-refs so the fetch can reuse its objects; the temporary
    entries are removed again afterwards.

    Returns True if the fetch succeeded.
    """
    if not name:
      name = self.remote.name
    ssh_proxy = False
    if self.GetRemote(name).PreConnectFetch():
      ssh_proxy = True
    if initial:
      # Discover the reference repository, if any, from the first
      # line of objects/info/alternates.
      alt = os.path.join(self.gitdir, 'objects/info/alternates')
      try:
        fd = open(alt, 'rb')
        try:
          ref_dir = fd.readline()
          if ref_dir and ref_dir.endswith('\n'):
            ref_dir = ref_dir[:-1]
        finally:
          fd.close()
      except IOError, e:
        ref_dir = None
      if ref_dir and 'objects' == os.path.basename(ref_dir):
        ref_dir = os.path.dirname(ref_dir)
        packed_refs = os.path.join(self.gitdir, 'packed-refs')
        remote = self.GetRemote(name)
        all = self.bare_ref.all
        ids = set(all.values())
        tmp = set()
        for r, id in GitRefs(ref_dir).all.iteritems():
          if r not in all:
            if r.startswith(R_TAGS) or remote.WritesTo(r):
              # Ref name is safe to borrow directly.
              all[r] = id
              ids.add(id)
              continue
          if id in ids:
            continue
          # Unknown object under a conflicting name: expose it under a
          # throwaway refs/_alt/ name so the server can see we have it.
          r = 'refs/_alt/%s' % id
          all[r] = id
          ids.add(id)
          tmp.add(r)
        ref_names = list(all.keys())
        ref_names.sort()
        tmp_packed = ''
        old_packed = ''
        for r in ref_names:
          line = '%s %s\n' % (all[r], r)
          tmp_packed += line
          if r not in tmp:
            old_packed += line
        _lwrite(packed_refs, tmp_packed)
      else:
        ref_dir = None
    cmd = ['fetch']
    if quiet:
      cmd.append('--quiet')
    if not self.worktree:
      # Bare/mirror repository: allow the fetch to update HEAD's branch.
      cmd.append('--update-head-ok')
    cmd.append(name)
    if tag is not None:
      cmd.append('tag')
      cmd.append(tag)
    ok = GitCommand(self,
                    cmd,
                    bare = True,
                    ssh_proxy = ssh_proxy).Wait() == 0
    if initial:
      if ref_dir:
        # Restore packed-refs without the temporary refs/_alt entries.
        if old_packed != '':
          _lwrite(packed_refs, old_packed)
        else:
          os.remove(packed_refs)
      self.bare_git.pack_refs('--all', '--prune')
    return ok
def _Checkout(self, rev, quiet=False):
cmd = ['checkout']
if quiet:
cmd.append('-q')
cmd.append(rev)
cmd.append('--')
if GitCommand(self, cmd).Wait() != 0:
if self._allrefs:
raise GitError('%s checkout %s ' % (self.name, rev))
def _ResetHard(self, rev, quiet=True):
cmd = ['reset', '--hard']
if quiet:
cmd.append('-q')
cmd.append(rev)
if GitCommand(self, cmd).Wait() != 0:
raise GitError('%s reset --hard %s ' % (self.name, rev))
def _Rebase(self, upstream, onto = None):
cmd = ['rebase']
if onto is not None:
cmd.extend(['--onto', onto])
cmd.append(upstream)
if GitCommand(self, cmd).Wait() != 0:
raise GitError('%s rebase %s ' % (self.name, upstream))
def _FastForward(self, head):
cmd = ['merge', head]
if GitCommand(self, cmd).Wait() != 0:
raise GitError('%s merge %s ' % (self.name, head))
  def _InitGitDir(self):
    """Create and configure the bare git directory for this project.

    No-op if the directory already exists.  Optionally links a
    reference repository (repo.reference) via objects/info/alternates,
    sets core.bare, installs hooks, and seeds user.name/user.email
    from the manifest project's configuration.
    """
    if not os.path.exists(self.gitdir):
      os.makedirs(self.gitdir)
      self.bare_git.init()
      mp = self.manifest.manifestProject
      ref_dir = mp.config.GetString('repo.reference')
      if ref_dir:
        # Prefer a plain mirror checkout; fall back to another repo
        # client's project directory; otherwise no reference is used.
        mirror_git = os.path.join(ref_dir, self.name + '.git')
        repo_git = os.path.join(ref_dir, '.repo', 'projects',
                                self.relpath + '.git')
        if os.path.exists(mirror_git):
          ref_dir = mirror_git
        elif os.path.exists(repo_git):
          ref_dir = repo_git
        else:
          ref_dir = None
        if ref_dir:
          _lwrite(os.path.join(self.gitdir, 'objects/info/alternates'),
                  os.path.join(ref_dir, 'objects') + '\n')
      if self.manifest.IsMirror:
        self.config.SetString('core.bare', 'true')
      else:
        self.config.SetString('core.bare', None)
      # Remove git's sample hooks before installing repo's own.
      hooks = self._gitdir_path('hooks')
      try:
        to_rm = os.listdir(hooks)
      except OSError:
        to_rm = []
      for old_hook in to_rm:
        os.remove(os.path.join(hooks, old_hook))
      self._InitHooks()
      m = self.manifest.manifestProject.config
      for key in ['user.name', 'user.email']:
        if m.Has(key, include_defaults = False):
          self.config.SetString(key, m.GetString(key))
def _InitHooks(self):
hooks = self._gitdir_path('hooks')
if not os.path.exists(hooks):
os.makedirs(hooks)
for stock_hook in repo_hooks():
name = os.path.basename(stock_hook)
if name in ('commit-msg') and not self.remote.review:
# Don't install a Gerrit Code Review hook if this
# project does not appear to use it for reviews.
#
continue
dst = os.path.join(hooks, name)
if os.path.islink(dst):
continue
if os.path.exists(dst):
if filecmp.cmp(stock_hook, dst, shallow=False):
os.remove(dst)
else:
_error("%s: Not replacing %s hook", self.relpath, name)
continue
try:
os.symlink(relpath(stock_hook, dst), dst)
except OSError, e:
if e.errno == errno.EPERM:
raise GitError('filesystem must support symlinks')
else:
raise
def _InitRemote(self):
if self.remote.url:
remote = self.GetRemote(self.remote.name)
remote.url = self.remote.url
remote.review = self.remote.review
remote.projectname = self.name
if self.worktree:
remote.ResetFetch(mirror=False)
else:
remote.ResetFetch(mirror=True)
remote.Save()
  def _InitMirrorHead(self):
    # A mirror has no work tree; simply align the bare HEAD with the
    # manifest revision (see _InitAnyMRef).
    self._InitAnyMRef(HEAD)
  def _InitAnyMRef(self, ref):
    """Point *ref* at the manifest revision.

    A pinned revisionId is written as a detached update; a symbolic
    revisionExpr becomes a symbolic-ref to its remote-tracking ref.
    """
    cur = self.bare_ref.symref(ref)
    if self.revisionId:
      if cur != '' or self.bare_ref.get(ref) != self.revisionId:
        msg = 'manifest set to %s' % self.revisionId
        dst = self.revisionId + '^0'
        self.bare_git.UpdateRef(ref, dst, message = msg, detach = True)
    else:
      remote = self.GetRemote(self.remote.name)
      dst = remote.ToLocal(self.revisionExpr)
      if cur != dst:
        msg = 'manifest set to %s' % self.revisionExpr
        self.bare_git.symbolic_ref('-m', msg, ref, dst)
  def _LinkWorkTree(self, relink=False):
    """Populate <worktree>/.git with symlinks into the bare gitdir.

    With relink=True the existing links are removed and recreated.

    Raises:
      GitError: a real file is in the way, or the filesystem does not
        support symlinks.
    """
    dotgit = os.path.join(self.worktree, '.git')
    if not relink:
      os.makedirs(dotgit)
    for name in ['config',
                 'description',
                 'hooks',
                 'info',
                 'logs',
                 'objects',
                 'packed-refs',
                 'refs',
                 'rr-cache',
                 'svn']:
      try:
        src = os.path.join(self.gitdir, name)
        dst = os.path.join(dotgit, name)
        if relink:
          os.remove(dst)
        if os.path.islink(dst) or not os.path.exists(dst):
          os.symlink(relpath(src, dst), dst)
        else:
          raise GitError('cannot overwrite a local work tree')
      except OSError, e:
        if e.errno == errno.EPERM:
          raise GitError('filesystem must support symlinks')
        else:
          raise
  def _InitWorkTree(self):
    """Create the work tree on first use and check out the revision.

    No-op if <worktree>/.git already exists.

    Raises:
      GitError: the initial read-tree failed.
    """
    dotgit = os.path.join(self.worktree, '.git')
    if not os.path.exists(dotgit):
      self._LinkWorkTree()
      # Detach HEAD directly at the manifest revision.
      _lwrite(os.path.join(dotgit, HEAD), '%s\n' % self.GetRevisionId())
      cmd = ['read-tree', '--reset', '-u']
      cmd.append('-v')
      cmd.append(HEAD)
      if GitCommand(self, cmd).Wait() != 0:
        raise GitError("cannot initialize work tree")
      self._CopyFiles()
  def _gitdir_path(self, path):
    """Return *path* resolved underneath this project's git directory."""
    return os.path.join(self.gitdir, path)
def _revlist(self, *args, **kw):
a = []
a.extend(args)
a.append('--')
return self.work_git.rev_list(*a, **kw)
  @property
  def _allrefs(self):
    """All refs of the bare repository, as a {ref name: SHA-1} mapping."""
    return self.bare_ref.all
  class _GitGetByExec(object):
    """Thin wrapper that runs git subcommands for a project.

    Unknown attribute access (with '_' mapped to '-') becomes a git
    subcommand invocation returning its captured stdout.
    """
    def __init__(self, project, bare):
      self._project = project
      self._bare = bare

    def LsOthers(self):
      """Return untracked (non-ignored) paths in the work tree."""
      p = GitCommand(self._project,
                     ['ls-files',
                      '-z',
                      '--others',
                      '--exclude-standard'],
                     bare = False,
                     capture_stdout = True,
                     capture_stderr = True)
      if p.Wait() == 0:
        out = p.stdout
        if out:
          # NUL-separated output; drop the trailing NUL before split.
          return out[:-1].split("\0")
      return []

    def DiffZ(self, name, *args):
      """Run a '-z' diff command and parse it into {path: _Info}."""
      cmd = [name]
      cmd.append('-z')
      cmd.extend(args)
      p = GitCommand(self._project,
                     cmd,
                     bare = False,
                     capture_stdout = True,
                     capture_stderr = True)
      try:
        out = p.process.stdout.read()
        r = {}
        if out:
          out = iter(out[:-1].split('\0'))
          while out:
            # Records arrive as (status-info, path[, new path]) triples.
            try:
              info = out.next()
              path = out.next()
            except StopIteration:
              break

            class _Info(object):
              def __init__(self, path, omode, nmode, oid, nid, state):
                self.path = path
                self.src_path = None
                self.old_mode = omode
                self.new_mode = nmode
                self.old_id = oid
                self.new_id = nid
                if len(state) == 1:
                  self.status = state
                  self.level = None
                else:
                  # e.g. 'R086': status letter plus similarity score;
                  # strip leading zeros from the score.
                  self.status = state[:1]
                  self.level = state[1:]
                  while self.level.startswith('0'):
                    self.level = self.level[1:]

            # info line format: ':omode nmode oid nid state'
            info = info[1:].split(' ')
            info =_Info(path, *info)
            if info.status in ('R', 'C'):
              # Renames/copies carry the destination path as an
              # additional NUL-separated field.
              info.src_path = info.path
              info.path = out.next()
            r[info.path] = info
        return r
      finally:
        p.Wait()

    def GetHead(self):
      """Read HEAD; returns the target ref name or a raw SHA-1."""
      if self._bare:
        path = os.path.join(self._project.gitdir, HEAD)
      else:
        path = os.path.join(self._project.worktree, '.git', HEAD)
      fd = open(path, 'rb')
      try:
        line = fd.read()
      finally:
        fd.close()
      if line.startswith('ref: '):
        return line[5:-1]
      return line[:-1]

    def SetHead(self, ref, message=None):
      """Point HEAD symbolically at *ref*."""
      cmdv = []
      if message is not None:
        cmdv.extend(['-m', message])
      cmdv.append(HEAD)
      cmdv.append(ref)
      self.symbolic_ref(*cmdv)

    def DetachHead(self, new, message=None):
      """Detach HEAD directly at commit *new*."""
      cmdv = ['--no-deref']
      if message is not None:
        cmdv.extend(['-m', message])
      cmdv.append(HEAD)
      cmdv.append(new)
      self.update_ref(*cmdv)

    def UpdateRef(self, name, new, old=None,
                  message=None,
                  detach=False):
      """Update ref *name* to *new*, optionally verifying *old*."""
      cmdv = []
      if message is not None:
        cmdv.extend(['-m', message])
      if detach:
        cmdv.append('--no-deref')
      cmdv.append(name)
      cmdv.append(new)
      if old is not None:
        cmdv.append(old)
      self.update_ref(*cmdv)

    def DeleteRef(self, name, old=None):
      """Delete ref *name* and drop it from the cached ref store."""
      if not old:
        old = self.rev_parse(name)
      self.update_ref('-d', name, old)
      self._project.bare_ref.deleted(name)

    def rev_list(self, *args, **kw):
      """Run rev-list (or log, when a format is given); return lines.

      Raises:
        GitError: the underlying command failed.
      """
      if 'format' in kw:
        cmdv = ['log', '--pretty=format:%s' % kw['format']]
      else:
        cmdv = ['rev-list']
      cmdv.extend(args)
      p = GitCommand(self._project,
                     cmdv,
                     bare = self._bare,
                     capture_stdout = True,
                     capture_stderr = True)
      r = []
      for line in p.process.stdout:
        if line[-1] == '\n':
          line = line[:-1]
        r.append(line)
      if p.Wait() != 0:
        raise GitError('%s rev-list %s: %s' % (
                       self._project.name,
                       str(args),
                       p.stderr))
      return r

    def __getattr__(self, name):
      # Turn any unknown attribute into a git subcommand runner,
      # e.g. self.pack_refs(...) -> 'git pack-refs ...'.
      name = name.replace('_', '-')
      def runner(*args):
        cmdv = [name]
        cmdv.extend(args)
        p = GitCommand(self._project,
                       cmdv,
                       bare = self._bare,
                       capture_stdout = True,
                       capture_stderr = True)
        if p.Wait() != 0:
          raise GitError('%s %s: %s' % (
                         self._project.name,
                         name,
                         p.stderr))
        r = p.stdout
        # Strip a single trailing newline from one-line output.
        if r.endswith('\n') and r.index('\n') == len(r) - 1:
          return r[:-1]
        return r
      return runner
class _PriorSyncFailedError(Exception):
def __str__(self):
return 'prior sync failed; rebase still in progress'
class _DirtyError(Exception):
def __str__(self):
return 'contains uncommitted changes'
class _InfoMessage(object):
  """A queued informational message about one project."""
  def __init__(self, project, text):
    self.project = project
    self.text = text

  def Print(self, syncbuf):
    """Emit the message on the sync buffer's output."""
    syncbuf.out.info('%s/: %s', self.project.relpath, self.text)
    syncbuf.out.nl()
class _Failure(object):
  """A queued failure report (project + reason) for the sync buffer."""
  def __init__(self, project, why):
    self.project = project
    self.why = why

  def Print(self, syncbuf):
    """Emit the failure on the sync buffer's output."""
    syncbuf.out.fail('error: %s/: %s',
                     self.project.relpath,
                     str(self.why))
    syncbuf.out.nl()
class _Later(object):
  """A deferred action for one project, run during SyncBuffer.Finish."""
  def __init__(self, project, action):
    self.project = project
    self.action = action

  def Run(self, syncbuf):
    """Execute the action; return True on success, False on GitError."""
    out = syncbuf.out
    out.project('project %s/', self.project.relpath)
    out.nl()
    try:
      self.action()
      out.nl()
      return True
    except GitError, e:
      # The action already reported details via git; just mark failure.
      out.nl()
      return False
class _SyncColoring(Coloring):
  """Color palette for sync progress output ('reposync' config section)."""
  def __init__(self, config):
    Coloring.__init__(self, config, 'reposync')
    self.project = self.printer('header', attr = 'bold')
    self.info = self.printer('info')
    self.fail = self.printer('fail', fg='red')
class SyncBuffer(object):
  """Collects messages, failures and deferred actions during a sync.

  Output is buffered so multiple projects can sync without interleaving;
  deferred actions run in two priority queues when Finish() is called.
  """
  def __init__(self, config, detach_head=False):
    self._messages = []
    self._failures = []
    self._later_queue1 = []
    self._later_queue2 = []
    self.out = _SyncColoring(config)
    self.out.redirect(sys.stderr)
    # detach_head: sync should leave projects on a detached HEAD.
    self.detach_head = detach_head
    # clean: False once any failure or failed deferred action occurs.
    self.clean = True

  def info(self, project, fmt, *args):
    """Queue an informational message for *project*."""
    self._messages.append(_InfoMessage(project, fmt % args))

  def fail(self, project, err=None):
    """Queue a failure for *project* and mark the sync unclean."""
    self._failures.append(_Failure(project, err))
    self.clean = False

  def later1(self, project, what):
    """Queue a first-phase deferred action (e.g. fast-forward)."""
    self._later_queue1.append(_Later(project, what))

  def later2(self, project, what):
    """Queue a second-phase deferred action (e.g. rebase)."""
    self._later_queue2.append(_Later(project, what))

  def Finish(self):
    """Flush messages, run deferred actions; return overall cleanliness."""
    self._PrintMessages()
    self._RunLater()
    self._PrintMessages()
    return self.clean

  def _RunLater(self):
    for q in ['_later_queue1', '_later_queue2']:
      if not self._RunQueue(q):
        return

  def _RunQueue(self, queue):
    # Stop at the first failing action; remaining entries are kept.
    for m in getattr(self, queue):
      if not m.Run(self):
        self.clean = False
        return False
    setattr(self, queue, [])
    return True

  def _PrintMessages(self):
    for m in self._messages:
      m.Print(self)
    for m in self._failures:
      m.Print(self)
    self._messages = []
    self._failures = []
class MetaProject(Project):
  """A special project housed under .repo.
  """
  def __init__(self, manifest, name, gitdir, worktree, relpath=None):
    repodir = manifest.repodir
    if relpath is None:
      relpath = '.repo/%s' % name
    Project.__init__(self,
                     manifest = manifest,
                     name = name,
                     gitdir = gitdir,
                     worktree = worktree,
                     remote = RemoteSpec('origin'),
                     relpath = relpath,
                     revisionExpr = 'refs/heads/master',
                     revisionId = None)

  def PreSync(self):
    """Before syncing, follow the currently checked out branch's
    tracking configuration instead of the default revision."""
    if self.Exists:
      cb = self.CurrentBranch
      if cb:
        cb = self.GetBranch(cb)
        if cb.merge:
          self.revisionExpr = cb.merge
          self.revisionId = None
        if cb.remote and cb.remote.name:
          self.remote.name = cb.remote.name

  @property
  def LastFetch(self):
    """mtime of FETCH_HEAD (seconds), or 0 if never fetched."""
    try:
      fh = os.path.join(self.gitdir, 'FETCH_HEAD')
      return os.path.getmtime(fh)
    except OSError:
      return 0

  @property
  def HasChanges(self):
    """Has the remote received new commits not yet checked out?
    """
    if not self.remote or not self.revisionExpr:
      return False
    all = self.bare_ref.all
    revid = self.GetRevisionId(all)
    head = self.work_git.GetHead()
    if head.startswith(R_HEADS):
      # Resolve a symbolic HEAD to its commit.
      try:
        head = all[head]
      except KeyError:
        head = None
    if revid == head:
      return False
    elif self._revlist(not_rev(HEAD), revid):
      # There are commits in the target revision not reachable from HEAD.
      return True
    return False
| Python |
#
# Copyright (C) 2008 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import cPickle
import os
import re
import subprocess
import sys
try:
import threading as _threading
except ImportError:
import dummy_threading as _threading
import time
import urllib2
from signal import SIGTERM
from urllib2 import urlopen, HTTPError
from error import GitError, UploadError
from trace import Trace
from git_command import GitCommand
from git_command import ssh_sock
from git_command import terminate_ssh_clients
# Ref namespace prefixes used throughout this module.
R_HEADS = 'refs/heads/'
R_TAGS = 'refs/tags/'
# A full 40-hex-digit (lowercase) SHA-1 object name.
ID_RE = re.compile('^[0-9a-f]{40}$')
# Process-wide cache of Remote review-server lookups, keyed by URL.
REVIEW_CACHE = dict()
def IsId(rev):
  """Return a truthy match object when *rev* is a full SHA-1, else None."""
  return ID_RE.match(rev)
def _key(name):
parts = name.split('.')
if len(parts) < 2:
return name.lower()
parts[ 0] = parts[ 0].lower()
parts[-1] = parts[-1].lower()
return '.'.join(parts)
class GitConfig(object):
  """Reads and writes a git configuration file, with caching.

  Parsed values are cached in memory and persisted to a pickle file
  next to the config file so repeated reads avoid spawning git.
  """
  # Lazily created singleton for the user's ~/.gitconfig.
  _ForUser = None

  @classmethod
  def ForUser(cls):
    """Return the shared GitConfig for ~/.gitconfig."""
    if cls._ForUser is None:
      cls._ForUser = cls(file = os.path.expanduser('~/.gitconfig'))
    return cls._ForUser

  @classmethod
  def ForRepository(cls, gitdir, defaults=None):
    """Return a GitConfig for <gitdir>/config, with optional defaults."""
    return cls(file = os.path.join(gitdir, 'config'),
               defaults = defaults)

  def __init__(self, file, defaults=None, pickleFile=None):
    self.file = file
    # defaults: another GitConfig consulted when a key is missing here.
    self.defaults = defaults
    self._cache_dict = None
    self._section_dict = None
    self._remotes = {}
    self._branches = {}
    if pickleFile is None:
      self._pickle = os.path.join(
        os.path.dirname(self.file),
        '.repopickle_' + os.path.basename(self.file))
    else:
      self._pickle = pickleFile

  def ClearCache(self):
    """Drop all cached state, including the on-disk pickle."""
    if os.path.exists(self._pickle):
      os.remove(self._pickle)
    self._cache_dict = None
    self._section_dict = None
    self._remotes = {}
    self._branches = {}

  def Has(self, name, include_defaults = True):
    """Return true if this configuration file has the key.
    """
    if _key(name) in self._cache:
      return True
    if include_defaults and self.defaults:
      return self.defaults.Has(name, include_defaults = True)
    return False

  def GetBoolean(self, name):
    """Returns a boolean from the configuration file.
       None : The value was not defined, or is not a boolean.
       True : The value was set to true or yes.
       False: The value was set to false or no.
    """
    v = self.GetString(name)
    if v is None:
      return None
    v = v.lower()
    if v in ('true', 'yes'):
      return True
    if v in ('false', 'no'):
      return False
    return None

  def GetString(self, name, all=False):
    """Get the first value for a key, or None if it is not defined.

       This configuration file is used first, if the key is not
       defined or all = True then the defaults are also searched.
    """
    try:
      v = self._cache[_key(name)]
    except KeyError:
      if self.defaults:
        return self.defaults.GetString(name, all = all)
      v = []
    if not all:
      if v:
        return v[0]
      return None
    # all=True: concatenate this file's values with the defaults'.
    r = []
    r.extend(v)
    if self.defaults:
      r.extend(self.defaults.GetString(name, all = True))
    return r

  def SetString(self, name, value):
    """Set the value(s) for a key.
       Only this configuration file is modified.

       The supplied value should be either a string,
       or a list of strings (to store multiple values).
    """
    key = _key(name)
    try:
      old = self._cache[key]
    except KeyError:
      old = []
    if value is None:
      # None removes the key entirely.
      if old:
        del self._cache[key]
        self._do('--unset-all', name)
    elif isinstance(value, list):
      if len(value) == 0:
        self.SetString(name, None)
      elif len(value) == 1:
        self.SetString(name, value[0])
      elif old != value:
        # Multi-valued key: replace the first, then add the rest.
        self._cache[key] = list(value)
        self._do('--replace-all', name, value[0])
        for i in xrange(1, len(value)):
          self._do('--add', name, value[i])
    elif len(old) != 1 or old[0] != value:
      self._cache[key] = [value]
      self._do('--replace-all', name, value)

  def GetRemote(self, name):
    """Get the remote.$name.* configuration values as an object.
    """
    try:
      r = self._remotes[name]
    except KeyError:
      r = Remote(self, name)
      self._remotes[r.name] = r
    return r

  def GetBranch(self, name):
    """Get the branch.$name.* configuration values as an object.
    """
    try:
      b = self._branches[name]
    except KeyError:
      b = Branch(self, name)
      self._branches[b.name] = b
    return b

  def GetSubSections(self, section):
    """List all subsection names matching $section.*.*
    """
    return self._sections.get(section, set())

  def HasSection(self, section, subsection = ''):
    """Does at least one key in section.subsection exist?
    """
    try:
      return subsection in self._sections[section]
    except KeyError:
      return False

  @property
  def _sections(self):
    # Lazily build {section: set(subsections)} from the cached keys.
    d = self._section_dict
    if d is None:
      d = {}
      for name in self._cache.keys():
        p = name.split('.')
        if 2 == len(p):
          section = p[0]
          subsect = ''
        else:
          section = p[0]
          subsect = '.'.join(p[1:-1])
        if section not in d:
          d[section] = set()
        d[section].add(subsect)
        self._section_dict = d
    return d

  @property
  def _cache(self):
    # Lazily parse the file into {normalized key: [values]}.
    if self._cache_dict is None:
      self._cache_dict = self._Read()
    return self._cache_dict

  def _Read(self):
    # Prefer the pickle cache; fall back to running git and re-pickling.
    d = self._ReadPickle()
    if d is None:
      d = self._ReadGit()
      self._SavePickle(d)
    return d

  def _ReadPickle(self):
    try:
      if os.path.getmtime(self._pickle) \
      <= os.path.getmtime(self.file):
        # Pickle is stale relative to the config file; discard it.
        os.remove(self._pickle)
        return None
    except OSError:
      return None
    try:
      Trace(': unpickle %s', self.file)
      fd = open(self._pickle, 'rb')
      try:
        return cPickle.load(fd)
      finally:
        fd.close()
    except EOFError:
      os.remove(self._pickle)
      return None
    except IOError:
      os.remove(self._pickle)
      return None
    except cPickle.PickleError:
      os.remove(self._pickle)
      return None

  def _SavePickle(self, cache):
    # Best effort: on any failure remove the (possibly partial) pickle.
    try:
      fd = open(self._pickle, 'wb')
      try:
        cPickle.dump(cache, fd, cPickle.HIGHEST_PROTOCOL)
      finally:
        fd.close()
    except IOError:
      if os.path.exists(self._pickle):
        os.remove(self._pickle)
    except cPickle.PickleError:
      if os.path.exists(self._pickle):
        os.remove(self._pickle)

  def _ReadGit(self):
    """
    Read configuration data from git.

    This internal method populates the GitConfig cache.

    """
    c = {}
    d = self._do('--null', '--list')
    if d is None:
      return c
    # Entries are NUL-separated; key and value split on first newline.
    for line in d.rstrip('\0').split('\0'):
      if '\n' in line:
        key, val = line.split('\n', 1)
      else:
        key = line
        val = None
      if key in c:
        c[key].append(val)
      else:
        c[key] = [val]
    return c

  def _do(self, *args):
    # Run 'git config --file <file> <args>'; return stdout on success.
    command = ['config', '--file', self.file]
    command.extend(args)
    p = GitCommand(None,
                   command,
                   capture_stdout = True,
                   capture_stderr = True)
    if p.Wait() == 0:
      return p.stdout
    else:
      # NOTE(review): GitError is constructed but never raised, so a
      # failing git config silently returns None; _ReadGit's `d is None`
      # check appears to rely on that fallthrough — confirm intent
      # before converting this into a `raise`.
      GitError('git config %s: %s' % (str(args), p.stderr))
class RefSpec(object):
  """A Git refspec line, split into its components:

      forced: True if the line starts with '+'
      src:    left side of the line
      dst:    right side of the line
  """

  @classmethod
  def FromString(cls, rs):
    """Parse 'lhs:rhs' (optionally '+'-prefixed) into a RefSpec."""
    lhs, rhs = rs.split(':', 2)
    forced = lhs.startswith('+')
    if forced:
      lhs = lhs[1:]
    return cls(forced, lhs, rhs)

  def __init__(self, forced, lhs, rhs):
    self.forced = forced
    self.src = lhs
    self.dst = rhs

  def SourceMatches(self, rev):
    """True if *rev* is covered by the source side of this refspec."""
    return self._Matches(self.src, rev)

  def DestMatches(self, ref):
    """True if *ref* is covered by the destination side of this refspec."""
    return self._Matches(self.dst, ref)

  @staticmethod
  def _Matches(pattern, name):
    # Exact match, or prefix match when the pattern ends in '/*'.
    if not pattern:
      return False
    if name == pattern:
      return True
    return pattern.endswith('/*') and name.startswith(pattern[:-1])

  def MapSource(self, rev):
    """Translate a source ref name through this refspec's mapping."""
    if self.src.endswith('/*'):
      return self.dst[:-1] + rev[len(self.src) - 1:]
    return self.dst

  def __str__(self):
    text = '+' if self.forced else ''
    if self.src:
      text += self.src
    if self.dst:
      text += ':' + self.dst
    return text
_master_processes = []
_master_keys = set()
_ssh_master = True
_master_keys_lock = None
def init_ssh():
  """Should be called once at the start of repo to init ssh master handling.

     At the moment, all we do is to create our lock.
  """
  global _master_keys_lock
  # Guard against double initialization; the lock protects _master_keys.
  assert _master_keys_lock is None, "Should only call init_ssh once"
  _master_keys_lock = _threading.Lock()
def _open_ssh(host, port=None):
  """Ensure an ssh control master is running for host (and optional port).

  Returns True when a master is (now) available, False when one cannot
  be started: an earlier attempt failed, GIT_SSH overrides ssh, or the
  platform (win32/cygwin) cannot support control masters.
  """
  global _ssh_master

  # Acquire the lock.  This is needed to prevent opening multiple masters for
  # the same host when we're running "repo sync -jN" (for N > 1) _and_ the
  # manifest <remote fetch="ssh://xyz"> specifies a different host from the
  # one that was passed to repo init.
  _master_keys_lock.acquire()
  try:

    # Check to see whether we already think that the master is running; if we
    # think it's already running, return right away.
    if port is not None:
      key = '%s:%s' % (host, port)
    else:
      key = host
    if key in _master_keys:
      return True

    if not _ssh_master \
    or 'GIT_SSH' in os.environ \
    or sys.platform in ('win32', 'cygwin'):
      # failed earlier, or cygwin ssh can't do this
      #
      return False

    # We will make two calls to ssh; this is the common part of both calls.
    command_base = ['ssh',
                     '-o','ControlPath %s' % ssh_sock(),
                     host]
    if port is not None:
      command_base[1:1] = ['-p',str(port)]

    # Since the key wasn't in _master_keys, we think that master isn't running.
    # ...but before actually starting a master, we'll double-check.  This can
    # be important because we can't tell that 'git@myhost.com' is the same
    # as 'myhost.com' where "User git" is setup in the user's ~/.ssh/config file.
    check_command = command_base + ['-O','check']
    try:
      Trace(': %s', ' '.join(check_command))
      check_process = subprocess.Popen(check_command,
                                       stdout=subprocess.PIPE,
                                       stderr=subprocess.PIPE)
      check_process.communicate() # read output, but ignore it...
      isnt_running = check_process.wait()

      if not isnt_running:
        # Our double-check found that the master _was_ in fact running.  Add to
        # the list of keys.
        _master_keys.add(key)
        return True
    except Exception:
      # Ignore exceptions.  We will fall back to the normal command and print
      # to the log there.
      pass

    # '-M -N' asks ssh to become a control master and run no command.
    command = command_base[:1] + \
              ['-M', '-N'] + \
              command_base[1:]
    try:
      Trace(': %s', ' '.join(command))
      p = subprocess.Popen(command)
    except Exception, e:
      _ssh_master = False
      print >>sys.stderr, \
        '\nwarn: cannot enable ssh control master for %s:%s\n%s' \
        % (host,port, str(e))
      return False

    _master_processes.append(p)
    _master_keys.add(key)
    # Give the freshly-started master a moment to create its socket.
    time.sleep(1)
    return True
  finally:
    _master_keys_lock.release()
def close_ssh():
  """Terminate all ssh masters started by _open_ssh and reset state."""
  global _master_keys_lock

  terminate_ssh_clients()

  for p in _master_processes:
    try:
      os.kill(p.pid, SIGTERM)
      p.wait()
    except OSError:
      pass
  del _master_processes[:]
  _master_keys.clear()

  d = ssh_sock(create=False)
  if d:
    try:
      # Remove the temporary directory that held the control socket.
      os.rmdir(os.path.dirname(d))
    except OSError:
      pass

  # We're done with the lock, so we can delete it.
  _master_keys_lock = None
URI_SCP = re.compile(r'^([^@:]*@?[^:/]{1,}):')  # scp-style: [user@]host:path
URI_ALL = re.compile(r'^([a-z][a-z+]*)://([^@/]*@?[^/]*)/')  # scheme://[user@]host[:port]/
def _preconnect(url):
  """Open an ssh control master for url's host when url is ssh-like.

  Returns True when a master is available, False for non-ssh URLs or
  when a master could not be started.
  """
  match = URI_ALL.match(url)
  if match:
    scheme = match.group(1)
    netloc = match.group(2)
    if ':' in netloc:
      host, port = netloc.split(':')
    else:
      host, port = netloc, None
    if scheme in ('ssh', 'git+ssh', 'ssh+git'):
      return _open_ssh(host, port)
    return False

  match = URI_SCP.match(url)
  if match:
    return _open_ssh(match.group(1))

  return False
class Remote(object):
  """Configuration options related to a remote.
  """

  def __init__(self, config, name):
    self._config = config
    self.name = name
    self.url = self._Get('url')
    self.review = self._Get('review')
    self.projectname = self._Get('projectname')
    # A remote may carry several fetch refspecs; parse each of them.
    self.fetch = map(lambda x: RefSpec.FromString(x),
                     self._Get('fetch', all=True))
    # Lazily resolved by the ReviewProtocol property.
    self._review_protocol = None

  def _InsteadOf(self):
    """Apply the user's url.<base>.insteadOf rewrites to self.url.

    Returns the URL rewritten using the longest matching insteadOf
    prefix, or the original URL when no rewrite applies.
    """
    globCfg = GitConfig.ForUser()
    urlList = globCfg.GetSubSections('url')
    longest = ""
    longestUrl = ""

    for url in urlList:
      key = "url." + url + ".insteadOf"
      insteadOfList = globCfg.GetString(key, all=True)

      for insteadOf in insteadOfList:
        if self.url.startswith(insteadOf) \
        and len(insteadOf) > len(longest):
          longest = insteadOf
          longestUrl = url

    if len(longest) == 0:
      return self.url

    return self.url.replace(longest, longestUrl, 1)

  def PreConnectFetch(self):
    """Pre-open an ssh control master for the (rewritten) fetch URL."""
    connectionUrl = self._InsteadOf()
    return _preconnect(connectionUrl)

  @property
  def ReviewProtocol(self):
    """Protocol for talking to the review server: 'ssh' or 'http-post'.

    Probes the server's /ssh_info URL once, caching the result (and the
    ssh host/port, when available) in REVIEW_CACHE.
    """
    if self._review_protocol is None:
      if self.review is None:
        return None

      u = self.review
      if not u.startswith('http:') and not u.startswith('https:'):
        u = 'http://%s' % u
      if u.endswith('/Gerrit'):
        u = u[:len(u) - len('/Gerrit')]
      if not u.endswith('/ssh_info'):
        if not u.endswith('/'):
          u += '/'
        u += 'ssh_info'

      if u in REVIEW_CACHE:
        info = REVIEW_CACHE[u]
        self._review_protocol = info[0]
        self._review_host = info[1]
        self._review_port = info[2]
      else:
        try:
          # /ssh_info replies "host port" in plain text when ssh is on.
          info = urlopen(u).read()
          if info == 'NOT_AVAILABLE':
            raise UploadError('%s: SSH disabled' % self.review)
          if '<' in info:
            # Assume the server gave us some sort of HTML
            # response back, like maybe a login page.
            #
            raise UploadError('%s: Cannot parse response' % u)

          self._review_protocol = 'ssh'
          self._review_host = info.split(" ")[0]
          self._review_port = info.split(" ")[1]
        except urllib2.URLError, e:
          raise UploadError('%s: %s' % (self.review, e.reason[1]))
        except HTTPError, e:
          if e.code == 404:
            # No /ssh_info on this server; fall back to HTTP form upload.
            self._review_protocol = 'http-post'
            self._review_host = None
            self._review_port = None
          else:
            raise UploadError('Upload over ssh unavailable')

        REVIEW_CACHE[u] = (
          self._review_protocol,
          self._review_host,
          self._review_port)
    return self._review_protocol

  def SshReviewUrl(self, userEmail):
    """ssh:// upload URL for the review server, or None if not using ssh."""
    if self.ReviewProtocol != 'ssh':
      return None
    username = self._config.GetString('review.%s.username' % self.review)
    if username is None:
      # Default the ssh user name to the local part of the email address.
      username = userEmail.split("@")[0]
    return 'ssh://%s@%s:%s/%s' % (
      username,
      self._review_host,
      self._review_port,
      self.projectname)

  def ToLocal(self, rev):
    """Convert a remote revision string to something we have locally.
    """
    if IsId(rev):
      return rev
    if rev.startswith(R_TAGS):
      return rev

    if not rev.startswith('refs/'):
      rev = R_HEADS + rev

    for spec in self.fetch:
      if spec.SourceMatches(rev):
        return spec.MapSource(rev)
    raise GitError('remote %s does not have %s' % (self.name, rev))

  def WritesTo(self, ref):
    """True if the remote stores to the tracking ref.
    """
    for spec in self.fetch:
      if spec.DestMatches(ref):
        return True
    return False

  def ResetFetch(self, mirror=False):
    """Set the fetch refspec to its default value.
    """
    if mirror:
      dst = 'refs/heads/*'
    else:
      dst = 'refs/remotes/%s/*' % self.name
    self.fetch = [RefSpec(True, 'refs/heads/*', dst)]

  def Save(self):
    """Save this remote to the configuration.
    """
    self._Set('url', self.url)
    self._Set('review', self.review)
    self._Set('projectname', self.projectname)
    self._Set('fetch', map(lambda x: str(x), self.fetch))

  def _Set(self, key, value):
    # Store under the remote.<name>.* namespace.
    key = 'remote.%s.%s' % (self.name, key)
    return self._config.SetString(key, value)

  def _Get(self, key, all=False):
    # Read from the remote.<name>.* namespace.
    key = 'remote.%s.%s' % (self.name, key)
    return self._config.GetString(key, all = all)
class Branch(object):
  """Configuration options related to a single branch.
  """

  def __init__(self, config, name):
    self._config = config
    self.name = name
    self.merge = self._Get('merge')

    r = self._Get('remote')
    if r:
      self.remote = self._config.GetRemote(r)
    else:
      self.remote = None

  @property
  def LocalMerge(self):
    """Convert the merge spec to a local name.
    """
    if self.remote and self.merge:
      return self.remote.ToLocal(self.merge)
    return None

  def Save(self):
    """Save this branch back into the configuration.
    """
    if self._config.HasSection('branch', self.name):
      if self.remote:
        self._Set('remote', self.remote.name)
      else:
        self._Set('remote', None)
      self._Set('merge', self.merge)

    else:
      # No [branch "<name>"] section yet: append one by hand.
      # NOTE(review): opened in append-binary mode, presumably to avoid
      # newline translation on Windows — confirm.
      fd = open(self._config.file, 'ab')
      try:
        fd.write('[branch "%s"]\n' % self.name)
        if self.remote:
          fd.write('\tremote = %s\n' % self.remote.name)
        if self.merge:
          fd.write('\tmerge = %s\n' % self.merge)
      finally:
        fd.close()

  def _Set(self, key, value):
    # Store under the branch.<name>.* namespace.
    key = 'branch.%s.%s' % (self.name, key)
    return self._config.SetString(key, value)

  def _Get(self, key, all=False):
    # Read from the branch.<name>.* namespace.
    key = 'branch.%s.%s' % (self.name, key)
    return self._config.GetString(key, all = all)
| Python |
#
# Copyright (C) 2008 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import optparse
import sys
import manifest_loader
from error import NoSuchProjectError
class Command(object):
  """Base class for any command line action in repo.
  """

  # True for commands included in the common command summary.
  common = False
  _optparse = None

  def WantPager(self, opt):
    # Subclasses override this to request pager output.
    return False

  @property
  def OptionParser(self):
    """Lazily construct the optparse parser for this command."""
    if self._optparse is None:
      try:
        me = 'repo %s' % self.NAME
        usage = self.helpUsage.strip().replace('%prog', me)
      except AttributeError:
        # Subclass supplied no helpUsage text.
        usage = 'repo %s' % self.NAME
      self._optparse = optparse.OptionParser(usage = usage)
      self._Options(self._optparse)
    return self._optparse

  def _Options(self, p):
    """Initialize the option parser.
    """

  def Usage(self):
    """Display usage and terminate.
    """
    self.OptionParser.print_usage()
    sys.exit(1)

  def Execute(self, opt, args):
    """Perform the action, after option parsing is complete.
    """
    raise NotImplementedError

  @property
  def manifest(self):
    # Convenience accessor for the default (cached) manifest.
    return self.GetManifest()

  def GetManifest(self, reparse=False, type=None):
    # NOTE(review): 'type' shadows the builtin, but renaming it would
    # break callers passing it as a keyword argument.
    return manifest_loader.GetManifest(self.repodir,
                                       reparse=reparse,
                                       type=type)

  def GetProjects(self, args, missing_ok=False):
    """A list of projects that match the arguments.

    Each arg may be a project name or a filesystem path inside a
    project's worktree.  With no args, all (existing) projects are
    returned, sorted by relative path.
    """
    # NOTE(review): 'all' shadows the builtin within this method.
    all = self.manifest.projects
    mp = self.manifest.manifestProject

    if mp.relpath == '.':
      # Manifest project lives at the top dir; expose it like a project.
      all = dict(all)
      all[mp.name] = mp

    result = []

    if not args:
      for project in all.values():
        if missing_ok or project.Exists:
          result.append(project)
    else:
      by_path = None

      for arg in args:
        project = all.get(arg)

        if not project:
          # Not a project name; try to resolve as a path.
          path = os.path.abspath(arg).replace('\\', '/')

          if not by_path:
            # Build the worktree-path index lazily, only when needed.
            by_path = dict()
            for p in all.values():
              by_path[p.worktree] = p

          try:
            project = by_path[path]
          except KeyError:
            # Walk up the directory tree looking for an enclosing
            # project worktree, stopping at the top of the client.
            oldpath = None
            while path \
              and path != oldpath \
              and path != self.manifest.topdir:
              try:
                project = by_path[path]
                break
              except KeyError:
                oldpath = path
                path = os.path.dirname(path)

        if not project:
          raise NoSuchProjectError(arg)
        if not missing_ok and not project.Exists:
          raise NoSuchProjectError(arg)

        result.append(project)

    def _getpath(x):
      return x.relpath
    result.sort(key=_getpath)
    return result
class InteractiveCommand(Command):
  """Command which requires user interaction on the tty and
     must not run within a pager, even if the user asks to.
  """
  def WantPager(self, opt):
    # Interactive commands always bypass the pager.
    return False
class PagedCommand(Command):
  """Command which defaults to output in a pager, as its
     display tends to be larger than one screen full.
  """
  def WantPager(self, opt):
    # Paged commands default to pager output.
    return True
class MirrorSafeCommand(object):
  """Marker mixin: the command permits itself to run within a mirror,
     and does not require a working directory.
  """
| Python |
#
# Copyright (C) 2009 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
from error import ManifestParseError
from editor import Editor
from git_config import GitConfig
from project import MetaProject
class Manifest(object):
  """Base class for any manifest format.

  Subclasses provide real implementations of projects/notice/etc.; the
  base class supplies empty defaults.
  """

  def __init__(self, repodir):
    """Record repo locations and bind the global user configuration.

    Args:
      repodir: path to the .repo directory; its parent becomes topdir.
    """
    self.repodir = os.path.abspath(repodir)
    self.topdir = os.path.dirname(self.repodir)
    self.globalConfig = GitConfig.ForUser()
    Editor.globalConfig = self.globalConfig

    self.repoProject = MetaProject(self, 'repo',
      gitdir = os.path.join(repodir, 'repo/.git'),
      worktree = os.path.join(repodir, 'repo'))

  @property
  def IsMirror(self):
    # True when this client was initialized as a mirror.
    return self.manifestProject.config.GetBoolean('repo.mirror')

  @property
  def projects(self):
    """Mapping of project name to Project; empty in the base class."""
    return {}

  @property
  def notice(self):
    """Optional notice text for the user; None in the base class."""
    return None

  @property
  def manifest_server(self):
    """URL of the manifest server, if any; None in the base class."""
    return None

  def InitBranch(self):
    pass

  def SetMRefs(self, project):
    pass

  def Upgrade_Local(self, old):
    # Modernized from the Python 2-only 'raise E, msg' statement form;
    # the call form behaves identically and is forward compatible.
    raise ManifestParseError('unsupported upgrade path')
| Python |
#
# Copyright (C) 2009 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import sys
from time import time
from trace import IsTrace
_NOT_TTY = not os.isatty(2)  # True when stderr is not a terminal; disables progress output
class Progress(object):
  """Simple textual progress meter written to stderr.

  Output is suppressed when stderr is not a tty (_NOT_TTY) or tracing
  is on (IsTrace()), and only appears after the task has been running
  for at least half a second.
  """

  def __init__(self, title, total=0):
    """
    Args:
      title: label printed before the counters.
      total: expected number of update() increments; 0/unknown shows a
             plain count instead of a percentage.
    """
    self._title = title
    self._total = total
    self._done = 0
    self._lastp = -1
    self._start = time()
    self._show = False

  def update(self, inc=1):
    """Advance the meter by inc and redraw when the display changes."""
    self._done += inc

    if _NOT_TTY or IsTrace():
      return

    if not self._show:
      # Avoid flashing a meter for tasks that finish quickly.
      if 0.5 <= time() - self._start:
        self._show = True
      else:
        return

    if self._total <= 0:
      sys.stderr.write('\r%s: %d, ' % (
        self._title,
        self._done))
      sys.stderr.flush()
    else:
      # Explicit floor division: under Python 3 the old '/' would
      # silently produce a float percentage here.
      p = (100 * self._done) // self._total

      if self._lastp != p:
        self._lastp = p
        sys.stderr.write('\r%s: %3d%% (%d/%d) ' % (
          self._title,
          p,
          self._done,
          self._total))
        sys.stderr.flush()

  def end(self):
    """Print the final count/percentage with a trailing newline."""
    if _NOT_TTY or IsTrace() or not self._show:
      return

    if self._total <= 0:
      sys.stderr.write('\r%s: %d, done. \n' % (
        self._title,
        self._done))
      sys.stderr.flush()
    else:
      p = (100 * self._done) // self._total
      sys.stderr.write('\r%s: %3d%% (%d/%d), done. \n' % (
        self._title,
        p,
        self._done,
        self._total))
      sys.stderr.flush()
| Python |
# Copyright 2012 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# @author: psimakov@google.com (Pavel Simakov)
"""Enforces schema and verifies course files for referential integrity.
Use this script to verify referential integrity of your course definition files
before you import them into the production instance of Google AppEngine.
Here is how to use the script:
- prepare your course files
- edit the data/unit.csv file
- edit the data/lesson.csv file
- edit the assets/js/activity-*.*.js files
- edit the assets/js/assessment-*.js files
- run the script from a command line by navigating to the root
directory of the app and then typing "python tools/verify.py"
- review the report printed to the console for errors and warnings
Good luck!
"""
import csv
import os
import re
import sys
# Sentinel objects used as schema type markers; they are compared by
# identity, never by value.
BOOLEAN = object()
STRING = object()
FLOAT = object()
INTEGER = object()
CORRECT = object()
REGEX = object()

# Declarative schema for the two kinds of course content files:
# 'assessment' is a single {...} map, 'activity' is a list whose items
# are plain strings or one of three question-type maps.
SCHEMA = {
    "assessment": {
        "assessmentName": STRING,
        "preamble": STRING,
        "checkAnswers": BOOLEAN,
        "questionsList": [{
            "questionHTML": STRING,
            "lesson": STRING,
            "choices": [STRING, CORRECT],
            "correctAnswerNumeric": FLOAT,
            "correctAnswerString": STRING,
            "correctAnswerRegex": REGEX}]
    }, "activity": [
        STRING,
        {
            "questionType": "multiple choice",
            "choices": [[STRING, BOOLEAN, STRING]]
        }, {
            "questionType": "multiple choice group",
            "questionsList": [{
                "questionHTML": STRING,
                "choices": [STRING],
                "correctIndex": INTEGER}],
            "allCorrectOutput": STRING,
            "someIncorrectOutput": STRING
        }, {
            "questionType": "freetext",
            "correctAnswerRegex": REGEX,
            "correctAnswerOutput": STRING,
            "incorrectAnswerOutput": STRING,
            "showAnswerOutput": STRING,
            "showAnswerPrompt": STRING,
            "outputHeight": STRING
        }]}

# Expected CSV header rows for data/unit.csv and data/lesson.csv.
UNITS_HEADER = (
    "id,type,unit_id,title,release_date,now_available")
LESSONS_HEADER = (
    "unit_id,unit_title,lesson_id,lesson_title,lesson_activity,"
    "lesson_activity_name,lesson_notes,lesson_video_id,lesson_objectives")

# Markers that exclude an enclosed region of a file from verification.
NO_VERIFY_TAG_NAME_OPEN = "<gcb-no-verify>"
NO_VERIFY_TAG_NAME_CLOSE = "</gcb-no-verify>"

# Verbosity switches for the console report.
OUTPUT_FINE_LOG = False
OUTPUT_DEBUG_LOG = False
class SchemaException(Exception):
  """A class to represent a schema error."""

  def FormatPrimitiveValueName(self, name):
    # Render sentinel markers readably; any other value is returned as-is.
    if name == REGEX: return "REGEX(...)"
    if name == CORRECT: return "CORRECT(...)"
    if name == BOOLEAN: return "BOOLEAN"
    return name

  def FormatPrimitiveTypeName(self, name):
    """Return a human-readable name for one schema type marker."""
    if name == BOOLEAN: return "BOOLEAN"
    if name == REGEX: return "REGEX(...)"
    if name == CORRECT: return "CORRECT(...)"
    if name == STRING or isinstance(name, str): return "STRING"
    if name == FLOAT: return "FLOAT"
    if name == INTEGER: return "INTEGER"
    if isinstance(name, dict): return "{...}"
    if isinstance(name, list): return "[...]"
    return "Unknown type name '%s'" % name.__class__.__name__

  def FormatTypeNames(self, names):
    """Format a single type or a list of types."""
    if isinstance(names, list):
      captions = []
      for name in names:
        captions.append(self.FormatPrimitiveTypeName(name))
      return captions
    else:
      return self.FormatPrimitiveTypeName(names)

  def FormatTypeName(self, types):
    """Format a type container: map keys, list items, or one type."""
    if isinstance(types, dict):
      return self.FormatTypeNames(types.keys())
    if isinstance(types, list):
      return self.FormatTypeNames(types)
    return self.FormatTypeNames([types])

  def __init__(self, message, value=None, types=None, path=None):
    """Interpolate value/types into message and prefix the schema path.

    NOTE(review): truthiness tests mean a falsy value (0, "") is
    treated as absent and skips interpolation — confirm intended.
    """
    prefix = ""
    if path: prefix = "Error at %s\n" % path

    if types:
      if value:
        message = prefix + message % (
            self.FormatPrimitiveValueName(value), self.FormatTypeNames(types))
      else:
        message = prefix + message % self.FormatTypeNames(types)
    else:
      if value:
        message = prefix + message % self.FormatPrimitiveValueName(value)
      else:
        message = prefix + message

    super(SchemaException, self).__init__(message)
class Context(object):
  """Tracks the stack of schema element names visited during traversal."""

  def __init__(self):
    self.parent = None
    self.path = ["/"]

  def New(self, names):
    """Return a child context whose path extends this one by names."""
    child = Context()
    child.parent = self
    child.path = list(self.path)
    if names:
      name_list = names if isinstance(names, list) else [names]
      for name in name_list:
        if name:
          child.path.append("/%s" % name)
    return child

  def FormatPath(self):
    """Return the canonical slash-joined name of this context."""
    return "".join(self.path)
class SchemaHelper(object):
  """A class that knows how to apply the schema."""

  def __init__(self):
    # Counts visits per type name; self.parse_log is created later by
    # CheckInstancesMatchSchema before traversal begins.
    self.type_stats = {}

  def VisitElement(self, atype, value, context, is_terminal=True):
    """"This method is called once for each schema element being traversed."""
    # NOTE(review): dict.has_key is Python 2 only.
    if self.type_stats.has_key(atype):
      count = self.type_stats[atype]
    else:
      count = 0
    self.type_stats[atype] = count + 1

    if is_terminal:
      self.parse_log.append(" TERMINAL: %s %s = %s" % (
          atype, context.FormatPath(), value))
    else:
      self.parse_log.append(" NON-TERMINAL: %s %s" % (
          atype, context.FormatPath()))

  def ExtractAllTermsToDepth(self, key, values, type_map):
    """Walks schema recursively and creates a list of all known terms."""

    """Walks schema type map recursively to depth and creates a list of all
    possible {key: value} pairs. The latter is the list of all non-terminal
    and terminal terms allowed in the schema. The list of terms from this
    method can be bound to an execution context for evaluating whether a given
    instance's map complies with the schema."""

    if key: type_map.update({key: key})

    if values == REGEX:
      type_map.update({"regex": lambda x: REGEX})
      return

    if values == CORRECT:
      type_map.update({"correct": lambda x: CORRECT})
      return

    if values == BOOLEAN:
      type_map.update({"true": BOOLEAN, "false": BOOLEAN})
      return

    if values == STRING or values == INTEGER:
      return

    if isinstance(values, dict):
      for new_key, new_value in values.items():
        self.ExtractAllTermsToDepth(new_key, new_value, type_map)
      return

    if isinstance(values, list):
      for new_value in values:
        self.ExtractAllTermsToDepth(None, new_value, type_map)
      return

  def FindSelectors(self, type_map):
    """Finds all type selectors."""

    """Finds all elements in the type map where both a key and a value are
    strings. These elements are used to find one specific type map among
    several alternative type maps."""
    selector = {}
    for akey, avalue in type_map.items():
      if isinstance(akey, str) and isinstance(avalue, str):
        selector.update({akey: avalue})
    return selector

  def FindCompatibleDict(self, value_map, type_map, context):
    """Find the type map most compatible with the value map."""

    """"A value map is considered compatible with a type map when former
    contains the same key names and the value types as the type map."""

    # special case when we have just one type; check name and type are the same
    if len(type_map) == 1:
      for value_key in value_map.keys():
        for key in type_map[0].keys():
          if value_key == key: return key, type_map[0]
      # NOTE(review): type_map[0].keys()[0] relies on Python 2 keys()
      # returning a list; under Python 3 it is not subscriptable.
      raise SchemaException(
          "Expected: '%s'\nfound: %s", type_map[0].keys()[0], value_map)

    # case when we have several types to choose from
    for adict in type_map:
      dict_selector = self.FindSelectors(adict)
      for akey, avalue in dict_selector.items():
        if value_map[akey] == avalue: return akey, adict
    return None, None

  def CheckSingleValueMatchesType(self, value, atype, context):
    """Checks if a single value matches a specific (primitive) type."""
    if atype == BOOLEAN:
      if (value == "True") or (value == "False") or (value == "true") or (
          value == "false") or (isinstance(value, bool)) or value == BOOLEAN:
        self.VisitElement("BOOLEAN", value, context)
        return True
      else:
        raise SchemaException("Expected: 'true' or 'false'\nfound: %s", value)
    if isinstance(atype, str):
      # A literal string in the schema acts as a selector; any string
      # value is accepted here.
      if isinstance(value, str):
        self.VisitElement("str", value, context)
        return True
      else:
        raise SchemaException("Expected: 'string'\nfound: %s", value)
    if atype == STRING:
      if isinstance(value, str):
        self.VisitElement("STRING", value, context)
        return True
      else:
        raise SchemaException("Expected: 'string'\nfound: %s", value)
    if atype == REGEX and value == REGEX:
      self.VisitElement("REGEX", value, context)
      return True
    if atype == CORRECT and value == CORRECT:
      self.VisitElement("CORRECT", value, context)
      return True
    if atype == FLOAT:
      if IsNumber(value):
        self.VisitElement("NUMBER", value, context)
        return True
      else:
        raise SchemaException("Expected: 'number'\nfound: %s", value)
    if atype == INTEGER:
      if IsInteger(value):
        self.VisitElement("INTEGER", value, context)
        return True
      else:
        raise SchemaException(
            "Expected: 'integer'\nfound: %s", value, path=context.FormatPath())
    raise SchemaException(
        "Unexpected value '%s'\n"
        "for type %s", value, atype, path=context.FormatPath())

  def CheckValueListMatchesType(self, value, atype, context):
    """Checks if all items in value list match a specific type."""
    for value_item in value:
      found = False
      for atype_item in atype:
        if isinstance(atype_item, list):
          for atype_item_item in atype_item:
            if self.DoesValueMatchType(value_item, atype_item_item, context):
              found = True
              break
        else:
          if self.DoesValueMatchType(value_item, atype_item, context):
            found = True
            break
      if not found:
        raise SchemaException("Expected: '%s'\nfound: %s", atype, value)
    return True

  def CheckValueMatchesType(self, value, atype, context):
    """Checks if single value or a list of values match a specific type."""
    if isinstance(atype, list) and isinstance(value, list):
      return self.CheckValueListMatchesType(value, atype, context)
    else:
      return self.CheckSingleValueMatchesType(value, atype, context)

  def DoesValueMatchType(self, value, atype, context):
    """Same as other method, but does not throw an exception."""
    try:
      return self.CheckValueMatchesType(value, atype, context)
    except SchemaException:
      return False

  def DoesValueMatchesOneOfTypes(self, value, types, context):
    """Checks if a value matches to one of the types in the list."""
    type_names = None
    if isinstance(types, list):
      type_names = types
    if type_names:
      for i in range(0, len(type_names)):
        if self.DoesValueMatchType(value, type_names[i], context):
          return True

    return False

  def DoesValueMatchMapOfType(self, value, types, context):
    """Checks if value matches any variation of {...} type."""
    # find all possible map types
    maps = []
    for atype in types:
      if isinstance(atype, dict): maps.append(atype)
    if len(maps) == 0 and isinstance(types, dict):
      maps.append(types)

    # check if the structure of value matches one of the maps
    if isinstance(value, dict):
      aname, adict = self.FindCompatibleDict(value, maps, context)
      if adict:
        self.VisitElement("dict", value, context.New(aname), False)
        for akey, avalue in value.items():
          if not adict.has_key(akey):
            raise SchemaException(
                "Unknown term '%s'", akey, path=context.FormatPath())
          self.CheckValueOfValidType(
              avalue, adict[akey], context.New([aname, akey]))
        return True
      raise SchemaException(
          "The value:\n %s\n"
          "is incompatible with expected type(s):\n %s",
          value, types, path=context.FormatPath())

    # NOTE(review): unreachable after the raise/returns above when value
    # is a dict; only reached for non-dict values.
    return False

  def FormatNameWithIndex(self, alist, aindex):
    """custom function to format a context name with an array element index."""
    if len(alist) == 1:
      return ""
    else:
      return "[%s]" % aindex

  def DoesValueMatchListOfTypesInOrder(self, value, types, context, target):
    """Iterates the value and the types in given order and checks for match."""
    all_values_are_lists = True
    for avalue in value:
      if not isinstance(avalue, list):
        all_values_are_lists = False

    if all_values_are_lists:
      for i in range(0, len(value)):
        self.CheckValueOfValidType(value[i], types, context.New(
            self.FormatNameWithIndex(value, i)), True)
    else:
      if len(target) != len(value):
        # NOTE(review): '%' binds tighter than '+', so only the second
        # fragment is %-formatted with value here; the leading '%s' is
        # later filled from the SchemaException arguments — confirm the
        # resulting message is as intended.
        raise SchemaException(
            "Expected: '%s' values\n" + "found: %s." % value,
            len(target), path=context.FormatPath())
      for i in range(0, len(value)):
        self.CheckValueOfValidType(value[i], target[i], context.New(
            self.FormatNameWithIndex(value, i)))
    return True

  def DoesValueMatchListOfTypesAnyOrder(self, value, types, context, lists):
    """Iterates the value and types and checks if they match in any order."""
    target = lists
    if len(target) == 0:
      if not isinstance(types, list):
        raise SchemaException(
            "Unsupported type %s",
            None, types, path=context.FormatPath())
      target = types

    for i in range(0, len(value)):
      found = False
      for atarget in target:
        try:
          self.CheckValueOfValidType(value[i], atarget, context.New(
              self.FormatNameWithIndex(value, i)))
          found = True
          break
        # NOTE(review): bare 'except:' also swallows KeyboardInterrupt.
        except:
          continue

      if not found:
        raise SchemaException(
            "The value:\n %s\n"
            "is incompatible with expected type(s):\n %s",
            value, types, path=context.FormatPath())
    return True

  def DoesValueMatchListOfType(self, value, types, context, in_order):
    """Checks if a value matches a variation of [...] type."""

    """Extra argument controls whether matching must be done in a specific or
    in any order. A specific order is demanded by [[...]]] construct,
    i.e. [[STRING, INTEGER, BOOLEAN]], while sub elements inside {...} and
    [...] can be matched in any order."""

    # prepare a list of list types
    lists = []
    for atype in types:
      if isinstance(atype, list): lists.append(atype)
    if len(lists) > 1:
      raise SchemaException(
          "Unable to validate types with multiple alternative "
          "lists %s", None, types, path=context.FormatPath())

    if isinstance(value, list):
      # NOTE(review): this duplicate len(lists) > 1 check is unreachable
      # given the identical check above.
      if len(lists) > 1:
        raise SchemaException(
            "Allowed at most one list\nfound: %s.",
            None, types, path=context.FormatPath())

      # determine if list is in order or not as hinted by double array [[..]];
      # [STRING, NUMBER] is in any order, but [[STRING, NUMBER]] demands order
      ordered = len(lists) == 1 and isinstance(types, list)
      if in_order or ordered:
        # NOTE(review): when in_order is True but lists is empty this
        # raises IndexError on lists[0] — confirm callers never do that.
        return self.DoesValueMatchListOfTypesInOrder(
            value, types, context, lists[0])
      else:
        return self.DoesValueMatchListOfTypesAnyOrder(
            value, types, context, lists)
    return False

  def CheckValueOfValidType(self, value, types, context, in_order=None):
    """Check if a value matches any of the given types."""
    if not (isinstance(types, list) or isinstance(types, dict)):
      self.CheckValueMatchesType(value, types, context)
      return
    if self.DoesValueMatchListOfType(value, types, context, in_order): return
    if self.DoesValueMatchMapOfType(value, types, context): return
    if self.DoesValueMatchesOneOfTypes(value, types, context): return

    raise SchemaException("Unknown type %s", value, path=context.FormatPath())

  def CheckInstancesMatchSchema(self, values, types, name):
    """Recursively decomposes 'values' to see if they match schema (types)."""
    self.parse_log = []
    context = Context().New(name)
    self.parse_log.append(" ROOT %s" % context.FormatPath())

    # handle {..} containers
    if isinstance(types, dict):
      if not isinstance(values, dict):
        # NOTE(review): '__class_' is a typo for '__class__'; this error
        # path raises AttributeError instead of the intended message.
        raise SchemaException("Error at '/': expected {...}, found %s" % (
            values.__class_.__name__))
      self.CheckValueOfValidType(values, types, context.New([]))
      return

    # handle [...] containers
    if isinstance(types, list):
      if not isinstance(values, list):
        # NOTE(review): same '__class_' typo as above.
        raise SchemaException("Error at '/': expected [...], found %s" % (
            values.__class_.__name__))
      for i in range(0, len(values)):
        self.CheckValueOfValidType(
            values[i], types, context.New("[%s]" % i))
      return

    raise SchemaException(
        "Expected an array or a dictionary.", None, path=context.FormatPath())
def escapeQuote(value):
  """Backslash-escape single quotes so value fits in a JS '...' string."""
  text = str(value)
  return text.replace("'", "\\'")
class Unit(object):
  """A class to represent a Unit."""

  def __init__(self):
    self.id = 0
    self.type = ""
    self.unit_id = ""
    self.title = ""
    self.release_date = ""
    self.now_available = False

  def ListProperties(self, name, output):
    """Append one JavaScript assignment statement per property."""
    output.append("%s['id'] = %s;" % (name, self.id))
    for prop, value in (("type", self.type),
                        ("unit_id", self.unit_id),
                        ("title", self.title),
                        ("release_date", self.release_date)):
      output.append("%s['%s'] = '%s';" % (name, prop, escapeQuote(value)))
    output.append("%s['now_available'] = %s;" % (
        name, str(self.now_available).lower()))
class Lesson(object):
  """A class to represent a Lesson."""

  def __init__(self):
    self.unit_id = 0
    self.unit_title = ""
    self.lesson_id = 0
    self.lesson_title = ""
    self.lesson_activity = ""
    self.lesson_activity_name = ""
    self.lesson_notes = ""
    self.lesson_video_id = ""
    self.lesson_objectives = ""

  def ListProperties(self, name, output):
    """Append one JavaScript assignment statement per property."""
    activity = "true" if self.lesson_activity == "yes" else "false"
    rows = [
        ("unit_id", str(self.unit_id)),
        ("unit_title", "'%s'" % escapeQuote(self.unit_title)),
        ("lesson_id", str(self.lesson_id)),
        ("lesson_title", "'%s'" % escapeQuote(self.lesson_title)),
        ("lesson_activity", activity),
        ("lesson_activity_name", "'%s'" % escapeQuote(self.lesson_activity_name)),
        ("lesson_notes", "'%s'" % escapeQuote(self.lesson_notes)),
        ("lesson_video_id", "'%s'" % escapeQuote(self.lesson_video_id)),
        ("lesson_objectives", "'%s'" % escapeQuote(self.lesson_objectives)),
    ]
    for prop, rendered in rows:
      output.append("%s['%s'] = %s;" % (name, prop, rendered))

  def ToIdString(self):
    """Return the 'unit.lesson.title' identity string for this lesson."""
    return ".".join([str(self.unit_id), str(self.lesson_id),
                     str(self.lesson_title)])
class Assessment(object):
  """A class to represent a Assessment."""

  def __init__(self):
    # Bind every schema term under 'assessment' into this object's
    # evaluation scope.
    self.scope = {}
    SchemaHelper().ExtractAllTermsToDepth(
        "assessment", SCHEMA["assessment"], self.scope)
class Activity(object):
  """A class to represent a Activity."""

  def __init__(self):
    # Bind every schema term under 'activity' into this object's
    # evaluation scope.
    self.scope = {}
    SchemaHelper().ExtractAllTermsToDepth(
        "activity", SCHEMA["activity"], self.scope)
def Echo(x):
  """Print x to stdout.

  Uses the call form, which behaves identically to the old 'print x'
  statement in Python 2 and is also valid Python 3.
  """
  print(x)
def IsInteger(s):
  """True if s parses as a whole number ("3" yes; "3.5" and "3.0" no)."""
  try:
    whole = int(s)
    exact = float(s)
  except ValueError:
    return False
  return whole == exact
def IsBoolean(s):
  """True if s is exactly the string "True" or "False".

  The original wrapped the comparison in try/except ValueError, but
  '==' on strings cannot raise ValueError — the guard was dead code.
  """
  return s in ("True", "False")
def IsNumber(s):
  """True if s can be parsed as a float."""
  try:
    float(s)
  except ValueError:
    return False
  return True
def IsOneOf(value, values):
  """True if value equals any element of values.

  Uses the 'in' operator, which performs the same '==' comparisons the
  original hand-rolled loop did.
  """
  return value in values
def TextToLineNumberedText(text):
  """Adds line numbers to the provided text.

  Each line is prefixed with '<n>: '; continuation lines are joined
  with a newline plus one space of indent (matching the original
  output format).
  """
  numbered = ["%s: %s" % (n, line)
              for n, line in enumerate(text.split("\n"), 1)]
  return "\n ".join(numbered)
def SetObjectAttributes(target_object, names, values):
  """Sets object attributes from provided values.

  Args:
    target_object: the object whose attributes are assigned.
    names: attribute names, parallel to values.
    values: raw (typically CSV string) values; integers and booleans are
        coerced to their Python types before assignment.

  Raises:
    SchemaException: when names and values differ in length.
  """
  if len(names) != len(values):
    raise SchemaException(
        "The number of elements must match: %s and %s" % (names, values))
  for i in range(0, len(names)):
    if IsInteger(values[i]):
      # if we are setting an attribute of an object that support metadata, try
      # to infer the target type and convert 'int' into 'str' here
      target_type = None
      if hasattr(target_object.__class__, names[i]):
        target_type = getattr(target_object.__class__, names[i]).data_type.__name__
      if target_type and (target_type == 'str' or target_type == 'basestring'):
        setattr(target_object, names[i], str(values[i]))
      else:
        setattr(target_object, names[i], int(values[i]))
      continue
    if IsBoolean(values[i]):
      # BUG FIX: the original used bool(values[i]), and bool("False") is True
      # because any non-empty string is truthy. Compare against the literal so
      # the string "False" maps to the boolean False.
      setattr(target_object, names[i], values[i] == "True")
      continue
    setattr(target_object, names[i], values[i])
def ReadObjectsFromCsvFile(fname, header, new_object):
  """Reads objects from a CSV file; see ReadObjectsFromCsv for details."""
  # Use a context manager so the handle is closed deterministically; the
  # original left the file open until garbage collection.
  with open(fname) as stream:
    return ReadObjectsFromCsv(csv.reader(stream), header, new_object)
def ReadObjectsFromCsv(value_rows, header, new_object):
  """Converts CSV rows into objects created by the new_object factory.

  Args:
    value_rows: an iterable of CSV rows; blank rows are skipped.
    header: the expected comma-separated header line.
    new_object: zero-argument factory producing one object per data row.

  Returns:
    A list of populated objects, one per non-empty data row.

  Raises:
    SchemaException: when the header or any row does not match expectations.
  """
  values = []
  for row in value_rows:
    if len(row) == 0:
      continue
    values.append(row)
  names = header.split(",")
  # Guard: completely empty input previously crashed with IndexError on
  # values[0]; report it as a schema problem instead.
  if not values:
    raise SchemaException(
        "Error reading CSV header.\n  "
        "The input contained no rows; expected header row: %s" % names)
  if names != values[0]:
    raise SchemaException(
        "Error reading CSV header.\n  "
        "Header row had %s element(s): %s\n  "
        "Expected header row with %s element(s): %s" % (
        len(values[0]), values[0], len(names), names))
  items = []
  for i in range(1, len(values)):
    if len(names) != len(values[i]):
      raise SchemaException(
          "Error reading CSV data row.\n  "
          "Row #%s had %s element(s): %s\n  "
          "Expected %s element(s): %s" % (
          i, len(values[i]), values[i], len(names), names))
    item = new_object()
    SetObjectAttributes(item, names, values[i])
    items.append(item)
  return items
def EscapeJavascriptRegex(text):
  """Rewrites JavaScript regex literals `: /.../flags` into `: regex("...")`.

  This lets the JS-to-Python converter keep regex values evaluable.
  """
  literal = re.compile(r"([:][ ]*)([/])(.*)([/][ismx]*)")
  return literal.sub(r': regex("\2\3\4")', text)
def RemoveJavaScriptSingleLineComment(text):
  """Strips // comments, both trailing and whole-line.

  The trailing pattern requires a space before //, which keeps URLs such
  as http://example.com intact.
  """
  trailing = re.compile("^(.*?)[ ]+//(.*)$", re.MULTILINE)
  whole_line = re.compile("^//(.*)$", re.MULTILINE)
  return whole_line.sub(r"", trailing.sub(r"\1", text))
def RemoveJavaScriptMultiLineComment(text):
  """Strips /* ... */ comments; DOTALL lets a comment span lines."""
  block_comment = re.compile(r"/\*(.*)\*/", re.MULTILINE | re.DOTALL)
  return block_comment.sub("", text)
def RemoveContentMarkedNoVerify(content):
  """Removes content that should not be verified.

  Free-form JavaScript in an activity file must be placed between
  //<gcb-no-verify> ... //</gcb-no-verify> tags so the verifier can
  selectively ignore it; this strips the tags and everything between them.
  """
  no_verify = re.compile(
      "(%s)(.*)(%s)" % (NO_VERIFY_TAG_NAME_OPEN, NO_VERIFY_TAG_NAME_CLOSE),
      re.DOTALL)
  return no_verify.sub("", content)
def ConvertJavaScriptToPython(content, root_name):
  """Removes JavaScript-specific constructs so the text evaluates as Python.

  Order matters: no-verify blocks go first, then comments, then the 'var'
  keyword on the root assignment, and finally regex literals.
  """
  for transform in (RemoveContentMarkedNoVerify,
                    RemoveJavaScriptMultiLineComment,
                    RemoveJavaScriptSingleLineComment):
    content = transform(content)
  content = content.replace("var %s = " % root_name, "%s = " % root_name)
  return EscapeJavascriptRegex(content)
def ConvertJavaScriptFileToPython(fname, root_name):
  """Reads fname and converts its JavaScript content to evaluable Python."""
  # 'with' guarantees the handle is closed; the original leaked it until GC.
  # read() is equivalent to the old "".join(readlines()).
  with open(fname, "r") as stream:
    return ConvertJavaScriptToPython(stream.read(), root_name)
def EvaluatePythonExpressionFromText(content, root_name, scope):
  """Compiles and evaluates a Python script in a restricted environment.

  Args:
    content: the Python source text to execute.
    root_name: the binding that must exist (and be truthy) after execution.
    scope: initial bindings made visible to the script.

  Returns:
    The resulting bindings dict when evaluation completed.

  Raises:
    Exception: when root_name is absent or falsy after execution.
  """
  # create a new execution scope that has only the schema terms defined;
  # remove all other languages constructs including __builtins__
  restricted_scope = {}
  restricted_scope.update(scope)
  restricted_scope.update({"__builtins__": {}})
  code = compile(content, "<string>", "exec")
  # Two-argument exec() form works in both Python 2 and 3, unlike the old
  # 'exec code in restricted_scope' statement syntax.
  exec(code, restricted_scope)
  # .get avoids a bare KeyError when root_name was never assigned, so the
  # intended diagnostic below is actually reported.
  if not restricted_scope.get(root_name):
    raise Exception("Unable to find '%s'" % root_name)
  return restricted_scope
def EvaluateJavaScriptExpressionFromFile(fname, root_name, scope, error):
  """Converts a JS file to Python, evaluates it, and returns the bindings.

  On failure the problem is reported through error() with a line-numbered
  dump of the converted source, and None is returned implicitly.
  """
  content = ConvertJavaScriptFileToPython(fname, root_name)
  try:
    return EvaluatePythonExpressionFromText(content, root_name, scope)
  except Exception:
    # The original bare 'except:' also swallowed SystemExit and
    # KeyboardInterrupt; catching Exception keeps the diagnostics while
    # letting those propagate.
    error("Unable to parse %s in file %s\n  %s" % (
        root_name, fname, TextToLineNumberedText(content)))
    for message in sys.exc_info():
      error(str(message))
class Verifier(object):
  """A class that verifies all course content."""
  """A class that knows how to verify Units, Lessons, Assessment and Activities,
  and understands their relationships."""
  def __init__(self):
    # Shared schema checker; its parse log is included in error reports.
    self.schema_helper = SchemaHelper()
    self.errors = 0
    self.warnings = 0
  def VerifyUnitFields(self, units):
    """Validates each unit's fields and appends its JS export lines."""
    self.export.append("units = Array();")
    for unit in units:
      if not IsOneOf(unit.now_available, [True, False]):
        self.error("Bad now_available '%s' for unit id %s; expected 'True' or 'False'" % (
          unit.now_available, unit.id))
      if not IsOneOf(unit.type, ["U", "A", "O"]):
        self.error("Bad type '%s' for unit id %s; expected 'U', 'A', or 'O'" % (
          unit.type, unit.id))
      if unit.type == "A":
        # assessments are identified by course stage, not by number
        if not IsOneOf(unit.unit_id, ("Pre", "Mid", "Fin")):
          self.error(
            "Bad unit_id '%s'; expected 'Pre', 'Mid' or 'Fin' for unit id %s"
            % (unit.unit_id, unit.id))
      if unit.type == "U":
        if not IsInteger(unit.unit_id):
          self.error("Expected integer unit_id, found %s in unit id %s" % (
            unit.unit_id, unit.id))
      self.export.append("")
      self.export.append("units[%s] = Array();" % unit.id)
      self.export.append("units[%s]['lessons'] = Array();" % unit.id)
      unit.ListProperties("units[%s]" % unit.id, self.export)
  def VerifyLessonFields(self, lessons):
    """Validates each lesson's fields and appends its JS export lines."""
    for lesson in lessons:
      if not IsOneOf(lesson.lesson_activity, ["yes", ""]):
        self.error("Bad lesson_activity '%s' for lesson_id %s" %
          (lesson.lesson_activity, lesson.lesson_id))
      self.export.append("")
      self.export.append("units[%s]['lessons'][%s] = Array();" % (
        lesson.unit_id, lesson.lesson_id))
      lesson.ListProperties("units[%s]['lessons'][%s]" % (
        lesson.unit_id, lesson.lesson_id), self.export)
  def VerifyUnitLessonRelationships(self, units, lessons):
    """Checks how units relate to lessons and otherwise."""
    """Checks that each lesson points to a valid unit and all lessons are used
    by one of the units."""
    used_lessons = []
    units.sort(key=lambda x: x.id)
    for i in range(0, len(units)):
      unit = units[i]
      # check that unit ids are 1-based and sequential
      if unit.id != i + 1:
        self.error("Unit out of order: %s" % (unit.id))
      # get the list of lessons for each unit
      self.fine("Unit %s: %s" % (unit.id, unit.title))
      unit_lessons = []
      for lesson in lessons:
        if lesson.unit_id == unit.unit_id:
          unit_lessons.append(lesson)
          used_lessons.append(lesson)
      # inspect all lessons for the current unit
      unit_lessons.sort(key=lambda x: x.lesson_id)
      for j in range(0, len(unit_lessons)):
        lesson = unit_lessons[j]
        # check that lesson_ids are 1-based and sequential
        if lesson.lesson_id != j + 1:
          self.warn(
            "Lesson lesson_id is out of order: expected %s, found %s (%s)"
            % (j + 1, lesson.lesson_id, lesson.ToIdString()))
        self.fine("  Lesson %s: %s" % (lesson.lesson_id, lesson.lesson_title))
    # find lessons not used by any of the units
    unused_lessons = list(lessons)
    for lesson in used_lessons:
      unused_lessons.remove(lesson)
    for lesson in unused_lessons:
      self.warn("Unused lesson_id %s (%s)" % (
        lesson.lesson_id, lesson.ToIdString()))
    # check all lessons point to known units
    for lesson in lessons:
      has = False
      for unit in units:
        if lesson.unit_id == unit.unit_id:
          has = True
          break
      if not has:
        self.error("Lesson has unknown unit_id %s (%s)" %
          (lesson.unit_id, lesson.ToIdString()))
  def VerifyActivities(self, lessons):
    """Loads and verifies all activities."""
    self.info("Loading activities:")
    count = 0
    for lesson in lessons:
      if lesson.lesson_activity == "yes":
        count += 1
        # activity files live next to this script under ../assets/js/
        fname = os.path.join(
          os.path.dirname(__file__),
          "../assets/js/activity-" + str(lesson.unit_id) + "." +
          str(lesson.lesson_id) + ".js")
        if not os.path.exists(fname):
          self.error("  Missing activity: %s" % fname)
        else:
          activity = EvaluateJavaScriptExpressionFromFile(
            fname, "activity", Activity().scope, self.error)
          self.VerifyActivityInstance(activity, fname)
    self.info("Read %s activities" % count)
  def VerifyAssessment(self, units):
    """Loads and verifies all assessments."""
    self.info("Loading assessment:")
    count = 0
    for unit in units:
      if unit.type == "A":
        count += 1
        fname = os.path.join(
          os.path.dirname(__file__),
          "../assets/js/assessment-" + str(unit.unit_id) + ".js")
        if not os.path.exists(fname):
          self.error("  Missing assessment: %s" % fname)
        else:
          assessment = EvaluateJavaScriptExpressionFromFile(
            fname, "assessment", Assessment().scope, self.error)
          self.VerifyAssessmentInstance(assessment, fname)
    self.info("Read %s assessments" % count)
  def FormatParseLog(self):
    """Returns the schema helper's parse log as one printable string."""
    return "Parse log:\n%s" % "\n".join(self.schema_helper.parse_log)
  def VerifyAssessmentInstance(self, scope, fname):
    """Verifies compliance of assessment with schema."""
    if scope:
      try:
        self.schema_helper.CheckInstancesMatchSchema(
          scope["assessment"], SCHEMA["assessment"], "assessment")
        self.info("  Verified assessment %s" % fname)
        if OUTPUT_DEBUG_LOG: self.info(self.FormatParseLog())
      except SchemaException as e:
        self.error("  Error in assessment %s\n%s" % (
          fname, self.FormatParseLog()))
        raise e
    else:
      self.error("  Unable to evaluate 'assessment =' in %s" % fname)
  def VerifyActivityInstance(self, scope, fname):
    """Verifies compliance of activity with schema."""
    if scope:
      try:
        self.schema_helper.CheckInstancesMatchSchema(
          scope["activity"], SCHEMA["activity"], "activity")
        self.info("  Verified activity %s" % fname)
        if OUTPUT_DEBUG_LOG: self.info(self.FormatParseLog())
      except SchemaException as e:
        self.error("  Error in activity %s\n%s" % (
          fname, self.FormatParseLog()))
        raise e
    else:
      self.error("  Unable to evaluate 'activity =' in %s" % fname)
  def fine(self, x):
    # most verbose level; emitted only when OUTPUT_FINE_LOG is enabled
    if OUTPUT_FINE_LOG:
      self.echo_func("FINE: " + x)
  def info(self, x):
    self.echo_func("INFO: " + x)
  def warn(self, x):
    # warnings are counted but do not fail verification
    self.warnings += 1
    self.echo_func("WARNING: " + x)
  def error(self, x):
    self.errors += 1
    self.echo_func("ERROR: " + x)
  def LoadAndVerifyModel(self, echo_func):
    """Loads, parses and verifies all content for a course.

    Returns the number of errors found, so callers can fail on non-zero.
    """
    self.echo_func = echo_func
    self.export = [];
    self.info("Started verification in: %s" % __file__)
    unit_file = os.path.join(os.path.dirname(__file__), "../data/unit.csv")
    lesson_file = os.path.join(os.path.dirname(__file__), "../data/lesson.csv")
    self.info("Loading units from: %s" % unit_file)
    units = ReadObjectsFromCsvFile(unit_file, UNITS_HEADER, lambda: Unit())
    self.info("Read %s units" % len(units))
    self.info("Loading lessons from: %s" % lesson_file)
    lessons = ReadObjectsFromCsvFile(lesson_file, LESSONS_HEADER, lambda: Lesson())
    self.info("Read %s lessons" % len(lessons))
    self.VerifyUnitFields(units)
    self.VerifyLessonFields(lessons)
    self.VerifyUnitLessonRelationships(units, lessons)
    try:
      self.VerifyActivities(lessons)
      self.VerifyAssessment(units)
    except SchemaException as e:
      self.error(str(e))
    self.info("Schema usage statistics: %s" % self.schema_helper.type_stats)
    self.info("Completed verification: %s warnings, %s errors." %
      (self.warnings, self.errors))
    return self.errors
def RunAllRegexUnitTests():
  """Self-tests for the JavaScript comment and regex rewriting helpers."""
  assert EscapeJavascriptRegex(
      "blah regex: /site:bls.gov?/i, blah") == (
      "blah regex: regex(\"/site:bls.gov?/i\"), blah")
  assert EscapeJavascriptRegex(
      "blah regex: /site:http:\/\/www.google.com?q=abc/i, blah") == (
      "blah regex: regex(\"/site:http:\/\/www.google.com?q=abc/i\"), blah")
  assert RemoveJavaScriptMultiLineComment(
      "blah\n/*\ncomment\n*/\nblah") == "blah\n\nblah"
  assert RemoveJavaScriptMultiLineComment(
      "blah\nblah /*\ncomment\nblah */\nblah") == (
      "blah\nblah \nblah")
  assert RemoveJavaScriptSingleLineComment(
      "blah\n// comment\nblah") == "blah\n\nblah"
  # URLs must survive: '//' without a leading space is not a comment
  assert RemoveJavaScriptSingleLineComment(
      "blah\nblah http://www.foo.com\nblah") == (
      "blah\nblah http://www.foo.com\nblah")
  assert RemoveJavaScriptSingleLineComment(
      "blah\nblah // comment\nblah") == "blah\nblah\nblah"
  assert RemoveJavaScriptSingleLineComment(
      "blah\nblah // comment http://www.foo.com\nblah") == "blah\nblah\nblah"
  assert RemoveContentMarkedNoVerify(
      "blah1\n// <gcb-no-verify>/blah2\n// </gcb-no-verify>\nblah3") == (
      "blah1\n// \nblah3")
def RunAllSchemaHelperUnitTests():
  """Self-tests for SchemaHelper instance/schema matching and CSV loading."""
  def AssertSame(a, b):
    """Raises unless a equals b."""
    if a != b:
      raise Exception("Expected:\n  %s\nFound:\n  %s" % (a, b))
  def AssertPass(instances, types, expected_result=None):
    """Asserts that instances match the schema in types."""
    try:
      schema_helper = SchemaHelper()
      result = schema_helper.CheckInstancesMatchSchema(instances, types, "test")
      if OUTPUT_DEBUG_LOG: print "\n".join(schema_helper.parse_log)
      if expected_result: AssertSame(expected_result, result)
    except SchemaException as e:
      if OUTPUT_DEBUG_LOG:
        print str(e)
        print "\n".join(schema_helper.parse_log)
      raise
  def AssertFails(func):
    """Asserts that func raises SchemaException."""
    try:
      func()
      raise Exception("Expected to fail")
    except SchemaException as e:
      if OUTPUT_DEBUG_LOG:
        print str(e)
      pass
  def AssertFail(instances, types):
    """Asserts that instances do NOT match the schema in types."""
    AssertFails(lambda: AssertPass(instances, types))
  # CSV tests
  ReadObjectsFromCsv([["id", "type"], [1, "none"]], "id,type", lambda: Unit())
  AssertFails(lambda: ReadObjectsFromCsv(
      [["id", "type"], [1, "none"]], "id,type,title", lambda: Unit()))
  AssertFails(lambda: ReadObjectsFromCsv(
      [["id", "type", "title"], [1, "none"]], "id,type,title", lambda: Unit()))
  # context tests
  AssertSame(
      Context().New([]).New(["a"]).New(["b", "c"]).FormatPath(), ("//a/b/c"))
  # simple map tests
  AssertPass({"name": "Bob"}, {"name": STRING}, None)
  AssertFail("foo", "bar")
  AssertFail({"name": "Bob"}, {"name": INTEGER})
  AssertFail({"name": 12345}, {"name": STRING})
  AssertFail({"amount": 12345}, {"name": INTEGER})
  AssertFail({"regex": CORRECT}, {"regex": REGEX})
  AssertPass({"name": "Bob"}, {"name": STRING, "phone": STRING})
  AssertPass({"name": "Bob"}, {"phone": STRING, "name": STRING})
  AssertPass({"name": "Bob"},
      {"phone": STRING, "name": STRING, "age": INTEGER})
  # mixed attributes tests
  AssertPass({"colors": ["red", "blue"]}, {"colors": [STRING]})
  AssertPass({"colors": []}, {"colors": [STRING]})
  AssertFail({"colors": {"red": "blue"}}, {"colors": [STRING]})
  AssertFail({"colors": {"red": "blue"}}, {"colors": [FLOAT]})
  AssertFail({"colors": ["red", "blue", 5.5]}, {"colors": [STRING]})
  AssertFail({"colors": ["red", "blue", {"foo": "bar"}]}, {"colors": [STRING]})
  AssertFail({"colors": ["red", "blue"], "foo": "bar"}, {"colors": [STRING]})
  AssertPass({"colors": ["red", 1]}, {"colors": [[STRING, INTEGER]]})
  AssertFail({"colors": ["red", "blue"]}, {"colors": [[STRING, INTEGER]]})
  AssertFail({"colors": [1, 2, 3]}, {"colors": [[STRING, INTEGER]]})
  AssertFail({"colors": ["red", 1, 5.3]}, {"colors": [[STRING, INTEGER]]})
  AssertPass({"colors": ["red", "blue"]}, {"colors": [STRING]})
  AssertFail({"colors": ["red", "blue"]}, {"colors": [[STRING]]})
  AssertFail({"colors": ["red", ["blue"]]}, {"colors": [STRING]})
  AssertFail({"colors": ["red", ["blue", "green"]]}, {"colors": [STRING]})
  # required attribute tests
  AssertPass({"colors": ["red", 5]}, {"colors": [[STRING, INTEGER]]})
  AssertFail({"colors": ["red", 5]}, {"colors": [[INTEGER, STRING]]})
  AssertPass({"colors": ["red", 5]}, {"colors": [STRING, INTEGER]})
  AssertPass({"colors": ["red", 5]}, {"colors": [INTEGER, STRING]})
  AssertFail({"colors": ["red", 5, "FF0000"]}, {"colors": [[STRING, INTEGER]]})
  # an array and a map of primitive type tests
  AssertPass({"color": {"name": "red", "rgb": "FF0000"}},
      {"color": {"name": STRING, "rgb": STRING}})
  AssertFail({"color": {"name": "red", "rgb": ["FF0000"]}},
      {"color": {"name": STRING, "rgb": STRING}})
  AssertFail({"color": {"name": "red", "rgb": "FF0000"}},
      {"color": {"name": STRING, "rgb": INTEGER}})
  AssertFail({"color": {"name": "red", "rgb": "FF0000"}},
      {"color": {"name": STRING, "rgb": {"hex": STRING}}})
  AssertPass({"color": {"name": "red", "rgb": "FF0000"}},
      {"color": {"name": STRING, "rgb": STRING}})
  AssertPass({"colors":
      [{"name": "red", "rgb": "FF0000"},
      {"name": "blue", "rgb": "0000FF"}]},
      {"colors": [{"name": STRING, "rgb": STRING}]})
  AssertFail({"colors":
      [{"name": "red", "rgb": "FF0000"},
      {"phone": "blue", "rgb": "0000FF"}]},
      {"colors": [{"name": STRING, "rgb": STRING}]})
  # boolean type tests
  AssertPass({"name": "Bob", "active": "true"},
      {"name": STRING, "active": BOOLEAN})
  AssertPass({"name": "Bob", "active": True},
      {"name": STRING, "active": BOOLEAN})
  AssertPass({"name": "Bob", "active": [5, True, "False"]},
      {"name": STRING, "active": [INTEGER, BOOLEAN]})
  AssertPass({"name": "Bob", "active": [5, True, "false"]},
      {"name": STRING, "active": [STRING, INTEGER, BOOLEAN]})
  AssertFail({"name": "Bob", "active": [5, True, "False"]},
      {"name": STRING, "active": [[INTEGER, BOOLEAN]]})
  # optional attribute tests
  AssertPass({"points":
      [{"x": 1, "y": 2, "z": 3}, {"x": 3, "y": 2, "z": 1},
      {"x": 2, "y": 3, "z": 1}]},
      {"points": [{"x": INTEGER, "y": INTEGER, "z": INTEGER}]})
  AssertPass({"points":
      [{"x": 1, "z": 3}, {"x": 3, "y": 2}, {"y": 3, "z": 1}]},
      {"points": [{"x": INTEGER, "y": INTEGER, "z": INTEGER}]})
  AssertPass({"account":
      [{"name": "Bob", "age": 25, "active": True}]},
      {"account": [{"age": INTEGER, "name": STRING, "active": BOOLEAN}]})
  AssertPass({"account":
      [{"name": "Bob", "active": True}]},
      {"account": [{"age": INTEGER, "name": STRING, "active": BOOLEAN}]})
  # nested array tests
  AssertFail({"name": "Bob", "active": [5, True, "false"]},
      {"name": STRING, "active": [[BOOLEAN]]})
  AssertFail({"name": "Bob", "active": [True]},
      {"name": STRING, "active": [[STRING]]})
  AssertPass({"name": "Bob", "active": ["true"]},
      {"name": STRING, "active": [[STRING]]})
  AssertPass({"name": "flowers", "price": ["USD", 9.99]},
      {"name": STRING, "price": [[STRING, FLOAT]]})
  AssertPass({"name": "flowers", "price":
      [["USD", 9.99], ["CAD", 11.79], ["RUB", 250.23]]},
      {"name": STRING, "price": [[STRING, FLOAT]]})
  # selector tests
  AssertPass({"likes": [{"state": "CA", "food": "cheese"},
      {"state": "NY", "drink": "wine"}]},
      {"likes": [{"state": "CA", "food": STRING},
      {"state": "NY", "drink": STRING}]})
  AssertPass({"likes": [{"state": "CA", "food": "cheese"},
      {"state": "CA", "food": "nuts"}]},
      {"likes": [{"state": "CA", "food": STRING},
      {"state": "NY", "drink": STRING}]})
  AssertFail({"likes": {"state": "CA", "drink": "cheese"}},
      {"likes": [{"state": "CA", "food": STRING},
      {"state": "NY", "drink": STRING}]})
def RunAllUnitTests():
  """Executes every self-test defined in this module."""
  for test in (RunAllRegexUnitTests, RunAllSchemaHelperUnitTests):
    test()
# Self-tests run at import time so a broken verifier fails fast.
RunAllUnitTests()
if __name__ == "__main__":
  Verifier().LoadAndVerifyModel(Echo)
| Python |
# Copyright 2012 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# @author: psimakov@google.com (Pavel Simakov)
"""Allows export of Lessons and Units to other systems."""
import verify, os
from datetime import datetime
RELEASE_TAG = "1.0"
def Echo(x):
  """Discards verifier output so the export can run the verifier quietly."""
  pass
def ExportToJavaScript(fname, lines, date):
  """Writes <fname>.js exposing the course model via a gcb_import() function.

  Args:
    fname: output path without extension.
    lines: JavaScript assignment statements produced by the verifier.
    date: timestamp embedded in the header comment.
  """
  code = []
  code.append("function gcb_import(){")
  for line in lines:
    # keep blank separator lines unindented so the output stays readable
    if len(line) != 0: code.append("  %s" % line)
    else: code.append("")
  code.append("")
  code.append("  return units;")
  code.append("}")
  # 'with' closes the handle even if a write fails; the original also
  # shadowed the 'file' builtin.
  with open("%s.js" % fname, 'w') as out:
    out.write("// Course Builder %s JavaScript Export on %s\n" % (RELEASE_TAG, date))
    out.write("// begin\n")
    out.write("\n".join(code))
    out.write("\n// end")
def ExportToPython(fname, lines, date):
  """Writes <fname>.py exposing the course model via a gcb_import() function.

  The generated module defines Array/true/false shims so the JS-style
  assignment lines evaluate as Python.
  """
  code = []
  code.append("class Array(dict):")
  code.append("  pass")
  code.append("")
  code.append("true = True")
  code.append("false = False")
  code.append("")
  code.append("def gcb_import():")
  for line in lines:
    code.append("  %s" % line)
  code.append("  return units")
  code.append("")
  code.append("if __name__ == \"__main__\":")
  # NOTE(review): the generated guard calls init(), which the generated module
  # never defines -- this looks like it should be gcb_import(); confirm with
  # consumers of the export before changing the emitted text.
  code.append("  init()")
  # 'with' closes the handle even if a write fails; the original also
  # shadowed the 'file' builtin.
  with open("%s.py" % fname, 'w') as out:
    out.write("# Course Builder %s Python Export on %s\n" % (RELEASE_TAG, date))
    out.write("# begin\n")
    out.write("\n".join(code))
    out.write("\n# end")
def ExportToPHP(fname, lines, date):
  """Writes <fname>.php exposing the course model via a gcb_import() function.

  Each non-blank line is prefixed with '$' to turn the JS identifiers into
  PHP variables.
  """
  code = []
  code.append("function gcb_import(){")
  for line in lines:
    if len(line) != 0: code.append("  $%s" % line)
    else: code.append("")
  code.append("")
  code.append("  return $units;")
  code.append("}")
  # 'with' closes the handle even if a write fails; the original also
  # shadowed the 'file' builtin.
  with open("%s.php" % fname, 'w') as out:
    out.write("<?php\n")
    out.write("// Course Builder %s PHP Export on %s\n" % (RELEASE_TAG, date))
    out.write("// begin\n")
    out.write("\n".join(code))
    out.write("\n// end")
    out.write("?>")
def ExportToFile(fname, lines):
  """Exports lines to JavaScript, Python and PHP, sharing one timestamp."""
  stamp = datetime.utcnow()
  for exporter in (ExportToJavaScript, ExportToPython, ExportToPHP):
    exporter(fname, lines, stamp)
if __name__ == "__main__":
print "Export started using %s" % os.path.realpath(__file__)
verifier = verify.Verifier()
errors = verifier.LoadAndVerifyModel(Echo)
if errors and len(errors) != 0:
raise Exception(
"Please fix all errors reported by tools/verify.py before continuing!")
fname = os.path.join(os.getcwd(), "coursebuilder_course")
ExportToFile(fname, verifier.export)
print "Export complete to %s" % fname
| Python |
# Copyright 2012 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import appengine_config, webapp2
from controllers import servings, sites, utils, assessments
# FIXME: set to 'False' before going live -- debug mode exposes stack traces
debug = True
# URL routes for Course Builder; every request is dispatched through
# sites.ApplicationRequestHandler, which binds these handler classes.
urls = [
    ('/', servings.CourseHandler),
    ('/activity', servings.ActivityHandler),
    ('/announcements', utils.AnnouncementsHandler),
    ('/answer', assessments.AnswerHandler),
    ('/assessment', servings.AssessmentHandler),
    ('/course', servings.CourseHandler),
    ('/forum', servings.ForumHandler),
    ('/preview', servings.PreviewHandler),
    ('/register', utils.RegisterHandler),
    ('/student/editstudent', utils.StudentEditStudentHandler),
    ('/student/home', utils.StudentProfileHandler),
    ('/student/unenroll', utils.StudentUnenrollHandler),
    ('/unit', servings.UnitHandler)]
sites.ApplicationRequestHandler.bind(urls)
# Catch-all WSGI app: every path is routed through the site dispatcher.
app = webapp2.WSGIApplication(
    [(r'(.*)', sites.ApplicationRequestHandler)], debug=debug)
| Python |
# Copyright 2012 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Functional tests for Course Builder."""
__author__ = 'Sean Lip'
import logging
import os
import sys
import unittest
import webtest
from google.appengine.ext import testbed
EXPECTED_TEST_COUNT = 14
def EmptyEnviron():
  """Resets request-related environment variables to logged-out defaults."""
  os.environ.update({
      'AUTH_DOMAIN': 'example.com',
      'SERVER_NAME': 'localhost',
      'SERVER_PORT': '8080',
      'USER_EMAIL': '',
      'USER_ID': ''})
class BaseTestClass(unittest.TestCase):
  """Base class for setting up and tearing down test cases."""
  def getApp(self):
    """Returns the main application to be tested."""
    # subclasses must override; there is no sensible default app
    raise Exception('Not implemented.')
  def setUp(self):
    """Creates the test app and activates App Engine service stubs."""
    EmptyEnviron()
    # setup an app to be tested
    self.testapp = webtest.TestApp(self.getApp())
    self.testbed = testbed.Testbed()
    self.testbed.activate()
    # declare any relevant App Engine service stubs here
    self.testbed.init_user_stub()
    self.testbed.init_memcache_stub()
    self.testbed.init_datastore_v3_stub()
  def tearDown(self):
    """Deactivates the testbed so stubs do not leak between tests."""
    self.testbed.deactivate()
def createTestSuite():
  """Loads all tests classes from appropriate modules."""
  # imported lazily so sys.path has already been fixed up by fix_sys_path()
  import tests.functional.tests as functional
  return unittest.TestLoader().loadTestsFromModule(functional)
def fix_sys_path():
  """Fix the sys.path to include GAE extra paths."""
  import dev_appserver
  # dev_appserver.fix_sys_path() prepends GAE paths to sys.path and hides
  # our classes like 'tests' behind other modules that have 'tests'
  # here, unlike dev_appserver, we append, not prepend path so our classes are first
  # (note: deliberately rebinds sys.path to a new list rather than extending)
  sys.path = sys.path + dev_appserver.EXTRA_PATHS[:]
def main():
  """Starts in-process server and runs all test cases in this module."""
  fix_sys_path()
  result = unittest.TextTestRunner(verbosity=2).run(createTestSuite())
  # Guard against silently skipped tests: the run must execute exactly the
  # expected number of tests.
  if result.testsRun != EXPECTED_TEST_COUNT:
    raise Exception(
      'Expected %s tests to be run, not %s.' % (EXPECTED_TEST_COUNT, result.testsRun))
  if len(result.errors) != 0 or len(result.failures) != 0:
    raise Exception(
      "Functional test suite failed: %s errors, %s failures of %s tests run." % (
      len(result.errors), len(result.failures), result.testsRun))
if __name__ == '__main__':
  # level=3 is below logging.DEBUG (10), so every log record is emitted
  logging.basicConfig(level=3)
  main()
| Python |
# Copyright 2012 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests that walk through Course Builder pages."""
__author__ = 'Sean Lip'
import os
from controllers import sites, utils
from models import models
from controllers.sites import AssertFails
from actions import *
from controllers.assessments import getScore, getAllScores
class StudentAspectTest(TestBase):
  """Tests the site from the Student perspective."""
  def testRegistration(self):
    """Test student registration."""
    email = 'test_registration@example.com'
    name1 = 'Test Student'
    name2 = 'John Smith'
    name3 = 'Pavel Simakov'
    login(email)
    register(self, name1)
    check_profile(self, name1)
    # a registered student can change the name on file
    change_name(self, name2)
    # and can re-register under a new name after unenrolling
    unregister(self)
    register(self, name3)
    check_profile(self, name3)
  def testLimitedClassSizeRegistration(self):
    """Test student registration with MAX_CLASS_SIZE."""
    utils.MAX_CLASS_SIZE = 2
    email1 = '111@example.com'
    name1 = 'student1'
    email2 = '222@example.com'
    name2 = 'student2'
    email3 = '333@example.com'
    name3 = 'student3'
    login(email1)
    register(self, name1)
    logout()
    login(email2)
    register(self, name2)
    logout()
    # the third registration exceeds the class size and must fail
    login(email3)
    AssertFails(lambda: register(self, name3))
    logout()
    # now unset the limit, and registration should succeed
    utils.MAX_CLASS_SIZE = None
    login(email3)
    register(self, name3)
    logout()
  def testPermissions(self):
    """Test student permissions to pages."""
    email = 'test_permissions@example.com'
    name = 'Test Permissions'
    login(email)
    register(self, name)
    Permissions.assert_enrolled(self)
    unregister(self)
    Permissions.assert_unenrolled(self)
    register(self, name)
    Permissions.assert_enrolled(self)
  def testLoginAndLogout(self):
    """Test if login and logout behave as expected."""
    email = 'test_login_logout@example.com'
    Permissions.assert_logged_out(self)
    login(email)
    Permissions.assert_unenrolled(self)
    logout()
    Permissions.assert_logged_out(self)
class PageCacheTest(TestBase):
  """Checks if pages cached for one user are properly rendered for another."""
  def testPageCache(self):
    """Test a user can't see other user pages."""
    email1 = 'user1@foo.com'
    name1 = 'User 1'
    email2 = 'user2@foo.com'
    name2 = 'User 2'
    # login as one user and view 'unit' and other pages, which are not cached
    login(email1)
    register(self, name1)
    Permissions.assert_enrolled(self)
    response = view_unit(self)
    AssertContains(email1, response.body)
    logout()
    # login as another user and check 'unit' and other pages show correct new email
    login(email2)
    register(self, name2)
    Permissions.assert_enrolled(self)
    response = view_unit(self)
    AssertContains(email2, response.body)
    logout()
class AssessmentTest(TestBase):
  """Tests assessment submission and score bookkeeping."""
  def submitAssessment(self, name, args):
    """Loads the named assessment page, then posts args to the answer handler."""
    response = self.get('assessment?name=%s' % name)
    AssertContains('<script src="assets/js/assessment-%s.js"></script>' % name, response.body)
    response = self.post('answer', args)
    AssertEquals(response.status_int, 200)
    return response
  def testCoursePass(self):
    """Tests student passing final exam."""
    email = 'test_pass@google.com'
    name = 'Test Pass'
    post = {'assessment_type': 'postcourse',
        'num_correct': '0', 'num_questions': '4',
        'score': '100.00'}
    # register
    login(email)
    register(self, name)
    # submit answer
    response = self.submitAssessment('Post', post)
    AssertEquals(response.status_int, 200)
    AssertContains('Your score is 70%', response.body)
    AssertContains('you have passed the course', response.body)
    # check pass
    response = check_profile(self, name)
    AssertContains('70', response.body)
    AssertContains('100', response.body)
  def testAssessments(self):
    """Tests assessment scores are properly submitted and summarized."""
    email = 'test_assessments@google.com'
    name = 'Test Assessments'
    pre = {'assessment_type': 'precourse',
        '0': 'false', '1': 'false',
        '2': 'false', '3': 'false',
        'num_correct': '0', 'num_questions': '4',
        'score': '1.00'}
    mid = {'assessment_type': 'midcourse',
        '0': 'false', '1': 'false',
        '2': 'false', '3': 'false',
        'num_correct': '0', 'num_questions': '4',
        'score': '2.00'}
    post = {'assessment_type': 'postcourse',
        '0': 'false', '1': 'false',
        '2': 'false', '3': 'false',
        'num_correct': '0', 'num_questions': '4',
        'score': '3.00'}
    second_mid = {'assessment_type': 'midcourse',
        '0': 'false', '1': 'false',
        '2': 'false', '3': 'false',
        'num_correct': '0', 'num_questions': '4',
        'score': '1.00'}
    second_post = {'assessment_type': 'postcourse',
        '0': 'false', '1': 'false',
        '2': 'false', '3': 'false',
        'num_correct': '0', 'num_questions': '4',
        'score': '100000'}
    # register
    login(email)
    register(self, name)
    # check no scores exist right now
    student = models.Student.get_enrolled_student_by_email(email)
    assert len(getAllScores(student)) == 0
    # submit assessments and check numbers of scores recorded
    self.submitAssessment('Pre', pre)
    student = models.Student.get_enrolled_student_by_email(email)
    assert len(getAllScores(student)) == 1
    self.submitAssessment('Mid', mid)
    student = models.Student.get_enrolled_student_by_email(email)
    assert len(getAllScores(student)) == 2
    self.submitAssessment('Post', post)
    student = models.Student.get_enrolled_student_by_email(email)
    assert len(getAllScores(student)) == 4  # also includes overall_score
    # check scores are recorded properly
    student = models.Student.get_enrolled_student_by_email(email)
    assert int(getScore(student, 'precourse')) == 1
    assert int(getScore(student, 'midcourse')) == 2
    assert int(getScore(student, 'postcourse')) == 3
    assert int(getScore(student, 'overall_score')) == int((0.30*2) + (0.70*3))
    # try posting a new midcourse exam with a lower score; nothing should change
    self.submitAssessment('Mid', second_mid)
    student = models.Student.get_enrolled_student_by_email(email)
    assert int(getScore(student, 'precourse')) == 1
    assert int(getScore(student, 'midcourse')) == 2
    assert int(getScore(student, 'postcourse')) == 3
    assert int(getScore(student, 'overall_score')) == int((0.30*2) + (0.70*3))
    # now try posting a postcourse exam with a higher score and note changes
    self.submitAssessment('Post', second_post)
    student = models.Student.get_enrolled_student_by_email(email)
    assert int(getScore(student, 'precourse')) == 1
    assert int(getScore(student, 'midcourse')) == 2
    assert int(getScore(student, 'postcourse')) == 100000
    assert int(getScore(student, 'overall_score')) == int((0.30*2) + (0.70*100000))
class CourseUrlRewritingTest(StudentAspectTest, PageCacheTest, AssessmentTest):
    """Runs existing tests using rewrite rules for '/courses/pswg' base URL."""

    def setUp(self):
        # Install the course rewrite rule before the base fixtures start up.
        self.base = '/courses/pswg'
        self.namespace = 'gcb-courses-pswg-tests-ns'
        os.environ[sites.GCB_COURSES_CONFIG_ENV_VAR_NAME] = (
            'course:%s:/:%s' % (self.base, self.namespace))
        super(CourseUrlRewritingTest, self).setUp()

    def tearDown(self):
        # Undo setUp in reverse order: tear down fixtures, then drop the rule.
        super(CourseUrlRewritingTest, self).tearDown()
        del os.environ[sites.GCB_COURSES_CONFIG_ENV_VAR_NAME]

    def canonicalize(self, href, response=None):
        """Force self.base on to all URL's, but only if no current response exists."""
        if response:
            # A live response may carry a <base> tag; let the parent resolve it.
            return super(CourseUrlRewritingTest, self).canonicalize(href, response)
        # No response available: prefix the course base onto the relative href.
        if not href.startswith('/'):
            href = '/%s' % href
        return '%s%s' % (self.base, href)
| Python |
# Copyright 2012 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# @author: psimakov@google.com (Pavel Simakov)
"""A collection of actions for testing Course Builder pages."""
import logging
import main
import os
import re
import suite
from models.models import Unit, Lesson
from tools import verify
from google.appengine.api import namespace_manager
class TestBase(suite.BaseTestClass):
    """Base class for functional tests.

    NOTE(review): self.testapp is presumably created by suite.BaseTestClass
    from getApp()'s return value — confirm against the suite module.
    """

    def getApp(self):
        # Serve the real application, with debug-mode error pages enabled.
        main.debug = True
        return main.app

    def setUp(self):
        super(TestBase, self).setUp()
        # set desired namespace and inits data
        namespace = namespace_manager.get_namespace()
        try:
            if hasattr(self, 'namespace'):
                namespace_manager.set_namespace(self.namespace)
            self.initDatastore()
        finally:
            # restore the default namespace so later tests are unaffected
            if namespace == '':
                namespace_manager.set_namespace(None)

    def initDatastore(self):
        """Loads course data from the CSV files."""
        logging.info('')
        logging.info('Initializing datastore')

        # load and parse data from CSV file
        unit_file = os.path.join(os.path.dirname(__file__), "../../data/unit.csv")
        lesson_file = os.path.join(os.path.dirname(__file__), "../../data/lesson.csv")
        units = verify.ReadObjectsFromCsvFile(unit_file, verify.UNITS_HEADER, Unit)
        lessons = verify.ReadObjectsFromCsvFile(lesson_file, verify.LESSONS_HEADER, Lesson)

        # store all units and lessons
        for unit in units:
            unit.put()
        for lesson in lessons:
            lesson.put()

        # sanity check: counts must match the shipped CSV fixture data
        assert Unit.all().count() == 11
        assert Lesson.all().count() == 29

    def canonicalize(self, href, response=None):
        """Create absolute URL using <base> if defined, '/' otherwise."""
        if href.startswith('/'):
            return href
        base = '/'
        if response:
            # pick up the <base href="..."> tag from the page, when present
            match = re.search(r'<base href=[\'"]?([^\'" >]+)', response.body)
            if match and not href.startswith('/'):
                base = match.groups()[0]
        return '%s%s' % (base, href)

    def hookResponse(self, response):
        """Modify response.goto() to properly compute URL using <base> if defined."""
        gotox = response.goto

        # closure over the original goto; canonicalizes the href first
        def newGoto(href, method='get', **args):
            return gotox(self.canonicalize(href), method, **args)

        response.goto = newGoto
        return response

    def get(self, url):
        """HTTP GET against the canonicalized URL; hooks goto() on the response."""
        url = self.canonicalize(url)
        logging.info('HTTP Get: %s' % url)
        response = self.testapp.get(url)
        return self.hookResponse(response)

    def post(self, url, params):
        """HTTP POST against the canonicalized URL; hooks goto() on the response."""
        url = self.canonicalize(url)
        logging.info('HTTP Post: %s' % url)
        response = self.testapp.post(url, params)
        return self.hookResponse(response)

    def click(self, response, name):
        """Follows the link named 'name' on the given response page."""
        logging.info('Link click: %s' % name)
        response = response.click(name)
        return self.hookResponse(response)

    def submit(self, form):
        """Submits the given form and hooks the resulting response."""
        logging.info('Form submit: %s' % form)
        response = form.submit()
        return self.hookResponse(response)
def AssertEquals(expected, actual):
    """Raises unless expected == actual."""
    if not expected == actual:
        raise Exception('Expected \'%s\', does not match actual \'%s\'.' % (expected, actual))


def AssertContains(needle, haystack):
    """Raises unless needle appears in haystack."""
    if not needle in haystack:
        raise Exception('Can\'t find \'%s\' in \'%s\'.' % (needle, haystack))


def AssertNoneFail(browser, callbacks):
    """Invokes all callbacks and expects each one not to fail."""
    for callback in callbacks:
        callback(browser)


def AssertAllFail(browser, callbacks):
    """Invokes all callbacks and expects each one to fail."""

    class MustFail(Exception):
        pass

    for callback in callbacks:
        try:
            callback(browser)
            # reaching this line means the callback succeeded, which is wrong
            raise MustFail('Expected to fail: %s().' % callback.__name__)
        except MustFail:
            raise
        except Exception:
            # any other exception is exactly what we expected; keep going
            pass


def login(email):
    """Simulates an App Engine login by setting the user env variables."""
    os.environ['USER_EMAIL'] = email
    os.environ['USER_ID'] = 'user1'


def get_current_user_email():
    """Returns the email of the currently logged-in test user.

    Raises:
        Exception: if no user is logged in. (Fixed: previously a missing
        USER_EMAIL key raised a bare KeyError before the intended
        'No current user.' error could be raised.)
    """
    email = os.environ.get('USER_EMAIL')
    if not email:
        raise Exception('No current user.')
    return email


def logout():
    """Simulates a logout by removing the user env variables."""
    del os.environ['USER_EMAIL']
    del os.environ['USER_ID']
def register(browser, name):
    """Registers a new student and verifies confirmation and profile pages."""
    page = browser.get('/')
    AssertEquals(page.status_int, 302)
    page = view_registration(browser)
    page.form.set('form01', name)
    page = browser.submit(page.form)
    AssertContains('Thank you for registering for', page.body)
    check_profile(browser, name)


def check_profile(browser, name):
    """Verifies the profile page shows the student's name and email."""
    page = view_my_profile(browser)
    AssertContains('Email', page.body)
    AssertContains(name, page.body)
    AssertContains(get_current_user_email(), page.body)
    return page


def view_registration(browser):
    """Opens and sanity-checks the registration form page."""
    page = browser.get('register')
    AssertContains('What is your name?', page.body)
    return page


def view_preview(browser):
    """Opens and sanity-checks the course preview page."""
    page = browser.get('preview')
    AssertContains(' the stakes are high.', page.body)
    AssertContains('<li><p class="top_content">Pre-course assessment</p></li>', page.body)
    return page


def view_course(browser):
    """Opens and sanity-checks the main course page."""
    page = browser.get('course')
    AssertContains(' the stakes are high.', page.body)
    AssertContains('<a href="assessment?name=Pre">Pre-course assessment</a>', page.body)
    AssertContains(get_current_user_email(), page.body)
    return page


def view_unit(browser):
    """Opens and sanity-checks the first unit/lesson page."""
    page = browser.get('unit?unit=1&lesson=1')
    AssertContains('Unit 1 - Introduction', page.body)
    AssertContains(get_current_user_email(), page.body)
    return page


def view_activity(browser):
    """Opens and sanity-checks an activity page."""
    page = browser.get('activity?unit=1&lesson=2')
    AssertContains('<script src="assets/js/activity-1.2.js"></script>', page.body)
    AssertContains(get_current_user_email(), page.body)
    return page


def view_announcements(browser):
    """Opens and sanity-checks the announcements page."""
    page = browser.get('announcements')
    AssertContains('Example Announcement', page.body)
    AssertContains(get_current_user_email(), page.body)
    return page


def view_my_profile(browser):
    """Opens and sanity-checks the student profile page."""
    page = browser.get('student/home')
    AssertContains('Date enrolled', page.body)
    AssertContains(get_current_user_email(), page.body)
    return page


def view_forum(browser):
    """Opens and sanity-checks the forum page."""
    page = browser.get('forum')
    AssertContains('document.getElementById("forum_embed").src =', page.body)
    AssertContains(get_current_user_email(), page.body)
    return page


def view_assessments(browser):
    """Opens each assessment page and verifies its script is wired in."""
    for name in ['Pre', 'Mid', 'Fin']:
        page = browser.get('assessment?name=%s' % name)
        assert 'assets/js/assessment-%s.js' % name in page.body
        AssertEquals(page.status_int, 200)
        AssertContains(get_current_user_email(), page.body)


def change_name(browser, new_name):
    """Changes the student's name via the profile form and verifies it."""
    page = browser.get('student/home')
    page.form.set('name', new_name)
    page = browser.submit(page.form)
    AssertEquals(page.status_int, 302)
    check_profile(browser, new_name)


def unregister(browser):
    """Unenrolls the current student via the profile page."""
    page = browser.get('student/home')
    page = browser.click(page, 'Unenroll')
    AssertContains('to unenroll from', page.body)
    browser.submit(page.form)
class Permissions():
    """Defines who can see what."""

    @classmethod
    def get_logged_out_allowed_pages(cls):
        """Returns all pages that a logged-out user can see."""
        return [view_preview]

    @classmethod
    def get_logged_out_denied_pages(cls):
        """Returns all pages that a logged-out user can't see."""
        return [view_announcements, view_forum, view_course, view_assessments,
                view_unit, view_activity, view_my_profile, view_registration]

    @classmethod
    def get_enrolled_student_allowed_pages(cls):
        """Returns all pages that a logged-in, enrolled student can see."""
        return [view_announcements, view_forum, view_course,
                view_assessments, view_unit, view_activity, view_my_profile]

    @classmethod
    def get_enrolled_student_denied_pages(cls):
        """Returns all pages that a logged-in, enrolled student can't see."""
        return [view_registration, view_preview]

    @classmethod
    def get_unenrolled_student_allowed_pages(cls):
        """Returns all pages that a logged-in, unenrolled student can see."""
        return [view_registration, view_preview]

    @classmethod
    def get_unenrolled_student_denied_pages(cls):
        """Returns all pages that a logged-in, unenrolled student can't see."""
        # denied = enrolled-allowed minus unenrolled-allowed, order preserved
        allowed = cls.get_unenrolled_student_allowed_pages()
        return [page for page in cls.get_enrolled_student_allowed_pages()
                if page not in allowed]

    @classmethod
    def assert_logged_out(cls, browser):
        """Check that current user can see only what is allowed to a logged-out user."""
        AssertNoneFail(browser, cls.get_logged_out_allowed_pages())
        AssertAllFail(browser, cls.get_logged_out_denied_pages())

    @classmethod
    def assert_enrolled(cls, browser):
        """Check that current user can see only what is allowed to an enrolled student."""
        AssertNoneFail(browser, cls.get_enrolled_student_allowed_pages())
        AssertAllFail(browser, cls.get_enrolled_student_denied_pages())

    @classmethod
    def assert_unenrolled(cls, browser):
        """Check that current user can see only what is allowed to an unenrolled student."""
        AssertNoneFail(browser, cls.get_unenrolled_student_allowed_pages())
        AssertAllFail(browser, cls.get_unenrolled_student_denied_pages())
| Python |
# Copyright 2012 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# @author: psimakov@google.com (Pavel Simakov)
import os
from google.appengine.ext import db
from google.appengine.api import memcache
# determine if we run in production environment; the App Engine runtime sets
# SERVER_SOFTWARE to a non-'Development...' value when deployed
PRODUCTION_MODE = not os.environ.get(
    'SERVER_SOFTWARE', 'Development').startswith('Development')

# set the default amount of time to cache the items for in memcache (one hour)
DEFAULT_CACHE_TTL_SECS = 60 * 60

# enable memcache caching, but only if we run in the production mode
IS_CACHE_ENABLED = PRODUCTION_MODE
class MemcacheManager(object):
    """Class that consolidates all our memcache operations."""

    @classmethod
    def enabled(cls):
        """Tells whether caching is turned on at all."""
        return IS_CACHE_ENABLED

    @classmethod
    def get(cls, key):
        """Gets an item from memcache if memcache is enabled."""
        if not cls.enabled():
            return None
        return memcache.get(key)

    @classmethod
    def set(cls, key, value):
        """Sets an item in memcache if memcache is enabled."""
        if cls.enabled():
            memcache.set(key, value, DEFAULT_CACHE_TTL_SECS)

    @classmethod
    def delete(cls, key):
        """Deletes an item from memcache if memcache is enabled."""
        if cls.enabled():
            memcache.delete(key)
class Student(db.Model):
    """Student profile.

    The datastore key_name is the student's email address: get_by_email()
    looks up by key_name, and registration creates
    Student(key_name=user.email(), ...).
    """
    enrolled_date = db.DateTimeProperty(auto_now_add=True)
    name = db.StringProperty()
    is_enrolled = db.BooleanProperty()

    # each of the following is a string representation of a JSON dict
    answers = db.TextProperty()
    scores = db.TextProperty()

    def put(self):
        """Do the normal put() and also add the object to memcache."""
        super(Student, self).put()
        # key().name() is the email address, so this refreshes the same cache
        # entry that get_enrolled_student_by_email() reads below
        MemcacheManager.set(self.key().name(), self)

    def delete(self):
        """Do the normal delete() and also remove the object from memcache."""
        super(Student, self).delete()
        MemcacheManager.delete(self.key().name())

    @classmethod
    def get_by_email(cls, email):
        # direct datastore lookup by key_name; deliberately bypasses memcache
        return Student.get_by_key_name(email.encode('utf8'))

    @classmethod
    def get_enrolled_student_by_email(cls, email):
        """Returns the enrolled Student for email, or None.

        Returns None both for unknown students and for students whose
        is_enrolled flag is False.
        """
        student = MemcacheManager.get(email)
        if not student:
            student = Student.get_by_email(email)
            # NOTE(review): on a datastore miss this caches None; since
            # MemcacheManager.get() cannot distinguish a cached None from a
            # cache miss, unknown emails hit the datastore on every call —
            # confirm this is intended.
            MemcacheManager.set(email, student)
        if student and student.is_enrolled:
            return student
        else:
            return None
class Unit(db.Model):
    """Unit metadata."""
    id = db.IntegerProperty()
    type = db.StringProperty()
    unit_id = db.StringProperty()
    title = db.StringProperty()
    release_date = db.StringProperty()
    now_available = db.BooleanProperty()

    @classmethod
    def get_units(cls):
        """Returns all units ordered by id, via memcache when possible."""
        units = MemcacheManager.get('units')
        if units is None:
            units = Unit.all().order('id')
            # NOTE(review): this caches the Query object itself, not a
            # materialized result list — confirm memcache serialization
            # produces what callers expect.
            MemcacheManager.set('units', units)
        return units

    @classmethod
    def get_lessons(cls, unit_id):
        """Returns the lessons of the given unit ordered by id, via memcache.

        NOTE(review): defined on Unit but queries the Lesson model declared
        below; Lesson is resolved at call time, so this works, but consider
        whether it belongs on Lesson.
        """
        lessons = MemcacheManager.get('lessons' + str(unit_id))
        if lessons is None:
            lessons = Lesson.all().filter('unit_id =', unit_id).order('id')
            MemcacheManager.set('lessons' + str(unit_id), lessons)
        return lessons
class Lesson(db.Model):
    """Lesson metadata."""
    # numeric id of the parent unit (presumably matches Unit.id — confirm)
    unit_id = db.IntegerProperty()
    id = db.IntegerProperty()
    title = db.StringProperty()
    objectives = db.TextProperty()
    video = db.TextProperty()
    notes = db.TextProperty()
    slides = db.TextProperty()
    duration = db.StringProperty()
    # activity / activity_title describe the optional activity for this lesson
    activity = db.StringProperty()
    activity_title = db.StringProperty()
| Python |
# Copyright 2012 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# @author: sll@google.com (Sean Lip)
"""Helper functions to work with various models."""
import json, logging
def getAllScores(student):
    """Returns a dict mapping assessment/summary name to its score.

    Returns an empty dict when the student has no recorded scores.
    """
    if not student.scores:
        return {}
    return json.loads(student.scores)


def dictGet(dict_as_string, my_key):
    """Returns the value for my_key in a JSON-encoded dict, or None."""
    if not dict_as_string:
        return None
    return json.loads(dict_as_string).get(my_key)


def _dictSet(dict_as_string, my_key, value):
    """Returns the JSON-encoded dict (possibly empty/None) with my_key set.

    Shared implementation for setAnswer()/setScore(), which previously
    duplicated this logic (with a misleading 'score_dict' name in setAnswer).
    """
    if not dict_as_string:
        contents = {}
    else:
        contents = json.loads(dict_as_string)
    contents[my_key] = value
    return json.dumps(contents)


def getAnswer(student, assessment_name):
    """Returns the answer array for the given assessment, or None if not found."""
    return dictGet(student.answers, assessment_name)


def setAnswer(student, assessment_name, answer):
    """Stores 'answer' for the assessment; caller must student.put() to commit.

    NB: this does not do any type-checking on 'answer'; it just stores
    whatever is passed in.
    """
    student.answers = _dictSet(student.answers, assessment_name, answer)


def getScore(student, assessment_name):
    """Returns the score for the given assessment, or None if not found.

    Caller must cast the returned value appropriately.
    """
    return dictGet(student.scores, assessment_name)


def setScore(student, assessment_name, score):
    """Stores 'score' for the assessment; caller must student.put() to commit.

    NB: this does not do any type-checking on 'score'; it just stores
    whatever is passed in.
    """
    student.scores = _dictSet(student.scores, assessment_name, score)
| Python |
# Copyright 2012 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# @author: psimakov@google.com (Pavel Simakov)
"""Custom configurations and functions for Google App Engine."""
import os
# this is the official location of this app for computing of all relative paths;
# it is the directory containing this appengine_config module
BUNDLE_ROOT = os.path.dirname(__file__)
from google.appengine.api import namespace_manager
def namespace_manager_default_namespace_for_request():
    """Set a namespace appropriate for this request."""
    # NOTE(review): local import — presumably avoids a circular or
    # too-early import while App Engine loads this config module; confirm.
    from controllers import sites
    return sites.ApplicationContext.getNamespaceName()
| Python |
# Copyright 2012 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging, urlparse, webapp2, jinja2
from models.models import Student, Unit, MemcacheManager
from google.appengine.api import users
from google.appengine.ext import db
from models.utils import getAllScores
# FIXME: Set MAX_CLASS_SIZE to a positive integer if you want to restrict the
# course size to a maximum of N students. Note, though, that counting the students
# in this way uses a lot of database calls that may cost you quota and money.
# TODO(psimakov): we must use sharded counter and not Student.all().count()
MAX_CLASS_SIZE = None  # None means enrollment is unlimited

# a template place holder for the student email; cached pages embed this token
# and StudentHandler.serve() substitutes the real email at serving time
USER_EMAIL_PLACE_HOLDER = "{{ email }}"
"""A handler that is aware of the application context."""
class ApplicationHandler(webapp2.RequestHandler):
def __init__(self):
super(ApplicationHandler, self).__init__()
self.templateValue = {}
def appendBase(self):
"""Append current course <base> to template variables."""
slug = self.app_context.getSlug()
if not slug.endswith('/'):
slug = '%s/' % slug
self.templateValue['gcb_course_base'] = slug
def getTemplate(self, templateFile):
"""Computes the location of template files for the current namespace."""
self.appendBase()
template_dir = self.app_context.getTemplateHome()
jinja_environment = jinja2.Environment(
loader=jinja2.FileSystemLoader(template_dir))
return jinja_environment.get_template(templateFile)
def is_absolute(self, url):
return bool(urlparse.urlparse(url).scheme)
def redirect(self, location):
"""Takes relative 'location' and adds current namespace URL prefix to it."""
if not self.is_absolute(location):
if self.app_context.getSlug() and self.app_context.getSlug() != '/':
location = '%s%s' % (self.app_context.getSlug(), location)
super(ApplicationHandler, self).redirect(location)
"""
Base handler
"""
class BaseHandler(ApplicationHandler):
def getUser(self):
"""Validate user exists."""
user = users.get_current_user()
if not user:
self.redirect(users.create_login_url(self.request.uri))
else:
return user
def personalizePageAndGetUser(self):
"""If the user exists, add email and logoutUrl fields to the navbar template."""
user = self.getUser()
if user:
self.templateValue['email'] = user.email()
self.templateValue['logoutUrl'] = users.create_logout_url("/")
return user
def render(self, templateFile):
template = self.getTemplate(templateFile)
self.response.out.write(template.render(self.templateValue))
"""
Student Handler
"""
class StudentHandler(ApplicationHandler):
def get_page(cls, page_name, content_lambda):
"""Get page from cache or create page on demand."""
content = MemcacheManager.get(page_name)
if not content:
logging.info('Cache miss: ' + page_name)
content = content_lambda()
MemcacheManager.set(page_name, content)
return content
def getOrCreatePage(self, page_name, handler):
def content_lambda():
return self.delegateTo(handler)
return self.get_page(page_name, content_lambda)
def delegateTo(self, handler):
""""Run another handler using system identity.
This method is called when a dynamic page template cannot be found in either
memcache or the datastore. We now need to create this page using a handler
passed to this method. The handler must run with the exact same request
parameters as self, but we need to replace current user and the response."""
# create custom function for replacing the current user
def get_placeholder_user():
return users.User(email = USER_EMAIL_PLACE_HOLDER)
# create custom response.out to intercept output
class StringWriter:
def __init__(self):
self.buffer = []
def write(self, text):
self.buffer.append(text)
def getText(self):
return "".join(self.buffer)
class BufferedResponse:
def __init__(self):
self.out = StringWriter()
# configure handler request and response
handler.app_context = self.app_context
handler.request = self.request
handler.response = BufferedResponse()
# substitute current user with the system account and run the handler
get_current_user_old = users.get_current_user
try:
user = users.get_current_user()
if user:
users.get_current_user = get_placeholder_user
handler.get()
finally:
users.get_current_user = get_current_user_old
return handler.response.out.getText()
def getEnrolledStudent(self):
user = users.get_current_user()
if user:
return Student.get_enrolled_student_by_email(user.email())
else:
self.redirect(users.create_login_url(self.request.uri))
def serve(self, page, email=None):
# Search and substitute placeholders for current user email and
# overall_score (if applicable) in the cached page before serving them to
# users.
html = page
if email:
html = html.replace(USER_EMAIL_PLACE_HOLDER, email)
self.response.out.write(html)
"""
Handler for viewing course preview
"""
class CoursePreviewHandler(BaseHandler):
def get(self):
user = users.get_current_user()
if not user:
self.templateValue['loginUrl'] = users.create_login_url('/')
else:
self.templateValue['email'] = user.email()
self.templateValue['logoutUrl'] = users.create_logout_url("/")
self.templateValue['navbar'] = {'course': True}
self.templateValue['units'] = Unit.get_units()
if user and Student.get_enrolled_student_by_email(user.email()):
self.redirect('/course')
else:
self.render('preview.html')
"""
Handler for course registration
"""
class RegisterHandler(BaseHandler):
def get(self):
user = self.personalizePageAndGetUser()
if not user:
self.redirect(users.create_login_url(self.request.uri))
return
self.templateValue['navbar'] = {'registration': True}
# Check for existing registration -> redirect to course page
student = Student.get_enrolled_student_by_email(user.email())
if student:
self.redirect('/course')
else:
self.render('register.html')
def post(self):
user = self.personalizePageAndGetUser()
if not user:
self.redirect(users.create_login_url(self.request.uri))
return
if (MAX_CLASS_SIZE and Student.all(keys_only=True).count() >= MAX_CLASS_SIZE):
self.templateValue['course_status'] = 'full'
else:
# Create student record
name = self.request.get('form01')
# create new or re-enroll old student
student = Student.get_by_email(user.email())
if student:
if not student.is_enrolled:
student.is_enrolled = True
student.name = name
else:
student = Student(key_name=user.email(), name=name, is_enrolled=True)
student.put()
# Render registration confirmation page
self.templateValue['navbar'] = {'registration': True}
self.render('confirmation.html')
"""
Handler for forum page
"""
class ForumHandler(BaseHandler):
def get(self):
user = self.personalizePageAndGetUser()
if not user:
self.redirect(users.create_login_url(self.request.uri))
return
self.templateValue['navbar'] = {'forum': True}
self.render('forum.html')
"""
Handler for rendering answer submission confirmation page
"""
class AnswerConfirmationHandler(BaseHandler):
def __init__(self, type):
super(AnswerConfirmationHandler, self).__init__()
self.type = type
def get(self):
user = self.personalizePageAndGetUser()
if not user:
self.redirect(users.create_login_url(self.request.uri))
return
self.templateValue['navbar'] = {'course': True}
self.templateValue['assessment'] = self.type
self.render('test_confirmation.html')
"""
This function handles the click to 'My Profile' link in the nav bar
"""
class StudentProfileHandler(BaseHandler):
def get(self):
user = self.personalizePageAndGetUser()
if not user:
self.redirect(users.create_login_url(self.request.uri))
return
#check for existing registration -> redirect to registration page
student = Student.get_enrolled_student_by_email(user.email())
if not student:
self.redirect('/preview')
return
self.templateValue['navbar'] = {}
self.templateValue['student'] = student
self.templateValue['scores'] = getAllScores(student)
self.render('student_profile.html')
"""
This function handles edits to student records by students
"""
class StudentEditStudentHandler(BaseHandler):
def get(self):
user = self.personalizePageAndGetUser()
if not user:
self.redirect(users.create_login_url(self.request.uri))
return
self.templateValue['navbar'] = {}
e = self.request.get('email')
# Check for existing registration -> redirect to course page
student = Student.get_by_email(e)
if student == None:
self.templateValue['student'] = None
self.templateValue['errormsg'] = 'Error: Student with email ' + e + ' can not be found on the roster.'
else:
self.templateValue['student'] = student
self.templateValue['scores'] = getAllScores(student)
self.render('student_profile.html')
def post(self):
user = self.personalizePageAndGetUser()
if not user:
self.redirect(users.create_login_url(self.request.uri))
return
# Update student record
e = self.request.get('email')
n = self.request.get('name')
student = Student.get_by_email(e)
if student:
if (n != ''):
student.name = n
student.put()
self.redirect('/student/editstudent?email='+e)
"""
Handler for Announcements
"""
class AnnouncementsHandler(BaseHandler):
def get(self):
user = self.personalizePageAndGetUser()
if not user:
self.redirect(users.create_login_url(self.request.uri))
return
student = Student.get_enrolled_student_by_email(user.email())
if not student:
self.redirect('/preview')
return
self.templateValue['navbar'] = {'announcements': True}
self.render('announcements.html')
"""
Handler for students to unenroll themselves
"""
class StudentUnenrollHandler(BaseHandler):
def get(self):
user = self.personalizePageAndGetUser()
if not user:
self.redirect(users.create_login_url(self.request.uri))
return
student = Student.get_enrolled_student_by_email(user.email())
if student:
self.templateValue['student'] = student
self.templateValue['navbar'] = {'registration': True}
self.render('unenroll_confirmation_check.html')
def post(self):
user = self.personalizePageAndGetUser()
if not user:
self.redirect(users.create_login_url(self.request.uri))
return
# Update student record
student = Student.get_by_email(user.email())
if student and student.is_enrolled:
student.is_enrolled = False
student.put()
self.templateValue['navbar'] = {'registration': True}
self.render('unenroll_confirmation.html')
| Python |
# Copyright 2012 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# @author: psimakov@google.com (Pavel Simakov)
"""All handlers here either serve the cached pages or delegate to real handlers."""
import logging, json
from models.models import Student
import lessons, utils
from utils import StudentHandler
from google.appengine.api import users
"""
Handler for serving course page.
"""
class CourseHandler(StudentHandler):
def get(self):
student = self.getEnrolledStudent()
if student:
page = self.getOrCreatePage('course_page', lessons.CourseHandler())
self.serve(page, student.key().name())
else:
self.redirect('/preview')
"""
Handler for serving class page.
"""
class UnitHandler(StudentHandler):
def get(self):
# Extract incoming args
c = self.request.get('unit')
if not c:
class_id = 1
else:
class_id = int(c)
l = self.request.get('lesson')
if not l:
lesson_id = 1
else:
lesson_id = int(l)
# Check for enrollment status
student = self.getEnrolledStudent()
if student:
page = self.getOrCreatePage(
'lesson%s%s_page' % (class_id, lesson_id), lessons.UnitHandler())
self.serve(page, student.key().name())
else:
self.redirect('/register')
"""
Handler for serving activity page.
"""
class ActivityHandler(StudentHandler):
def get(self):
# Extract incoming args
c = self.request.get('unit')
if not c:
class_id = 1
else:
class_id = int(c)
l = self.request.get('lesson')
if not l:
lesson_id = 1
else:
lesson_id = int(l)
# Check for enrollment status
student = self.getEnrolledStudent()
if student:
page = self.getOrCreatePage(
'activity' + str(class_id) + str(lesson_id) + '_page', lessons.ActivityHandler())
self.serve(page, student.key().name())
else:
self.redirect('/register')
"""
Handler for serving assessment page.
"""
class AssessmentHandler(StudentHandler):
def get(self):
# Extract incoming args
n = self.request.get('name')
if not n:
n = 'Pre'
name = n
# Check for enrollment status
student = self.getEnrolledStudent()
if student:
page = self.getOrCreatePage(
'assessment' + name + '_page', lessons.AssessmentHandler())
self.serve(page, student.key().name())
else:
self.redirect('/register')
"""
Handler for serving forum page.
"""
class ForumHandler(StudentHandler):
def get(self):
# Check for enrollment status
student = self.getEnrolledStudent()
if student:
page = self.getOrCreatePage('forum_page', utils.ForumHandler())
self.serve(page, student.key().name())
else:
self.redirect('/register')
"""
Handler for serving preview page.
"""
class PreviewHandler(StudentHandler):
def get(self):
user = users.get_current_user()
if user:
if Student.get_enrolled_student_by_email(user.email()):
self.redirect('/course')
else:
page = self.getOrCreatePage('loggedin_preview_page', utils.CoursePreviewHandler())
self.serve(page, user.email())
else:
page = self.getOrCreatePage('anonymous_preview_page', utils.CoursePreviewHandler())
self.serve(page)
| Python |
# Copyright 2012 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# @author: psimakov@google.com (Pavel Simakov)
"""Enables hosting of multiple courses in one application instance.
We used to allow hosting of only one course in one Google App Engine instance. Now
we allow hosting of many courses simultaneously. To configure multiple courses one
must set an environment variable in app.yaml file, for example:
...
env_variables:
GCB_COURSES_CONFIG: 'course:/coursea:/courses/a, course:/courseb:/courses/b'
...
This variable holds a ',' separated list of rewrite rules. Each rewrite rule has
three ':' separated parts: the word 'course', the URL prefix, and the file system
location for the site files. The fourth, optional part, is a course namespace name.
The URL prefix specifies how the course URL will appear in the browser. In the
example above, the courses will be mapped to http://www.example.com[/coursea] and
http://www.example.com[/courseb].
The file system location of the files specifies which files to serve for the course.
For each course we expect three sub-folders: 'assets', 'views', and 'data'. The
'data' folder must contain the CSV files that define the course layout, the 'assets'
and 'views' should contain the course specific files and jinja2 templates respectively.
In the example above, the course files are expected to be placed into folders
'/courses/a' and '/courses/b' of your Google App Engine installation respectively.
By default Course Builder handles static '/assets' files using a custom handler.
You may choose to handle '/assets' files of your course as 'static' files using
Google App Engine handler. You can do so by creating a new static file handler
entry in your app.yaml and placing it before our main course handler.
If you have an existing course developed using Course Builder and do NOT want to
host multiple courses, there is nothing for you to do. A following default rule is
silently created for you:
...
env_variables:
GCB_COURSES_CONFIG: 'course:/:/'
...
It sets the '/' as the base URL for the course, uses root folder of your Google App
Engine installation to look for course /assets/..., /data/..., and /views/... and
uses blank datastore and memcache namespace. All in all, everything behaves just as
it did in the prior version of Course Builder when only one course was supported.
If you have an existing course developed using Course Builder and DO want to start
hosting multiple courses here are the steps. First, define the courses configuration
environment variable as described above. Second, copy existing 'assets', 'data' and
'views' folders of your course into the new location, for example '/courses/mycourse'.
Third, change your bulkloader commands to use the new CSV data file locations and
add a 'namespace' parameter, here is an example:
...
echo Uploading units.csv
$GOOGLE_APP_ENGINE_HOME/appcfg.py upload_data \
--url=http://localhost:8080/_ah/remote_api \
--config_file=experimental/coursebuilder/bulkloader.yaml \
--filename=experimental/coursebuilder/courses/a/data/unit.csv \
--kind=Unit \
--namespace=gcb-courses-a
echo Uploading lessons.csv
$GOOGLE_APP_ENGINE_HOME/appcfg.py upload_data \
--url=http://localhost:8080/_ah/remote_api \
--config_file=experimental/coursebuilder/bulkloader.yaml \
--filename=experimental/coursebuilder/courses/a/data/lesson.csv \
--kind=Lesson \
--namespace=gcb-courses-a
...
If you have an existing course built on a previous version of Course Builder and you
now decided to use new URL prefix, which is not '/', you will need to update your
old course html template and JavaScript files. You typically would have to make two
modifications. First, replace all absolute URLs with the relative URLs. For example,
if you had <a href='/forum'>..</a>, you will need to replace it with <a href='forum'>..</a>.
Second, you need to add <base> tag at the top of you course 'base.html' and
'base_registration.html' files, like this:
...
<head>
<base href="{{ gcb_course_base }}" />
...
Current Course Builder release already has all these modifications.
Note, that each 'course' runs in a separate Google App Engine namespace. The name
of the namespace is derived from the course files location. In the example above,
the course files are stored in the folder '/courses/a', which will be mapped to the
namespace name 'gcb-courses-a'. The namespaces can't contain '/', so we replace them
with '-' and prefix the namespace with the project abbreviation 'gcb'. Remember these
namespace names, you will need to use them if/when accessing server administration
panel, viewing objects in the datastore, etc. Don't move the files to another folder
after your course starts as a new folder name will create a new namespace name and
old data will no longer be used. You are free to rename the course URL prefix at any
time. Once again, if you are not hosting multiple courses, your course will run in
a default namespace (None).
Good luck!
"""
import appengine_config, logging, mimetypes, os, threading, webapp2
from google.appengine.api import namespace_manager
# the name of environment variable that holds rewrite rule definitions
GCB_COURSES_CONFIG_ENV_VAR_NAME = 'GCB_COURSES_CONFIG'

# base name for all course namespaces; the course folder (with '/' replaced
# by '-') is appended to derive the full namespace name
GCB_BASE_COURSE_NAMESPACE = 'gcb-course'

# these folder names are reserved
GCB_ASSETS_FOLDER_NAME = '/assets'
GCB_VIEWS_FOLDER_NAME = '/views'

# supported site types; currently only 'course' rules are recognized
SITE_TYPE_COURSE = 'course'

# default 'Cache-Control' HTTP header for static files
DEFAULT_CACHE_CONTROL_HEADER_VALUE = 'public, max-age=600'

# enable debug output
DEBUG_INFO = False
# thread local storage for current request PATH_INFO
PATH_INFO_THREAD_LOCAL = threading.local()


def hasPathInfo():
  """Checks if PATH_INFO is defined for the thread local."""
  return hasattr(PATH_INFO_THREAD_LOCAL, 'path')


def setPathInfo(path):
  """Stores PATH_INFO in thread local.

  Raises:
    Exception: if path is empty, or a path is already set for this thread
        (callers must call unsetPathInfo() first).
  """
  if not path:
    # bug fix: the original message had a broken escape ("\'unset()\ instead.")
    # and referred to a nonexistent unset() function
    raise Exception('Path must not be empty; use unsetPathInfo() to clear.')
  if hasPathInfo():
    raise Exception("Expected no path set.")
  PATH_INFO_THREAD_LOCAL.path = path


def getPathInfo():
  """Gets PATH_INFO from thread local."""
  return PATH_INFO_THREAD_LOCAL.path


def unsetPathInfo():
  """Removes PATH_INFO from thread local."""
  if not hasPathInfo():
    raise Exception("Expected valid path already set.")
  del PATH_INFO_THREAD_LOCAL.path
def debug(message):
  """Logs message at INFO level when module debug output is enabled."""
  if not DEBUG_INFO:
    return
  logging.info(message)
def makeDefaultRule():
  """By default, we support one course in the root folder in the None namespace."""
  # slug '/', folder '/', namespace None: the single-course legacy setup
  return ApplicationContext('course', '/', '/', None)
def getAllRules():
  """Reads all rewrite rule definitions from environment variable.

  Returns:
    A list of ApplicationContext, one per rule; a single default rule when
    the GCB_COURSES_CONFIG variable is missing or empty.

  Raises:
    Exception: if a rule is malformed, or a slug/namespace is duplicated.
  """
  default = makeDefaultRule()
  if GCB_COURSES_CONFIG_ENV_VAR_NAME not in os.environ:
    return [default]
  var_string = os.environ[GCB_COURSES_CONFIG_ENV_VAR_NAME]
  if not var_string:
    return [default]

  slugs = {}
  namespaces = {}
  rules = []  # renamed from 'all' to avoid shadowing the builtin
  for rule in var_string.split(','):
    rule = rule.strip()
    if not rule:
      continue
    parts = rule.split(':')

    # validate length
    if len(parts) < 3:
      raise Exception(
          'Expected rule definition in a form of \'type:slug:folder[:ns]\', got %s: ' % rule)

    # validate type; renamed from 'type' to avoid shadowing the builtin
    if parts[0] != SITE_TYPE_COURSE:
      raise Exception('Expected \'%s\', found: \'%s\'.' % (SITE_TYPE_COURSE, parts[0]))
    site_type = parts[0]

    # validate slug is unique
    if parts[1] in slugs:
      raise Exception('Slug already defined: %s.' % parts[1])
    slugs[parts[1]] = True
    slug = parts[1]

    # the course files folder
    folder = parts[2]

    # validate explicit namespace, or derive one from the folder name
    namespace = None
    if len(parts) == 4:
      namespace = parts[3]
    else:
      if folder == '/' or folder == '':
        namespace = None
      else:
        namespace = '%s%s' % (GCB_BASE_COURSE_NAMESPACE, folder.replace('/', '-'))
    if namespace in namespaces:
      raise Exception('Namespace already defined: %s.' % namespace)
    namespaces[namespace] = True

    rules.append(ApplicationContext(site_type, slug, folder, namespace))
  return rules
def getRuleForCurrentRequest():
  """Chooses rule that matches current request context path."""
  # nothing to match when no request path was recorded for this thread
  if not hasPathInfo():
    return None
  path = getPathInfo()

  # TODO(psimakov): linear search is unacceptable
  for rule in getAllRules():
    slug = rule.getSlug()
    if path == slug or path.startswith('%s/' % slug) or slug == '/':
      return rule

  debug('No mapping for: %s' % path)
  return None
def unprefix(path, prefix):
  """Strips prefix from path, returning '/' if the result would be empty.

  Raises:
    Exception: if path does not start with prefix.
  """
  if not path.startswith(prefix):
    raise Exception('Not prefixed.')
  if prefix == '/':
    # the root prefix leaves the path untouched
    return path
  remainder = path[len(prefix):]
  return remainder if remainder else '/'
def namespace_manager_default_namespace_for_request():
  """Set a namespace appropriate for this request."""
  # hook invoked by the App Engine runtime; delegates to the matched rule
  return ApplicationContext.getNamespaceName()
"""A class that handles serving of static resources located on the file system."""
class AssetHandler(webapp2.RequestHandler):
def __init__(self, filename):
filename = os.path.abspath(filename).replace('//', '/')
if not filename.startswith('/'):
raise Exception('Expected absolute path.')
filename = filename[1:]
self.filename = os.path.join(appengine_config.BUNDLE_ROOT, filename)
def getMimeType(self, filename, default='application/octet-stream'):
guess = mimetypes.guess_type(filename)[0]
if guess is None:
return default
return guess
def get(self):
debug('File: %s' % self.filename)
if not os.path.isfile(self.filename):
self.error(404)
self.response.headers['Cache-Control'] = DEFAULT_CACHE_CONTROL_HEADER_VALUE
self.response.headers['Content-Type'] = self.getMimeType(self.filename)
self.response.write(open(self.filename, 'r').read())
"""A class that contains an application context for request/response."""
class ApplicationContext(object):
@classmethod
def getNamespaceName(cls):
"""A name of the namespace (NDB, memcache, etc.) to use for this request."""
rule = getRuleForCurrentRequest()
if rule:
return rule.namespace
return None
def __init__(self, type, slug, homefolder, namespace):
self.slug = slug
self.homefolder = homefolder
self.type = type
self.namespace = namespace
def getHomeFolder(self):
"""A folder with the assets belonging to this context."""
return self.homefolder
def getSlug(self):
"""A common context path for all URLs in this context ('/courses/mycourse')."""
return self.slug
def getTemplateHome(self):
if self.getHomeFolder() == '/':
template_home = GCB_VIEWS_FOLDER_NAME
else:
template_home = '%s%s' % (self.getHomeFolder(), GCB_VIEWS_FOLDER_NAME)
template_home = os.path.abspath(template_home)
if not template_home.startswith('/'):
raise Exception('Expected absolute path.')
template_home = template_home[1:]
debug('Template home: %s' % template_home)
return os.path.join(appengine_config.BUNDLE_ROOT, template_home)
"""A class that handles dispatching of all URL's to proper handlers."""
class ApplicationRequestHandler(webapp2.RequestHandler):
@classmethod
def bind(cls, urls):
urls_map = {}
ApplicationRequestHandler.urls = {}
for url in urls:
urls_map[url[0]] = url[1]
ApplicationRequestHandler.urls_map = urls_map
def getHandler(self):
"""Finds a routing rule suitable for this request."""
rule = getRuleForCurrentRequest()
if not rule:
return None
path = getPathInfo()
if not path:
return None
return self.getHandlerForCourseType(rule, unprefix(path, rule.getSlug()))
def getHandlerForCourseType(self, context, path):
# handle static assets here
absolute_path = os.path.abspath(path)
if absolute_path.startswith('%s/' % GCB_ASSETS_FOLDER_NAME):
handler = AssetHandler('%s%s' % (context.getHomeFolder(), absolute_path))
handler.request = self.request
handler.response = self.response
handler.app_context = context
debug('Course asset: %s' % absolute_path)
return handler
# handle all dynamic handlers here
if path in ApplicationRequestHandler.urls_map:
factory = ApplicationRequestHandler.urls_map[path]
handler = factory()
handler.app_context = context
handler.request = self.request
handler.response = self.response
debug('Handler: %s > %s' %(path, handler.__class__.__name__))
return handler
return None
def get(self, path):
try:
setPathInfo(path)
debug('Namespace: %s' % namespace_manager.get_namespace())
handler = self.getHandler()
if not handler:
self.error(404)
else:
handler.get()
finally:
unsetPathInfo()
def post(self, path):
try:
setPathInfo(path)
debug('Namespace: %s' % namespace_manager.get_namespace())
handler = self.getHandler()
if not handler:
self.error(404)
else:
handler.post()
finally:
unsetPathInfo()
def AssertMapped(src, dest):
  """Asserts that path 'src' resolves to the rule with slug 'dest' (falsy = no rule)."""
  try:
    setPathInfo(src)
    rule = getRuleForCurrentRequest()
    if not dest:
      assert rule is None
    else:
      assert rule.getSlug() == dest
  finally:
    unsetPathInfo()
def AssertHandled(src, targetHandler):
  """Asserts path 'src' is served by an instance of 'targetHandler'; returns it."""
  try:
    setPathInfo(src)
    handler = ApplicationRequestHandler().getHandler()
    if handler is None and targetHandler is None:
      return None
    assert isinstance(handler, targetHandler)
    return handler
  finally:
    unsetPathInfo()
def AssertFails(func):
  """Asserts that calling func() raises an Exception."""
  try:
    func()
  except Exception:
    # expected failure happened: nothing else to check
    return
  raise Exception()
def TestUnprefix():
  """Tests stripping of rule slugs from request paths."""
  assert unprefix('/', '/') == '/'
  assert unprefix('/a/b/c', '/a/b') == '/c'
  assert unprefix('/a/b/index.html', '/a/b') == '/index.html'
  # stripping the whole path yields the root
  assert unprefix('/a/b', '/a/b') == '/'
def TestRuleDefinitions():
  """Test various rewrite rule definitions."""
  # replace the whole environment so only the rules set below are visible
  os.environ = {}

  # check default site is created when none specified explicitly
  assert len(getAllRules()) == 1

  # test empty definition is ok
  os.environ[GCB_COURSES_CONFIG_ENV_VAR_NAME] = ''
  assert len(getAllRules()) == 1

  # test one rule parsing
  os.environ[GCB_COURSES_CONFIG_ENV_VAR_NAME] = 'course:/google/pswg:/sites/pswg'
  rules = getAllRules()
  assert len(getAllRules()) == 1
  rule = rules[0]
  assert rule.getSlug() == '/google/pswg'
  assert rule.getHomeFolder() == '/sites/pswg'

  # test two rule parsing
  os.environ[GCB_COURSES_CONFIG_ENV_VAR_NAME] = 'course:/a/b:/c/d, course:/e/f:/g/h'
  assert len(getAllRules()) == 2

  # test two of the same slugs are not allowed
  os.environ[GCB_COURSES_CONFIG_ENV_VAR_NAME] = 'foo:/a/b:/c/d, bar:/a/b:/c/d'
  AssertFails(getAllRules)

  # test only course|static is supported
  os.environ[GCB_COURSES_CONFIG_ENV_VAR_NAME] = 'foo:/a/b:/c/d, bar:/e/f:/g/h'
  AssertFails(getAllRules)

  # test namespaces: folder '/c/d' derives namespace 'gcb-course-c-d'
  setPathInfo('/')
  os.environ[GCB_COURSES_CONFIG_ENV_VAR_NAME] = 'course:/:/c/d'
  assert ApplicationContext.getNamespaceName() == 'gcb-course-c-d'
  unsetPathInfo()
def TestUrlToRuleMapping():
  """Tests mapping of a URL to a rule."""
  # replace the whole environment so only the rules set below are visible
  os.environ = {}

  # default mapping: everything falls under the implicit root '/' rule
  AssertMapped('/favicon.ico', '/')
  AssertMapped('/assets/img/foo.png', '/')

  # explicit mapping
  os.environ[GCB_COURSES_CONFIG_ENV_VAR_NAME] = 'course:/a/b:/c/d, course:/e/f:/g/h'

  AssertMapped('/a/b', '/a/b')
  AssertMapped('/a/b/', '/a/b')
  AssertMapped('/a/b/c', '/a/b')
  AssertMapped('/a/b/c', '/a/b')
  AssertMapped('/e/f', '/e/f')
  AssertMapped('/e/f/assets', '/e/f')
  AssertMapped('/e/f/views', '/e/f')
  # relative paths do not match any rule
  AssertMapped('e/f', None)
  AssertMapped('foo', None)
def TestUrlToHandlerMappingForCourseType():
  """Tests mapping of a URL to a handler for course type."""
  # replace the whole environment so only the rules set below are visible
  os.environ = {}

  # setup rules
  os.environ[GCB_COURSES_CONFIG_ENV_VAR_NAME] = 'course:/a/b:/c/d, course:/e/f:/g/h'

  # setup helper classes standing in for real request handlers
  class FakeHandler0():
    def __init__(self):
      self.app_context = None

  class FakeHandler1():
    def __init__(self):
      self.app_context = None

  class FakeHandler2():
    def __init__(self):
      self.app_context = None

  # setup handler
  handler0 = FakeHandler0
  handler1 = FakeHandler1
  handler2 = FakeHandler2
  urls = [('/', handler0), ('/foo', handler1), ('/bar', handler2)]
  ApplicationRequestHandler.bind(urls)

  # test proper handler mappings
  AssertHandled('/a/b', FakeHandler0)
  AssertHandled('/a/b/', FakeHandler0)
  AssertHandled('/a/b/foo', FakeHandler1)
  AssertHandled('/a/b/bar', FakeHandler2)

  # test assets mapping
  handler = AssertHandled('/a/b/assets/img/foo.png', AssetHandler)
  assert handler.app_context.getTemplateHome().endswith(
      'experimental/coursebuilder/c/d/views')

  # this is allowed as we don't go out of /assets/...
  handler = AssertHandled('/a/b/assets/foo/../models/models.py', AssetHandler)
  assert handler.filename.endswith(
      'experimental/coursebuilder/c/d/assets/models/models.py')

  # this is not allowed as we do go out of /assets/...
  AssertHandled('/a/b/assets/foo/../../models/models.py', None)

  # test negative cases
  AssertHandled('/foo', None)
  AssertHandled('/baz', None)

  # site 'views' and 'data' are not accessible
  AssertHandled('/a/b/view/base.html', None)
  AssertHandled('/a/b/data/units.csv', None)

  # default mapping
  os.environ = {}
  # NOTE(review): bind() is not re-invoked for this urls list, so the binding
  # from the section above is still in effect -- confirm this is intentional
  urls = [('/', handler0), ('/foo', handler1), ('/bar', handler2)]

  # positive cases
  AssertHandled('/', FakeHandler0)
  AssertHandled('/foo', FakeHandler1)
  AssertHandled('/bar', FakeHandler2)
  handler = AssertHandled('/assets/js/main.js', AssetHandler)
  assert handler.app_context.getTemplateHome().endswith(
      'experimental/coursebuilder/views')

  # negative cases
  AssertHandled('/favicon.ico', None)
  AssertHandled('/e/f/index.html', None)
  AssertHandled('/foo/foo.css', None)

  # clean up
  ApplicationRequestHandler.bind([])
def TestSpecialChars():
  """Tests rule definitions containing special characters."""
  os.environ = {}

  # test namespace collisions are detected and is not allowed
  # NOTE(review): the 'foo'/'bar' types fail the SITE_TYPE_COURSE check in
  # getAllRules() before any namespace collision is evaluated -- confirm this
  # test exercises what the comment above claims
  os.environ[GCB_COURSES_CONFIG_ENV_VAR_NAME] = 'foo:/a/b:/c/d, bar:/a/b:/c-d'
  AssertFails(getAllRules)
def RunAllUnitTests():
  """Runs every unit test in this module, in order."""
  for test in (TestSpecialChars, TestUnprefix, TestRuleDefinitions,
               TestUrlToRuleMapping, TestUrlToHandlerMappingForCourseType):
    test()
if __name__ == '__main__':
  # running as a script: enable verbose logging and self-test this module
  DEBUG_INFO = True
  RunAllUnitTests()
| Python |
# Copyright 2012 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from models.models import Unit
from utils import BaseHandler
from google.appengine.api import users
"""
Handler for generating course page
"""
class CourseHandler(BaseHandler):
def get(self):
user = self.personalizePageAndGetUser()
if user:
self.templateValue['units'] = Unit.get_units()
self.templateValue['navbar'] = {'course': True}
self.render('course.html')
else:
self.redirect('/preview')
"""
Handler for generating class page
"""
class UnitHandler(BaseHandler):
def get(self):
# Set template values for user
user = self.personalizePageAndGetUser()
if not user:
self.redirect(users.create_login_url(self.request.uri))
return
# Extract incoming args
c = self.request.get('unit')
if not c:
unit_id = 1
else:
unit_id = int(c)
self.templateValue['unit_id'] = unit_id
l = self.request.get('lesson')
if not l:
lesson_id = 1
else:
lesson_id = int(l)
self.templateValue['lesson_id'] = lesson_id
# Set template values for a unit and its lesson entities
for unit in Unit.get_units():
if unit.unit_id == str(unit_id):
self.templateValue['units'] = unit
lessons = Unit.get_lessons(unit_id)
self.templateValue['lessons'] = lessons
# Set template values for nav bar
self.templateValue['navbar'] = {'course': True}
# Set template values for back and next nav buttons
if lesson_id == 1:
self.templateValue['back_button_url'] = ''
elif lessons[lesson_id - 2].activity:
self.templateValue['back_button_url'] = '/activity?unit=' + str(unit_id) + '&lesson=' + str(lesson_id - 1)
else:
self.templateValue['back_button_url'] = '/unit?unit=' + str(unit_id) + '&lesson=' + str(lesson_id - 1)
if lessons[lesson_id - 1].activity:
self.templateValue['next_button_url'] = '/activity?unit=' + str(unit_id) + '&lesson=' + str(lesson_id)
elif lesson_id == lessons.count():
self.templateValue['next_button_url'] = ''
else:
self.templateValue['next_button_url'] = '/unit?unit=' + str(unit_id) + '&lesson=' + str(lesson_id + 1)
self.render('unit.html')
"""
Handler for generating activity page.
"""
class ActivityHandler(BaseHandler):
def get(self):
# Set template values for user
user = self.personalizePageAndGetUser()
if not user:
self.redirect(users.create_login_url(self.request.uri))
return
# Extract incoming args
c = self.request.get('unit')
if not c:
unit_id = 1
else:
unit_id = int(c)
self.templateValue['unit_id'] = unit_id
l = self.request.get('lesson')
if not l:
lesson_id = 1
else:
lesson_id = int(l)
self.templateValue['lesson_id'] = lesson_id
# Set template values for a unit and its lesson entities
for unit in Unit.get_units():
if unit.unit_id == str(unit_id):
self.templateValue['units'] = unit
lessons = Unit.get_lessons(unit_id)
self.templateValue['lessons'] = lessons
# Set template values for nav-x bar
self.templateValue['navbar'] = {'course': True}
# Set template values for back and next nav buttons
self.templateValue['back_button_url'] = '/unit?unit=' + str(unit_id) + '&lesson=' + str(lesson_id)
if lesson_id == lessons.count():
self.templateValue['next_button_url'] = ''
else:
self.templateValue['next_button_url'] = '/unit?unit=' + str(unit_id) + '&lesson=' + str(lesson_id + 1)
self.render('activity.html')
"""
Handler for generating assessment page
"""
class AssessmentHandler(BaseHandler):
def get(self):
# Set template values for user
user = self.personalizePageAndGetUser()
if not user:
self.redirect(users.create_login_url(self.request.uri))
return
# Extract incoming args
n = self.request.get('name')
if not n:
n = 'Pre'
self.templateValue['name'] = n
self.templateValue['navbar'] = {'course': True}
self.render('assessment.html')
| Python |
# Copyright 2012 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# @author: pgbovine@google.com (Philip Guo)
"""Classes and methods to manage all aspects of student assessments."""
import json, logging
from models.models import Student
from models.utils import *
from utils import BaseHandler
from google.appengine.api import users
from google.appengine.ext import db
# Stores the assessment data in the student database entry
# and returns the (possibly-modified) assessment type,
# which the caller can use to render an appropriate response page.
#
# (caller must call student.put() to commit)
#
# FIXME: Course creators can edit this code to implement
# custom assessment scoring and storage behavior
def storeAssessmentData(student, assessment_type, score, answer):
  """Records answer/score on 'student'; returns the assessment type to render.

  Does not commit: the caller must call student.put().
  """
  # TODO: Note that the latest version of answers are always saved,
  # but scores are only saved if they're higher than the previous
  # attempt. This can lead to unexpected analytics behavior, so we
  # should resolve this somehow.
  setAnswer(student, assessment_type, answer)
  existing_score = getScore(student, assessment_type)
  # remember to cast to int for comparison
  if (existing_score is None) or (score > int(existing_score)):
    setScore(student, assessment_type, score)

  # special handling for computing final score:
  if assessment_type == 'postcourse':
    midcourse_score = getScore(student, 'midcourse')
    if midcourse_score is None:
      midcourse_score = 0
    else:
      midcourse_score = int(midcourse_score)

    if existing_score is None:
      postcourse_score = score
    else:
      # best postcourse attempt so far (stored scores only ever increase)
      postcourse_score = int(existing_score)
      if score > postcourse_score:
        postcourse_score = score

    # Calculate overall score based on a formula: 30% midcourse + 70% postcourse
    overall_score = int((0.30*midcourse_score) + (0.70*postcourse_score))

    # TODO: this changing of assessment_type is ugly ...
    if overall_score >= 70:
      assessment_type = 'postcourse_pass'
    else:
      assessment_type = 'postcourse_fail'
    setScore(student, 'overall_score', overall_score)

  return assessment_type
"""
Handler for saving assessment answers
"""
class AnswerHandler(BaseHandler):
# Find student entity and save answers
@db.transactional
def storeAssessmentTransaction(self, email, original_type, answer):
student = Student.get_by_email(email)
# TODO: considering storing as float for better precision
score = int(round(float(self.request.get('score'))))
assessment_type = storeAssessmentData(student, original_type, score, answer)
student.put()
return (student, assessment_type)
def post(self):
user = self.personalizePageAndGetUser()
if not user:
self.redirect(users.create_login_url(self.request.uri))
return
# Read in answers
answer = json.dumps(self.request.POST.items())
original_type = self.request.get('assessment_type')
# Check for enrollment status
student = Student.get_by_email(user.email())
if student and student.is_enrolled:
# Log answer submission
logging.info(student.key().name() + ':' + answer)
(student, assessment_type) = self.storeAssessmentTransaction(student.key().name(), original_type, answer)
# Serve the confirmation page
self.templateValue['navbar'] = {'course': True}
self.templateValue['assessment'] = assessment_type
self.templateValue['student_score'] = getScore(student, 'overall_score')
self.render('test_confirmation.html')
else:
self.redirect('/register')
| Python |
# To build the executable:
#   python setup.py py2exe
#
"Installer builder for PyAfipWs (py2exe setup script)"
__author__ = "Mariano Reingart (mariano@nsis.com.ar)"
__copyright__ = "Copyright (C) 2008 Mariano Reingart"

from distutils.core import setup
import py2exe
import sys

# includes for py2exe: email submodules py2exe's scanner misses
includes=['email.generator', 'email.iterators', 'email.message', 'email.utils']

# py2exe options: bundle the listed modules and optimize bytecode (-OO level)
opts = {
    'py2exe': {
    'includes':includes,
    'optimize':2}
    }

# build the COM server plus one console executable per listed script
setup( name = "PyAfipWs",
       com_server = ["pyafipws"],
       console=['rece.py', 'receb.py', 'recex.py', 'rg1361.py', 'wsaa.py', 'wsfex.py', 'wsbfe.py'],
       options=opts,
      )
| Python |
{'application':{'type':'Application',
'name':'Template',
'backgrounds': [
{'type':'Background',
'name':'bgTemplate',
'title':u'Aplicativo Factura Electr\xf3nica (PyRece)',
'size':(592, 487),
'menubar': {'type':'MenuBar',
'menus': [
{'type':'Menu',
'name':'menuConsultas',
'label':u'Consultas',
'items': [
{'type':'MenuItem',
'name':'menuConsultasLastCBTE',
'label':u'\xdalt. Cbte.',
},
{'type':'MenuItem',
'name':'menuConsultasLastID',
'label':u'\xdalt. ID',
},
{'type':'MenuItem',
'name':'menuConsultasGetCAE',
'label':u'Recuperar CAE',
},
]
},
{'type':'Menu',
'name':'menuAyuda',
'label':u'Ayuda',
'items': [
{'type':'MenuItem',
'name':'menuAyudaInstructivo',
'label':u'Instructivo',
},
{'type':'MenuItem',
'name':'menuAyudaAcercaDe',
'label':u'Acerca de',
},
{'type':'MenuItem',
'name':'menuAyudaLimpiar',
'label':u'Limpiar estado',
},
]
},
]
},
'components': [
{'type':'StaticText',
'name':'lblWebservice',
'position':(18, 10),
'text':u'Webservice:',
},
{'type':'Choice',
'name':'cboWebservice',
'position':(82, 5),
'size':(69, -1),
'items':[u'wsfe', u'wsfev1', u'wsfex'],
},
{'type':'Button',
'name':'btnGrabar',
'position':(504, 4),
'size':(60, -1),
'label':u'Grabar',
},
{'type':'Button',
'name':'btnMarcarTodo',
'position':(292, 163),
'label':u'Marcar Todo',
'toolTip':u'Seleccionar todas las facturas',
},
{'type':'Button',
'name':'btnAutorizarLote',
'position':(188, 163),
'label':u'Autorizar Lote',
'toolTip':u'Obtener CAE para todas las facturas',
},
{'type':'Button',
'name':'btnPrevisualizar',
'position':(395, 163),
'label':u'Previsualizar',
},
{'type':'Button',
 'name':'btnAutenticar',
 'position':(20, 163),
 'label':u'Autenticar',
 # fixed mojibake: 'Sesin' -> 'Sesión', written as \xf3 to match file style
 'toolTip':u'Iniciar Sesi\xf3n en la AFIP',
},
{'type':'TextArea',
'name':'txtEstado',
'position':(20, 243),
'size':(534, 212),
'font':{'faceName': u'Sans', 'family': 'sansSerif', 'size': 8},
'text':u'\n',
},
{'type':'StaticText',
'name':'lblProgreso',
'position':(20, 194),
'text':u'Progreso:',
},
{'type':'StaticText',
'name':'lblEstado',
'position':(22, 219),
'text':u'Estado:',
},
{'type':'Button',
'name':'btnEnviar',
'position':(490, 163),
'size':(60, -1),
'label':u'Enviar',
'toolTip':u'Generar y enviar mails',
},
{'type':'Button',
'name':'btnExaminar',
'position':(370, 4),
'size':(69, -1),
'label':u'Examinar',
},
{'type':'TextField',
'name':'txtArchivo',
'position':(197, 5),
'size':(167, -1),
'text':u'facturas.csv',
},
{'type':'StaticText',
'name':'lblArchivo',
'position':(155, 10),
'text':u'Archivo:',
},
{'type':'Button',
'name':'btnCargar',
'position':(443, 4),
'size':(60, -1),
'label':u'Cargar',
},
{'type':'Button',
'name':'btnAutorizar',
'position':(104, 163),
'label':u'Autorizar',
'toolTip':u'Obtener CAE por cada factura',
},
{'type':'MultiColumnList',
'name':'lvwListado',
'position':(18, 53),
'size':(537, 106),
'backgroundColor':(255, 255, 255, 255),
'columnHeadings':[],
'font':{'faceName': u'Tahoma', 'family': 'sansSerif', 'size': 8},
'items':[],
'maxColumns':1000,
'rules':1,
},
{'type':'StaticText',
'name':'lblFacturas',
'position':(18, 35),
'size':(117, -1),
'text':u'Facturas:',
},
{'type':'Gauge',
'name':'pbProgreso',
'position':(89, 195),
'size':(477, 16),
'backgroundColor':(209, 194, 182, 255),
'layout':'horizontal',
'max':100,
'value':0,
},
] # end components
} # end background
] # end backgrounds
} }
| Python |
import wsaa
import os,sys
from subprocess import Popen, PIPE
from base64 import b64encode
def sign_tra(tra, cert, privatekey):
    """Sign the TRA (PKCS#7 SMIME) with openssl; return the CMS base64-encoded.

    Side effect: writes the DER signature to 'cms.bin' in the working
    directory (kept for compatibility with the original script).
    """
    # pipe the TRA into openssl smime; the signature goes to cms.bin (-out),
    # so the process stdout is not needed (the original bound it to a dead var)
    Popen(["openssl", "smime", "-sign",
           "-signer", cert, "-inkey", privatekey,
           "-outform", "DER",
           "-out", "cms.bin", "-nodetach"],
          stdin=PIPE, stdout=PIPE).communicate(tra)
    # read back the DER signature produced by openssl
    out = open("cms.bin", "rb").read()
    return b64encode(out)
tra = wsaa.create_tra("wsfex")
print tra
cms = sign_tra(tra,"reingart.crt","reingart.key")
print cms
open("tra.cms","w").write(cms)
ta = wsaa.call_wsaa(cms)
print ta
open("TA.xml","w").write(ta)
| Python |
#!/usr/bin/python
# -*- coding: latin-1 -*-
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by the
# Free Software Foundation; either version 3, or (at your option) any later
# version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTIBILITY
# or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
# for more details.
"Manejo de XML simple"
__author__ = "Mariano Reingart (mariano@nsis.com.ar)"
__copyright__ = "Copyright (C) 2008/009 Mariano Reingart"
__license__ = "LGPL 3.0"
__version__ = "1.0"
import xml.dom.minidom
DEBUG = False
class SimpleXMLElement(object):
"Clase para Manejo simple de XMLs (simil PHP)"
    def __init__(self, text = None, elements = None, document = None, namespace = None, prefix=None):
        # Either parse 'text' into a fresh DOM document, or wrap existing
        # 'elements' of an already-parsed 'document' (internal traversal use).
        self.__ns = namespace
        self.__prefix = prefix
        if text:
            try:
                self.__document = xml.dom.minidom.parseString(text)
            except:
                # dump the offending XML before re-raising the parse error
                if DEBUG: print text
                raise
            self.__elements = [self.__document.documentElement]
        else:
            self.__elements = elements
            self.__document = document
def addChild(self,tag,text=None,ns=True):
if not ns or not self.__ns:
if DEBUG: print "adding %s ns %s %s" % (tag, self.__ns,ns)
element = self.__document.createElement(tag)
else:
if DEBUG: print "adding %s ns %s %s" % (tag, self.__ns,ns)
element = self.__document.createElementNS(self.__ns, "%s:%s" % (self.__prefix, tag))
if text:
if isinstance(text, unicode):
element.appendChild(self.__document.createTextNode(text))
else:
element.appendChild(self.__document.createTextNode(str(text)))
self.__element.appendChild(element)
return SimpleXMLElement(
elements=[element],
document=self.__document,
namespace=self.__ns,
prefix=self.__prefix)
def asXML(self,filename=None):
return self.__document.toxml('UTF-8')
def __getattr__(self,tag):
try:
if self.__ns:
if DEBUG: print "searching %s by ns=%s" % (tag,self.__ns)
elements = self.__elements[0].getElementsByTagNameNS(self.__ns, tag)
if not self.__ns or not elements:
if DEBUG: print "searching %s " % (tag)
elements = self.__elements[0].getElementsByTagName(tag)
if not elements:
if DEBUG: print self.__elements[0].toxml()
raise AttributeError("Sin elementos")
return SimpleXMLElement(
elements=elements,
document=self.__document,
namespace=self.__ns,
prefix=self.__prefix)
except AttributeError, e:
raise AttributeError("Tag not found: %s (%s)" % (tag, str(e)))
def __iter__(self):
"Iterate over xml tags"
try:
for __element in self.__elements:
yield SimpleXMLElement(
elements=[__element],
document=self.__document,
namespace=self.__ns,
prefix=self.__prefix)
except:
raise
def __getitem__(self,item):
"Return xml attribute"
return getattr(self.__element, item)
def __contains__( self, item):
return self.__element.getElementsByTagName(item)
def __unicode__(self):
return self.__element.childNodes[0].data
def __str__(self):
if self.__element.childNodes:
rc = ""
for node in self.__element.childNodes:
if node.nodeType == node.TEXT_NODE:
rc = rc + node.data.encode("utf8","ignore")
return rc
return ''
def __repr__(self):
return repr(self.__str__())
def __int__(self):
return int(self.__str__())
def __float__(self):
try:
return float(self.__str__())
except:
raise IndexError(self.__element.toxml())
__element = property(lambda self: self.__elements[0])
if __name__ == "__main__":
    # quick self-test / usage demo
    span = SimpleXMLElement('<span><a href="google.com">google</a><prueba><i>1</i><float>1.5</float></prueba></span>')
    print str(span.a)
    print int(span.prueba.i)
    print float(span.prueba.float)
    span = SimpleXMLElement('<span><a href="google.com">google</a><a>yahoo</a><a>hotmail</a></span>')
    # iterate over repeated tags of the same name
    for a in span.a:
        print str(a)
    span.addChild('a','altavista')
    print span.asXML()
# Para hacer el ejecutable:
# python setup.py py2exe
#
"""
__version__ = "$Revision: 1.3 $"
__date__ = "$Date: 2005/04/05 18:44:54 $"
"""
__author__ = "Mariano Reingart (mariano@nsis.com.ar)"
__copyright__ = "Copyright (C) 2008 Mariano Reingart"
from distutils.core import setup
import py2exe
import sys
if sys.platform == 'darwin':
import py2app
buildstyle = 'app'
else:
import py2exe
buildstyle = 'windows'
# find pythoncard resources, to add as 'data_files'
import os
pycard_resources=[]
for filename in os.listdir('.'):
if filename.find('.rsrc.')>-1:
pycard_resources+=[filename]
# includes for py2exe
includes=[]
for comp in ['button','image','staticbox','radiogroup', 'imagebutton',
'statictext','textarea','textfield','passwordfield', 'checkbox',
'tree','multicolumnlist','list','gauge','choice',
]:
includes += ['PythonCard.components.'+comp]
print 'includes',includes
includes+=['email.generator', 'email.iterators', 'email.message', 'email.utils']
opts = {
'py2exe': {
'includes':includes,
'optimize':2}
}
setup( name = "PyRece",
data_files = [ (".", pycard_resources),
(".",["logo.png",]) ],
options=opts,
**{buildstyle: ["pyrece.py"],
'console': [{"script": "pyrece.py", "dest_base": "pyrece_consola"}]
}
) | Python |
#!/usr/bin/python
# -*- coding: latin-1 -*-
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by the
# Free Software Foundation; either version 3, or (at your option) any later
# version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTIBILITY
# or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
# for more details.
"Py2Exe extension to build NSIS Installers"
# Based on py2exe/samples/extending/setup.py:
# "A setup script showing how to extend py2exe."
# Copyright (c) 2000-2008 Thomas Heller, Mark Hammond, Jimmy Retzlaff
__author__ = "Mariano Reingart (reingart@gmail.com)"
__copyright__ = "Copyright (C) 2011 Mariano Reingart"
__license__ = "GPL 3.0"
import os
import sys
from py2exe.build_exe import py2exe
# NSIS installer script template; the %(...)s placeholders are filled in by
# NSISScript.create().
# NOTE(review): this is NOT a raw string.  It survives because sequences
# like "\C" or "\%" are not recognized Python escapes and are left intact,
# but a future edit introducing "\n" or "\t" in a path would silently
# corrupt the script -- consider r""" ... """.
nsi_base_script = """\
; base.nsi
; WARNING: This script has been created by py2exe. Changes to this script
; will be overwritten the next time py2exe is run!
XPStyle on
Page license
Page directory
;Page components
Page instfiles
RequestExecutionLevel admin
LoadLanguageFile "${NSISDIR}\Contrib\Language files\English.nlf"
LoadLanguageFile "${NSISDIR}\Contrib\Language files\Spanish.nlf"
# set license page
LicenseText ""
LicenseData "licencia.txt"
LicenseForceSelection checkbox
; use the default string for the directory page.
DirText ""
Name "%(description)s"
OutFile "%(out_file)s"
;SetCompress off ; disable compression (testing)
SetCompressor /SOLID lzma
;InstallDir %(install_dir)s
InstallDir $PROGRAMFILES\%(install_dir)s
InstallDirRegKey HKLM "Software\%(reg_key)s" "Install_Dir"
Section %(name)s
; uninstall old version
ReadRegStr $R0 HKLM "Software\Microsoft\Windows\CurrentVersion\Uninstall\%(reg_key)s" "UninstallString"
StrCmp $R0 "" notistalled
ExecWait '$R0 /S _?=$INSTDIR'
notistalled:
SectionIn RO
SetOutPath $INSTDIR
File /r dist\*.*
WriteRegStr HKLM SOFTWARE\%(reg_key)s "Install_Dir" "$INSTDIR"
; Write the uninstall keys for Windows
WriteRegStr HKLM "Software\Microsoft\Windows\CurrentVersion\Uninstall\%(reg_key)s" "DisplayName" "%(description)s (solo eliminar)"
WriteRegStr HKLM "Software\Microsoft\Windows\CurrentVersion\Uninstall\%(reg_key)s" "UninstallString" "$INSTDIR\Uninst.exe"
WriteUninstaller "Uninst.exe"
;To Register a DLL
RegDLL "$INSTDIR\%(com_server)s"
SectionEnd
Section "Uninstall"
;To Unregister a DLL
UnRegDLL "$INSTDIR\%(com_server)s"
;Delete Files
;Delete Uninstaller And Unistall Registry Entries
Delete "$INSTDIR\Uninst.exe"
DeleteRegKey HKEY_LOCAL_MACHINE "SOFTWARE\%(reg_key)s"
DeleteRegKey HKEY_LOCAL_MACHINE "SOFTWARE\Microsoft\Windows\CurrentVersion\Uninstall\%(reg_key)s"
RMDir "$INSTDIR"
SectionEnd
;--------------------------------
Function .onInit
;Language selection dialog
Push ""
Push ${LANG_ENGLISH}
Push English
Push ${LANG_SPANISH}
Push Spanish
Push A ; A means auto count languages
; for the auto count to work the first empty push (Push "") must remain
LangDLL::LangDialog "Installer Language" "Please select the language of the installer"
Pop $LANGUAGE
StrCmp $LANGUAGE "cancel" 0 +2
Abort
FunctionEnd
"""
class build_installer(py2exe):
    """py2exe command subclass that also creates a Windows installer.

    This class first builds the exe file(s), then creates the NSIS
    installer.  You need NSIS (Nullsoft Scriptable Install System) for it.
    """
    def run(self):
        # Clean up any output of a previous build (Windows shell command).
        os.system("del /S /Q dist")
        # First, let py2exe do it's work.
        py2exe.run(self)
        lib_dir = self.lib_dir
        dist_dir = self.dist_dir
        comserver_files = self.comserver_files
        metadata = self.distribution.metadata
        # create the Installer, using the files py2exe has created.
        script = NSISScript(metadata,
                            lib_dir,
                            dist_dir,
                            self.windows_exe_files,
                            self.lib_files,
                            comserver_files)
        print "*** creating the nsis script***"
        script.create()
        print "*** compiling the nsis script***"
        script.compile()
        # Note: By default the final setup.exe will be in an Output subdirectory.
class NSISScript:
    """Renders and compiles an NSIS installer script from py2exe output.

    Fills the module-level ``nsi_base_script`` template with the project
    metadata and file lists, writes it to ``base.nsi``, then launches the
    NSIS compiler on it.
    """
    def __init__(self,
                 metadata,
                 lib_dir,
                 dist_dir,
                 windows_exe_files=None,
                 lib_files=None,
                 comserver_files=None):
        # Use None sentinels instead of mutable default arguments
        # (shared-list pitfall); existing call sites are unaffected.
        windows_exe_files = windows_exe_files or []
        lib_files = lib_files or []
        comserver_files = comserver_files or []
        self.lib_dir = lib_dir
        self.dist_dir = dist_dir
        if not self.dist_dir[-1] in "\\/":
            self.dist_dir += "\\"
        self.name = metadata.get_name()
        # NOTE(review): description is also get_name() -- get_description()
        # may have been intended; confirm before changing.
        self.description = metadata.get_name()
        self.version = metadata.get_version()
        # store every path relative to the dist directory
        self.windows_exe_files = [self.chop(p) for p in windows_exe_files]
        self.lib_files = [self.chop(p) for p in lib_files]
        # only COM server DLLs get registered by the installer
        self.comserver_files = [self.chop(p) for p in comserver_files
                                if p.lower().endswith(".dll")]

    def chop(self, pathname):
        """Strip the dist directory prefix from *pathname*."""
        assert pathname.startswith(self.dist_dir)
        return pathname[len(self.dist_dir):]

    def create(self, pathname="base.nsi"):
        """Write the filled-in NSIS script to *pathname*."""
        self.pathname = pathname
        # NOTE(review): assumes at least one COM DLL was built
        # (comserver_files[0]) -- confirm for non-COM projects.
        content = nsi_base_script % {
            'name': self.name,
            'description': "%s version %s" % (self.description, self.version),
            'version': self.version,
            'install_dir': self.name,
            'reg_key': self.name,
            'out_file': "instalador-%s-%s.exe" % (self.name, self.version),
            'com_server': self.comserver_files[0],
            }
        ofi = self.file = open(pathname, "w")
        try:
            ofi.write(content)
        finally:
            # close (and flush) before the NSIS compiler reads the file;
            # the original left the handle open, so compile() could see a
            # partially-buffered script.
            ofi.close()

    def compile(self, pathname="base.nsi"):
        """Hand the script to the registered NSIS 'compile' verb (Windows only)."""
        os.startfile(pathname, 'compile')
# To build the executable:
#   python setup.py py2exe
#
"Installer builder for PyAfipWs (WSMTXCA)"
# NOTE(review): the docstring above says WSMTXCA but this setup builds
# WSFEV1 -- looks like a copy/paste leftover; confirm.
__author__ = "Mariano Reingart (mariano@nsis.com.ar)"
__copyright__ = "Copyright (C) 2010 Mariano Reingart"
from distutils.core import setup
import py2exe
import glob, sys
# includes for py2exe
includes=['email.generator', 'email.iterators', 'email.message', 'email.utils']
# don't pull in all this MFC stuff used by the makepy UI.
excludes=["pywin", "pywin.dialogs", "pywin.dialogs.list", "win32ui"]
opts = {
    'py2exe': {
        'includes':includes,
        'optimize':2,
        'excludes': excludes,
        }}
data_files = [
    (".", ["wsfev1_wsdl.xml","wsfev1_wsdl_homo.xml", "licencia.txt"]),
    ("cache", glob.glob("cache/*")),
    ]
import wsfev1
from nsis import build_installer
setup(
    name="WSFEV1",
    # '-homo' marks a homologation (testing) build, '-full' a production one
    version=wsfev1.__version__ + (wsfev1.HOMO and '-homo' or '-full'),
    # NOTE(review): the trailing %s is never interpolated -- confirm whether
    # the version was meant to be substituted here.
    description="Interfaz PyAfipWs WSFEv1 %s",
    long_description=wsfev1.__doc__,
    author="Mariano Reingart",
    author_email="reingart@gmail.com",
    url="http://www.sistemasagiles.com.ar",
    license="GNU GPL v3",
    com_server = ["wsfev1"],
    console=['wsfev1.py', 'rece1.py', 'wsaa.py'],
    options=opts,
    data_files = data_files,
    # build_installer also produces the NSIS installer after freezing
    cmdclass = {"py2exe": build_installer}
    )
| Python |
# To build the executable:
#   python setup.py py2exe
#
"Installer builder for PyAfipWs (WSMTXCA)"
__author__ = "Mariano Reingart (mariano@nsis.com.ar)"
__copyright__ = "Copyright (C) 2010 Mariano Reingart"
from distutils.core import setup
import py2exe
import glob, sys
# includes for py2exe
includes=['email.generator', 'email.iterators', 'email.message', 'email.utils']
# don't pull in all this MFC stuff used by the makepy UI.
excludes=["pywin", "pywin.dialogs", "pywin.dialogs.list", "win32ui"]
opts = {
    'py2exe': {
        'includes':includes,
        'optimize':2,
        'excludes': excludes,
        }}
import wsmtx
from nsis import build_installer
# NOTE(review): data_files ships the *wsfev1* WSDL files in the WSMTXCA
# installer -- possibly copied from the WSFEV1 setup; confirm which WSDLs
# wsmtx actually loads.
data_files = [
    (".", ["wsfev1_wsdl.xml","wsfev1_wsdl_homo.xml", "licencia.txt"]),
    ("cache", glob.glob("cache/*")),
    ]
setup( name = "WSMTXCA",
    # '-homo' marks a homologation (testing) build, '-full' a production one
    version=wsmtx.__version__ + (wsmtx.HOMO and '-homo' or '-full'),
    description="Interfaz PyAfipWs WSMTXCA %s",
    long_description=wsmtx.__doc__,
    author="Mariano Reingart",
    author_email="reingart@gmail.com",
    url="http://www.sistemasagiles.com.ar",
    license="GNU GPL v3",
    com_server = ["wsmtx"],
    console=['wsmtx.py', 'wsaa.py'],
    options=opts,
    data_files = data_files,
    # build_installer also produces the NSIS installer after freezing
    cmdclass = {"py2exe": build_installer}
    )
#!/usr/bin/python
# Copyright 2011 Google, Inc. All Rights Reserved.
# simple script to walk source tree looking for third-party licenses
# dumps resulting html page to stdout
import os, re, mimetypes, sys
# read source directories to scan from command line
SOURCE = sys.argv[1:]
# regex to find /* */ style comment blocks (non-greedy, spans newlines)
COMMENT_BLOCK = re.compile(r"(/\*.+?\*/)", re.MULTILINE | re.DOTALL)
# regex used to detect if comment block is a license
COMMENT_LICENSE = re.compile(r"(license)", re.IGNORECASE)
COMMENT_COPYRIGHT = re.compile(r"(copyright)", re.IGNORECASE)
# MIME types whose files are never scanned for license comments
EXCLUDE_TYPES = [
    "application/xml",
    "image/png",
]
# list of known licenses; keys are derived by stripping all whitespace and
# forcing to lowercase to help combine multiple files that have same license.
KNOWN_LICENSES = {}
class License:
    """One distinct license text and the files that carry it."""
    def __init__(self, license_text):
        self.license_text = license_text
        # insertion-ordered, duplicate-free list of covered files
        self.filenames = []
    def add_file(self, filename):
        """Associate *filename* with this license text, ignoring repeats."""
        already_known = filename in self.filenames
        if not already_known:
            self.filenames.append(filename)
# strips everything that is not a word character when normalizing text
LICENSE_KEY = re.compile(r"[^\w]")
def find_license(license_text):
    """Return the canonical License entry for *license_text*.

    The lookup key is the text lowercased with all non-word characters
    removed, so trivially reformatted copies of the same license collapse
    into a single KNOWN_LICENSES entry.
    """
    # TODO(alice): a lot these licenses are almost identical Apache licenses.
    # Most of them differ in origin/modifications. Consider combining similar
    # licenses.
    normalized_key = LICENSE_KEY.sub("", license_text).lower()
    known = KNOWN_LICENSES.get(normalized_key)
    if known is None:
        known = License(license_text)
        KNOWN_LICENSES[normalized_key] = known
    return known
def discover_license(exact_path, filename):
    """Scan one file and record any license text found in KNOWN_LICENSES.

    A file named ``<prefix>LICENSE`` is treated as the license for
    ``<prefix>``; any other file is searched for comment blocks that
    mention both "license" and "copyright".
    """
    # when filename ends with LICENSE, assume applies to filename prefixed
    if filename.endswith("LICENSE"):
        with open(exact_path) as fp:
            license_text = fp.read()
        target_filename = filename[:-len("LICENSE")]
        if target_filename.endswith("."): target_filename = target_filename[:-1]
        find_license(license_text).add_file(target_filename)
        return None
    # skip excluded types; guess_type() returns a (type, encoding) tuple, so
    # compare only the type component -- the original compared the whole
    # tuple against the string list, which could never match.
    mimetype, _ = mimetypes.guess_type(filename)
    if mimetype in EXCLUDE_TYPES: return None
    with open(exact_path) as fp:
        raw_file = fp.read()
    # include comments that have both "license" and "copyright" in the text
    for comment in COMMENT_BLOCK.finditer(raw_file):
        comment = comment.group(1)
        if COMMENT_LICENSE.search(comment) is None: continue
        if COMMENT_COPYRIGHT.search(comment) is None: continue
        find_license(comment).add_file(filename)
# walk every requested source tree and collect license texts
for source in SOURCE:
    for root, dirs, files in os.walk(source):
        for name in files:
            discover_license(os.path.join(root, name), name)
# emit a single HTML notices page on stdout: one section per distinct license
print "<html><head><style> body { font-family: sans-serif; } pre { background-color: #eeeeee; padding: 1em; white-space: pre-wrap; } </style></head><body>"
for license in KNOWN_LICENSES.values():
    print "<h3>Notices for files:</h3><ul>"
    filenames = license.filenames
    filenames.sort()
    for filename in filenames:
        print "<li>%s</li>" % (filename)
    print "</ul>"
    print "<pre>%s</pre>" % license.license_text
print "</body></html>"
| Python |
#!/usr/bin/env python2
# Copyright 2012 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import sys
import re
try:
import cPickle as pickle
except:
import pickle
import subprocess as SP
from collections import deque
import fetchrev
log = fetchrev.log
def list_files(base):
    """Yield *base* and, depth-first, every path below it.

    Symlinked directories are yielded but not descended into.
    """
    yield base
    if not os.path.isdir(base) or os.path.islink(base):
        return
    for child in os.listdir(base):
        for path in list_files(base + '/' + child):
            yield path
def discover_repos():
    """Return every path under '.' that is a directory named *.git."""
    repos = []
    for candidate in list_files('.'):
        if candidate.endswith('.git') and os.path.isdir(candidate):
            repos.append(candidate)
    return repos
def list_reachable_revs():
    """Collect every 40-hex hash referenced by this repo's bookkeeping files.

    Scans refs/, logs/ (reflogs), packed-refs and the loose files directly
    under the git dir (HEAD, FETCH_HEAD, ...).  Returns a set of lowercase
    hashes with the all-zero null hash removed.
    """
    result = set()
    hash_re = re.compile(r'^[a-fA-F0-9]{40}$')
    def read_file(filename):
        with open(filename) as f:
            return f.read()
    def process(content, hash_columns=1):
        # harvest hashes from the first hash_columns space-separated words
        # of each line (reflog lines carry old+new hashes = 2 columns)
        for line in content.split('\n'):
            for word in line.split(' ')[0:hash_columns]:
                if hash_re.match(word):
                    result.add(word.lower())
                if word[0:1] == '^' and hash_re.match(word[1:]):
                    # packed-refs peeled tag
                    result.add(word[1:].lower())
    # process refs/
    for entry in list_files('refs'):
        if os.path.isfile(entry):
            process(read_file(entry))
    # process logs/
    for entry in list_files('logs'):
        if os.path.isfile(entry):
            process(read_file(entry), hash_columns=2)
    # process packed-refs and all refs directly under git dir (*_HEAD etc.)
    for entry in os.listdir('.'):
        if os.path.isfile(entry):
            process(read_file(entry))
    # other special-purpose state, such as in-progress rebase or am, isn't
    # processed -- it'd be a mess to do correctly and it's not really needed.
    return result - set(['0'*40])
def filter_existing_revs(revs):
    """Return the subset of *revs* that exist in the current repository."""
    # one long-lived cat-file process answers all queries: one line written,
    # one line read back per hash
    batch_checker = SP.Popen(['git', 'cat-file', '--batch-check'],
                             stdin=SP.PIPE, stdout=SP.PIPE)
    existing_revs = []
    for hash in revs:
        # ^{} suppresses git's "unable to find" warning for plain hashes
        batch_checker.stdin.write(hash + '^{}\n')
        result = batch_checker.stdout.readline()
        if not result.endswith('missing\n'):
            existing_revs.append(hash)
    batch_checker.stdin.close()
    batch_checker.wait()
    return existing_revs
def local(input, output, args):
    """Local end of the sync; args = (local_root, remote_root).

    For every repository present on both sides, pushes locally-reachable
    objects to the remote, then pulls the remote's objects back.
    """
    local_root, remote_root = args
    pickle.dump(remote_root, output)
    os.chdir(local_root)
    local_root = os.getcwd()  # absolute path so we can chdir back per repo
    local_repos = set(discover_repos())
    remote_repos = set(pickle.load(input))
    for item in (local_repos - remote_repos):
        sys.stderr.write('WARNING: {} is only on local side\n'.format(item))
    for item in (remote_repos - local_repos):
        sys.stderr.write('WARNING: {} is only on remote side\n'.format(item))
    for repo in (local_repos & remote_repos):
        sys.stderr.write('------- local->remote {} --------\n'.format(repo))
        pickle.dump(repo, output)
        os.chdir(repo)
        revs = filter_existing_revs(list_reachable_revs())
        fetchrev.sender(input, output, revs, is_local=True)
        input.read(1)  # wait for the remote's 'F' (finished receiving) byte
        sys.stderr.write('------- remote->local {} --------\n'.format(repo))
        fetchrev.receiver(input, output)
        os.chdir(local_root)
    pickle.dump(None, output)  # None tells the remote loop to stop
def remote(input=None, output=None):
    """Remote entry point (run via remoteexec); counterpart of local()."""
    # default to unbuffered stdin/stdout carrying the wire protocol
    if not input: input = os.fdopen(0, 'r', 0)
    if not output: output = os.fdopen(1, 'w', 0)
    remote_root = pickle.load(input)
    os.chdir(remote_root)
    remote_root = os.getcwd()
    pickle.dump(discover_repos(), output)
    while True:
        repo = pickle.load(input)
        if not repo:
            break  # local side pickled None: all repos done
        os.chdir(remote_root)
        os.chdir(repo)
        revs = filter_existing_revs(list_reachable_revs())
        fetchrev.receiver(input, output)
        output.write('F')  # tell the local side we finished receiving
        fetchrev.sender(input, output, revs, is_local=False)
def connect(ssh_cmd, args):
    """Bootstrap syncgit.remote over *ssh_cmd* via remoteexec, then run local()."""
    sys.path.insert(1, sys.path[0]+'/py-remoteexec')
    from remoteexec import remote_exec
    # ship both modules; the remote side runs syncgit.remote as its main
    modules = [sys.path[0]+'/fetchrev.py', sys.path[0]+'/syncgit.py']
    p, s = remote_exec(ssh_cmd=ssh_cmd, module_filenames=modules,
                       main_func='syncgit.remote')
    # unbuffered socket file objects carry the pickle/byte protocol
    local(s.makefile('r', 0), s.makefile('w', 0), args)
    p.wait()
def main():
    """CLI entry: everything before '--' is the ssh command, after it the args."""
    arguments = sys.argv[1:]
    split_at = arguments.index('--')
    connect(arguments[:split_at], arguments[split_at + 1:])
if __name__ == '__main__':
    main()
| Python |
#!/usr/bin/env python2
# Copyright 2012 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import sys
import re
try:
import cPickle as pickle
except:
import pickle
import subprocess as SP
from collections import deque
# verbose trace to stderr; the second assignment immediately disables it
log = sys.stderr.write
log = lambda msg: None # comment this line to enable verbose mode
# per-object negotiation states used by sender()
ASKED = 1  # query sent, answer pending
HAVE = 2   # the peer already has the object
NEED = 3   # the peer is missing the object
def sender(input, output, revs, is_local, thin=True):
    """Negotiate which commits the peer is missing and stream them as a pack.

    Wire protocol (single-byte commands on *output*): 'Q'+40-hex-hash asks
    whether the receiver has the object, 'N' means nothing to send, 'T'
    announces that a pack stream follows.
    """
    # resolve all requested revs to full 40-hex object names
    revs_objects = SP.check_output(
        ['git', 'rev-parse'] + list(revs)).split()
    obj_re = re.compile(r'^[0-9a-fA-F]{40}$')
    for obj in revs_objects:
        # defend against unexpected rev-parse output / option injection
        if not obj_re.match(obj): raise ValueError()
    log('sender: sending ' + repr(revs_objects) + '\n')
    object_status = dict()  # hash -> ASKED / HAVE / NEED
    query_queue = deque()   # hashes asked but not yet answered (FIFO)
    def ask(obj):
        # queue a "do you have this object?" query exactly once per hash
        if obj in object_status: return
        query_queue.append(obj)
        object_status[obj] = ASKED
        log('sender: asking about ' + obj + '\n')
        output.write('Q' + obj)
    for obj in revs_objects:
        ask(obj)
    need_something = False
    # breadth-first history walk: whenever the peer is missing a commit,
    # ask about its parents too, until the common frontier is found
    while query_queue:
        have_it = input.read(1) == 'Y'
        obj = query_queue.popleft()
        log('sender: received answer for {}: {}\n'.format(obj, have_it))
        object_status[obj] = HAVE if have_it else NEED
        if not have_it:
            need_something = True
            # obj^@ expands to all parents of obj
            parents = SP.check_output(['git', 'rev-parse', obj+'^@']).split()
            for parent in parents:
                ask(parent)
    if not need_something:
        log('sender: no objects needed\n')
        output.write('N')
        return
    log('sender: starting packing\n')
    output.write('T')
    args = ['git', 'pack-objects', '--progress', '--stdout']
    if is_local: args += ['--all-progress']
    packer_input = []
    if thin:
        # thin pack: list boundary objects the peer already has (^hash) so
        # pack-objects can delta against them without including them
        args += ['--revs', '--thin']
        for obj, status in object_status.iteritems():
            if status == HAVE:
                packer_input.append('^' + obj + '\n')
        for obj in revs_objects:
            if object_status[obj] == NEED:
                packer_input.append(obj + '\n')
    else:
        # TODO: we need --revs even when thin is False, because object_status
        # only has commits and pack-objects wants the complete list of objects,
        # including trees and blobs. Can something be done about that?
        args += ['--revs']
        for obj, status in object_status.iteritems():
            if status == NEED:
                packer_input.append(obj + '\n')
    # stream the pack straight into the peer's side of the connection
    packer = SP.Popen(args, stdin=SP.PIPE, stdout=output)
    packer.communicate(''.join(packer_input))
    log('sender: finished\n')
def receiver(input, output):
    """Answer the sender's object queries, then unpack the incoming pack.

    Replies 'Y'/'N' to each 'Q'+hash query; a 'T' command switches to
    unpacking the pack stream, 'N' means the sender has nothing for us.
    """
    # one long-lived cat-file process answers all existence queries
    batch_checker = SP.Popen(['git', 'cat-file', '--batch-check'],
                             stdin=SP.PIPE, stdout=SP.PIPE)
    log('receiver: ready\n')
    receiving_objects = True
    while True:
        command = input.read(1)
        if command == 'Q':
            hash = input.read(40)
            log('receiver: asked about ' + hash + '\n')
            # use ^{} to suppress dumb "error: unable to find <hash>" message
            # which happens only when input is a plain hash.
            batch_checker.stdin.write(hash + '^{}\n')
            result = batch_checker.stdout.readline()
            output.write('N' if result.endswith('missing\n') else 'Y')
        elif command == 'T':
            # pack data follows on the same stream
            break
        elif command == 'N':
            # sender determined we already have everything
            receiving_objects = False
            break
        else:
            raise ValueError()
    batch_checker.stdin.close()
    batch_checker.wait()
    if receiving_objects:
        log('receiver: starting unpacking\n')
        # hand the rest of the stream directly to git unpack-objects
        unpacker = SP.Popen(['git', 'unpack-objects'],
                            stdin=input, stdout=SP.PIPE)
        unpacker.wait()
    log('receiver: finished\n')
def local(input, output, args):
    """Local end of a one-shot transfer; args = (mode, remote_wd, revs...)."""
    remote_wd = args[1]
    pickle.dump(remote_wd, output)
    if args[0] == 'get':
        output.write('G')  # ask the remote to send us args[2:]
        pickle.dump(args[2:], output)
        receiver(input, output)
    elif args[0] == 'put':
        output.write('P')  # we send revs args[2:] to the remote
        sender(input, output, args[2:], True)
    else:
        raise ValueError()
def remote(input=None, output=None):
    """Remote entry point (run via remoteexec); mirror image of local()."""
    # default to unbuffered stdin/stdout carrying the wire protocol
    if not input: input = os.fdopen(0, 'r', 0)
    if not output: output = os.fdopen(1, 'w', 0)
    remote_wd = pickle.load(input)
    os.chdir(remote_wd)
    mode = input.read(1)
    if mode == 'G':
        revs = pickle.load(input)
        log('remote: is sender, and local is receiver\n')
        sender(input, output, revs, False)
    elif mode == 'P':
        log('remote: is receiver, and local is sender\n')
        receiver(input, output)
    else:
        raise ValueError()
def connect(ssh_cmd, args):
    """Bootstrap fetchrev.remote over *ssh_cmd* via remoteexec, then run local()."""
    sys.path.insert(1, sys.path[0]+'/py-remoteexec')
    from remoteexec import remote_exec
    modules = [sys.path[0]+'/fetchrev.py']
    p, s = remote_exec(ssh_cmd=ssh_cmd, module_filenames=modules,
                       main_func='fetchrev.remote')
    # unbuffered socket file objects carry the pickle/byte protocol
    local(s.makefile('r', 0), s.makefile('w', 0), args)
    p.wait()
def main():
    """CLI entry: '<ssh cmd...> -- <mode> <remote_wd> <revs...>'."""
    cli = sys.argv[1:]
    marker = cli.index('--')
    connect(cli[:marker], cli[marker + 1:])
if __name__ == '__main__':
    main()
| Python |
#!/usr/bin/env python2
# Copyright 2012 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Helper script for test.sh.
import os
import sys
import random
import subprocess as SP
basedata = os.urandom(1024*1024)
def write_object(type, content):
    """Store *content* in the git object database as *type*; return its hash."""
    hasher = SP.Popen(['git', 'hash-object', '-t', type, '-w', '--stdin'],
                      stdin=SP.PIPE, stdout=SP.PIPE)
    stdout_data, _ = hasher.communicate(content)
    if hasher.returncode != 0:
        raise OSError()
    return stdout_data.strip()
def update_ref(name, object):
    """Point ref *name* at *object* via git update-ref."""
    SP.check_call(['git', 'update-ref', name, object])
def make_commit(name, parents):
    """Create blob+tree+commit for *name*, set refs/heads/<name>, return the hash."""
    blob = write_object('blob', basedata + name)
    # hand-built tree entry: mode 100644, file "d_<name>", binary sha1 of the
    # blob (.decode('hex') is the Python 2 hex codec)
    tree = write_object('tree', '100644 d_' + name + '\0' + blob.decode('hex'))
    commit_data = 'tree ' + tree + '\n'
    for parent in parents:
        commit_data += 'parent ' + parent + '\n'
    commit_data += '\ncommit ' + name + '\n'
    commit = write_object('commit', commit_data)
    update_ref('refs/heads/' + name, commit)
    return commit
def construct(width, height):
    """Build a *width* x *height* lattice of commits with random parent links.

    Each commit in a row picks up to two random parents from the previous
    row.  The RNG is seeded deterministically, so repeated runs with the
    same dimensions build the same DAG.
    """
    random.seed(width * height * (width + height))
    last = []
    for row in range(height):
        current = []
        for col in range(width):
            # sample at most two parents; the original random.sample(last, 2)
            # raised ValueError whenever the previous row had a single commit
            # (i.e. width == 1)
            parents = random.sample(last, min(2, len(last))) if last else []
            commit = make_commit('r{}c{}'.format(row + 1, col + 1), parents)
            current.append(commit)
        last = current
if __name__ == '__main__':
    # usage: maketest.py WIDTH HEIGHT -- silently a no-op otherwise
    # (sys is already imported at module top, so no local import is needed)
    if len(sys.argv) == 3:
        construct(int(sys.argv[1]), int(sys.argv[2]))
| Python |
#!/usr/bin/env python
import codecs
import re
import jinja2
import markdown
def process_slides():
    """Render slides.md (slides separated by '---') through base.html."""
    with codecs.open('../../presentation-output.html', 'w', encoding='utf8') as outfile:
        md = codecs.open('slides.md', encoding='utf8').read()
        md_slides = md.split('\n---\n')
        print 'Compiled %s slides.' % len(md_slides)
        slides = []
        # Process each slide separately.
        for md_slide in md_slides:
            slide = {}
            sections = md_slide.split('\n\n')
            # Extract metadata at the beginning of the slide (look for key: value)
            # pairs.
            metadata_section = sections[0]
            metadata = parse_metadata(metadata_section)
            slide.update(metadata)
            # skip the first section when it actually held metadata
            remainder_index = metadata and 1 or 0
            # Get the content from the rest of the slide.
            content_section = '\n\n'.join(sections[remainder_index:])
            html = markdown.markdown(content_section)
            slide['content'] = postprocess_html(html, metadata)
            slides.append(slide)
        template = jinja2.Template(open('base.html').read())
        # the template pulls what it needs (e.g. `slides`) out of locals()
        outfile.write(template.render(locals()))
def parse_metadata(section):
    """Given the first part of a slide, returns metadata associated with it.

    Each 'key: value' line becomes one entry; lines without a colon are
    ignored.  Keys and values are whitespace-trimmed.
    """
    metadata = {}
    for line in section.split('\n'):
        key, sep, value = line.partition(':')
        if sep:
            metadata[key.strip()] = value.strip()
    return metadata
def postprocess_html(html, metadata):
    """Returns processed HTML to fit into the slide template format."""
    # opt-in incremental lists: tag every <ul>/<ol> with the "build" class
    if metadata.get('build_lists') == 'true':
        html = (html.replace('<ul>', '<ul class="build">')
                    .replace('<ol>', '<ol class="build">'))
    return html
# build the presentation when run as a script
if __name__ == '__main__':
    process_slides()
| Python |
#!/usr/bin/env python
# CGI script: emit a JavaScript snippet that lets the browser compute its
# clock offset ("timeskew") relative to this server.
import time
t = time.time()
u = time.gmtime(t)
# NOTE(review): %e and %T are POSIX strftime extensions (unavailable in
# Windows strftime); confirm deployment platform or use %d / %H:%M:%S.
s = time.strftime('%a, %e %b %Y %T GMT', u)
print 'Content-Type: text/javascript'
print 'Cache-Control: no-cache'
print 'Date: ' + s
print 'Expires: ' + s
print ''
# server epoch in milliseconds, matching JavaScript's Date().getTime()
print 'var timeskew = new Date().getTime() - ' + str(t*1000) + ';'
| Python |
#!/usr/bin/python
# Copyright 2011 Google, Inc. All Rights Reserved.
# simple script to walk source tree looking for third-party licenses
# dumps resulting html page to stdout
import os, re, mimetypes, sys
# read source directories to scan from command line
SOURCE = sys.argv[1:]
# regex to find /* */ style comment blocks (non-greedy, spans newlines)
COMMENT_BLOCK = re.compile(r"(/\*.+?\*/)", re.MULTILINE | re.DOTALL)
# regex used to detect if comment block is a license
COMMENT_LICENSE = re.compile(r"(license)", re.IGNORECASE)
COMMENT_COPYRIGHT = re.compile(r"(copyright)", re.IGNORECASE)
# MIME types whose files are never scanned for license comments
EXCLUDE_TYPES = [
    "application/xml",
    "image/png",
]
# list of known licenses; keys are derived by stripping all whitespace and
# forcing to lowercase to help combine multiple files that have same license.
KNOWN_LICENSES = {}
class License:
    """One distinct license text and the files that carry it."""
    def __init__(self, license_text):
        self.license_text = license_text
        # insertion-ordered, duplicate-free list of covered files
        self.filenames = []
    def add_file(self, filename):
        """Associate *filename* with this license text, ignoring repeats."""
        already_known = filename in self.filenames
        if not already_known:
            self.filenames.append(filename)
# strips everything that is not a word character when normalizing text
LICENSE_KEY = re.compile(r"[^\w]")
def find_license(license_text):
    """Return the canonical License entry for *license_text*.

    The lookup key is the text lowercased with all non-word characters
    removed, so trivially reformatted copies of the same license collapse
    into a single KNOWN_LICENSES entry.
    """
    # TODO(alice): a lot these licenses are almost identical Apache licenses.
    # Most of them differ in origin/modifications. Consider combining similar
    # licenses.
    normalized_key = LICENSE_KEY.sub("", license_text).lower()
    known = KNOWN_LICENSES.get(normalized_key)
    if known is None:
        known = License(license_text)
        KNOWN_LICENSES[normalized_key] = known
    return known
def discover_license(exact_path, filename):
    """Scan one file and record any license text found in KNOWN_LICENSES.

    A file named ``<prefix>LICENSE`` is treated as the license for
    ``<prefix>``; any other file is searched for comment blocks that
    mention both "license" and "copyright".
    """
    # when filename ends with LICENSE, assume applies to filename prefixed
    if filename.endswith("LICENSE"):
        with open(exact_path) as fp:
            license_text = fp.read()
        target_filename = filename[:-len("LICENSE")]
        if target_filename.endswith("."): target_filename = target_filename[:-1]
        find_license(license_text).add_file(target_filename)
        return None
    # skip excluded types; guess_type() returns a (type, encoding) tuple, so
    # compare only the type component -- the original compared the whole
    # tuple against the string list, which could never match.
    mimetype, _ = mimetypes.guess_type(filename)
    if mimetype in EXCLUDE_TYPES: return None
    with open(exact_path) as fp:
        raw_file = fp.read()
    # include comments that have both "license" and "copyright" in the text
    for comment in COMMENT_BLOCK.finditer(raw_file):
        comment = comment.group(1)
        if COMMENT_LICENSE.search(comment) is None: continue
        if COMMENT_COPYRIGHT.search(comment) is None: continue
        find_license(comment).add_file(filename)
# walk every requested source tree and collect license texts
for source in SOURCE:
    for root, dirs, files in os.walk(source):
        for name in files:
            discover_license(os.path.join(root, name), name)
# emit a single HTML notices page on stdout: one section per distinct license
print "<html><head><style> body { font-family: sans-serif; } pre { background-color: #eeeeee; padding: 1em; white-space: pre-wrap; } </style></head><body>"
for license in KNOWN_LICENSES.values():
    print "<h3>Notices for files:</h3><ul>"
    filenames = license.filenames
    filenames.sort()
    for filename in filenames:
        print "<li>%s</li>" % (filename)
    print "</ul>"
    print "<pre>%s</pre>" % license.license_text
print "</body></html>"
| Python |
#!/usr/bin/python
# Copyright 2011 Google, Inc. All Rights Reserved.
# simple script to walk source tree looking for third-party licenses
# dumps resulting html page to stdout
import os, re, mimetypes, sys
# read source directories to scan from command line
SOURCE = sys.argv[1:]
# regex to find /* */ style comment blocks (non-greedy, spans newlines)
COMMENT_BLOCK = re.compile(r"(/\*.+?\*/)", re.MULTILINE | re.DOTALL)
# regex used to detect if comment block is a license
COMMENT_LICENSE = re.compile(r"(license)", re.IGNORECASE)
COMMENT_COPYRIGHT = re.compile(r"(copyright)", re.IGNORECASE)
# MIME types whose files are never scanned for license comments
EXCLUDE_TYPES = [
    "application/xml",
    "image/png",
]
# list of known licenses; keys are derived by stripping all whitespace and
# forcing to lowercase to help combine multiple files that have same license.
KNOWN_LICENSES = {}
class License:
    """One distinct license text and the files that carry it."""
    def __init__(self, license_text):
        self.license_text = license_text
        # insertion-ordered, duplicate-free list of covered files
        self.filenames = []
    def add_file(self, filename):
        """Associate *filename* with this license text, ignoring repeats."""
        already_known = filename in self.filenames
        if not already_known:
            self.filenames.append(filename)
# strips everything that is not a word character when normalizing text
LICENSE_KEY = re.compile(r"[^\w]")
def find_license(license_text):
    """Return the canonical License entry for *license_text*.

    The lookup key is the text lowercased with all non-word characters
    removed, so trivially reformatted copies of the same license collapse
    into a single KNOWN_LICENSES entry.
    """
    # TODO(alice): a lot these licenses are almost identical Apache licenses.
    # Most of them differ in origin/modifications. Consider combining similar
    # licenses.
    normalized_key = LICENSE_KEY.sub("", license_text).lower()
    known = KNOWN_LICENSES.get(normalized_key)
    if known is None:
        known = License(license_text)
        KNOWN_LICENSES[normalized_key] = known
    return known
def discover_license(exact_path, filename):
    """Inspect one file and record any license text found in it.

    Files named ``*LICENSE`` are treated as whole-file licenses applying to
    the prefix of the name; all other (non-excluded) files are scanned for
    ``/* ... */`` comment blocks mentioning both "license" and "copyright".
    Returns None; results accumulate in KNOWN_LICENSES via find_license().
    """
    # when filename ends with LICENSE, assume applies to filename prefixed
    if filename.endswith("LICENSE"):
        with open(exact_path) as fp:
            license_text = fp.read()
        target_filename = filename[:-len("LICENSE")]
        if target_filename.endswith("."):
            target_filename = target_filename[:-1]
        find_license(license_text).add_file(target_filename)
        return None
    # try searching for license blocks in raw file
    # BUG FIX: mimetypes.guess_type() returns a (type, encoding) tuple, so
    # the old `mimetype in EXCLUDE_TYPES` test could never match an entry
    # like "image/png"; compare only the type component.
    mimetype = mimetypes.guess_type(filename)[0]
    if mimetype in EXCLUDE_TYPES:
        return None
    with open(exact_path) as fp:
        raw_file = fp.read()
    # include comments that have both "license" and "copyright" in the text
    for match in COMMENT_BLOCK.finditer(raw_file):
        comment = match.group(1)
        if COMMENT_LICENSE.search(comment) is None:
            continue
        if COMMENT_COPYRIGHT.search(comment) is None:
            continue
        find_license(comment).add_file(filename)
# Walk every requested source tree and collect license data.
for source in SOURCE:
    for root, dirs, files in os.walk(source):
        for name in files:
            discover_license(os.path.join(root, name), name)
# Emit a single HTML notices page on stdout: one section per distinct license.
print "<html><head><style> body { font-family: sans-serif; } pre { background-color: #eeeeee; padding: 1em; white-space: pre-wrap; } </style></head><body>"
for license in KNOWN_LICENSES.values():
    print "<h3>Notices for files:</h3><ul>"
    filenames = license.filenames
    filenames.sort()
    for filename in filenames:
        print "<li>%s</li>" % (filename)
    print "</ul>"
    print "<pre>%s</pre>" % license.license_text
print "</body></html>"
| Python |
import before_2
# Test fixture: `dir` marks which copy of this module was loaded.
dir = "after"
| Python |
#
| Python |
#
| Python |
#
| Python |
#
| Python |
# Test fixture: importing this module always fails with NotImplementedError.
raise NotImplementedError
# Test fixture: importing this module always fails with ImportError.
raise ImportError
# Test fixture: defines a marker value only.
dir = 'x'
# Test fixture: defines a marker value only.
ddir = 'xx'
import before_1
# Test fixture: `dir` marks which copy of this module was loaded.
dir = '3'
import before_1
# Test fixture: `dir` marks which copy of this module was loaded.
dir = '2'
# Test fixture: `dir` marks which copy of this module was loaded.
dir = "before"
# empty file
| Python |
# empty file
| Python |
# Python script to get both the data and resource fork from a BinHex encoded
# file.
# Author: MURAOKA Taro <koron.kaoriya@gmail.com>
# Last Change: 2012 Jun 29
#
# Copyright (C) 2003,12 MURAOKA Taro <koron.kaoriya@gmail.com>
# THIS FILE IS DISTRIBUTED UNDER THE VIM LICENSE.
import sys
import binhex

# Decode the BinHex (.hqx) file named on the command line.
input = sys.argv[1]
conv = binhex.HexBin(input)
info = conv.FInfo  # Finder info (type/creator); currently unused.
out = conv.FName
out_data = out
out_rsrc = out + '.rsrcfork'
#print 'out_rsrc=' + out_rsrc
print 'In file: ' + input
# Data fork: stream-copy in 128 KB chunks to avoid loading it all at once.
outfile = open(out_data, 'wb')
print ' Out data fork: ' + out_data
while 1:
    d = conv.read(128000)
    if not d: break
    outfile.write(d)
outfile.close()
conv.close_data()
# Resource fork: only written when the first read returns data.
d = conv.read_rsrc(128000)
if d:
    print ' Out rsrc fork: ' + out_rsrc
    outfile = open(out_rsrc, 'wb')
    outfile.write(d)
    while 1:
        d = conv.read_rsrc(128000)
        if not d: break
        outfile.write(d)
    outfile.close()
conv.close()
# vim:set ts=8 sts=4 sw=4 et:
| Python |
# -*- coding: utf-8 -*-
import datetime,urllib, cgi, os
import logging
from google.appengine.api import memcache, urlfetch
from google.appengine.api.labs import taskqueue
from google.appengine.ext import webapp, db
from google.appengine.ext.webapp import template
from google.appengine.ext.webapp.util import run_wsgi_app
from xml.dom import minidom
from gaesessions import get_current_session
webapp.template.register_template_library('filter')
class log(webapp.RequestHandler):
    """Handles player login against the hordes.fr XML API."""

    def post(self):
        """Login page: fetches the connection data and the player's avatar."""
        session = get_current_session()
        key = self.request.get('key')
        # NOTE(review): `s` is not defined anywhere in this handler, so this
        # line raises NameError as written; the intended source of the site
        # key must be restored -- TODO confirm against the original code.
        skey = s.key()
        pseudo = ""
        sock = urllib.urlopen("http://hordes.fr/xml/ghost?k=%s;sk=%s"%(key,skey))
        ame = minidom.parse(sock)
        sock.close()
        sock = urllib.urlopen("http://hordes.fr/xml/?k=%s;sk=%s"%(key,skey))
        city = minidom.parse(sock)
        sock.close()
        try: ame
        except NameError:
            self.redirect('/')
        else:
            citizen = ame.getElementsByTagName('citizen')
            headers = ame.getElementsByTagName('headers')
            pseudo = citizen[0].attributes['name'].value
            logging.info('%s se connecte'%pseudo)
            self.response.out.write(citizen)
            self.response.headers.add_header(
                'Set-Cookie',
                'pseudo=%s; expires=Fri, 31-Dec-2020 23:59:59 GMT' \
                % pseudo)
            session['pseudo'] = pseudo
            # Create the Player record on first login.
            me = Player.all().filter('name = ', pseudo.lower()).get()
            if not me:
                me = Player()
                me.name = pseudo.lower()
            # Best-effort avatar fetch; any failure leaves the avatar empty.
            try:
                me.avatar = urlfetch.Fetch('%s%s'% \
                    (headers[0].attributes['avatarurl'].value,\
                    citizen[0].attributes['avatar'].value)).content
            except:
                me.avatar = ""
            try:
                nowCity = city.getElementsByTagName('city')[0].attributes['city'].value
            except:
                nowCity = u'Ancienne Cité oubliée'
            me.nowCity = nowCity
            me.put()
            #end if
            self.redirect('/')

    def get(self):
        """Blocks GET logins unless running on the Dev server."""
        pseudo = self.request.get('p')
        # BUG FIX: ('ozonegrif') is just a parenthesized string, so `not in`
        # performed a substring test (e.g. 'zone' would be accepted); the
        # one-element tuple makes it the intended membership test.
        if not os.environ['SERVER_SOFTWARE'].startswith('Dev') or pseudo not in ('ozonegrif',):
            self.redirect('/')
            return True
        else:
            session = get_current_session()
            self.response.headers.add_header(
                'Set-Cookie',
                'pseudo=%s; expires=Fri, 31-Dec-2020 23:59:59 GMT'% pseudo)
            session['pseudo'] = pseudo
            self.redirect('/')
class index(webapp.RequestHandler):
    """Renders the home page from the index.html template."""

    def get(self):
        """Home page."""
        session = get_current_session()
        tpl_val = {
            'foo': 'bar',
        }
        # BUG FIX: os.pathname does not exist (AttributeError as written);
        # the dirname helper lives in os.path.
        path = os.path.join(os.path.dirname(__file__), 'index.html')
        self.response.out.write(template.render(path, tpl_val))
# WSGI routing table; debug=True shows stack traces in error responses.
application = webapp.WSGIApplication([ # box.py
    ('/', index),
], debug=True)

def main():
    """Entry point used by the App Engine CGI runtime."""
    run_wsgi_app(application)

if __name__ == "__main__":
    main()
| Python |
# -*- coding: utf-8 -*-
"""A fast, lightweight, and secure session WSGI middleware for use with GAE."""
from Cookie import CookieError, SimpleCookie
from base64 import b64decode, b64encode
import datetime
import hashlib
import hmac
import logging
import pickle
import os
import time
from google.appengine.api import memcache
from google.appengine.ext import db
# Configurable cookie options
COOKIE_NAME_PREFIX = "DgU" # identifies a cookie as being one used by gae-sessions (so you can set cookies too)
COOKIE_PATH = "/"
DEFAULT_COOKIE_ONLY_THRESH = 10240 # 10KB: GAE only allows ~16000B in HTTP header - leave ~6KB for other info
DEFAULT_LIFETIME = datetime.timedelta(days=7)
# constants
SID_LEN = 43 # timestamp (10 chars) + underscore + md5 (32 hex chars)
SIG_LEN = 44 # base 64 encoded HMAC-SHA256
MAX_COOKIE_LEN = 4096
EXPIRE_COOKIE_FMT = ' %s=; expires=Wed, 01-Jan-1970 00:00:00 GMT; Path=' + COOKIE_PATH
COOKIE_FMT = ' ' + COOKIE_NAME_PREFIX + '%02d="%s"; expires=%s; Path=' + COOKIE_PATH + '; HttpOnly'
COOKIE_DATE_FMT = '%a, %d-%b-%Y %H:%M:%S GMT'
COOKIE_OVERHEAD = len(COOKIE_FMT % (0, '', '')) + 29 + 150 # 29=date len, 150=safety margin (e.g., in case browser uses 4000 instead of 4096)
MAX_DATA_PER_COOKIE = MAX_COOKIE_LEN - COOKIE_OVERHEAD
# Module-global session, rebound per-request by SessionMiddleware.__call__.
_current_session = None
def get_current_session():
    """Returns the session associated with the current request."""
    # Set by SessionMiddleware.__call__ at the start of each request; None
    # when called outside a request handled by the middleware.
    return _current_session
def is_gaesessions_key(k):
    """True if cookie name k is one created by gae-sessions."""
    prefix = COOKIE_NAME_PREFIX
    return k[:len(prefix)] == prefix
class BottomLessDict(dict):
    """Dictionary that yields the empty string (instead of raising
    KeyError) when a missing key is looked up."""

    def __missing__(self, absent_key):
        # dict.__getitem__ falls back here for keys not present.
        return ''
class SessionModel(db.Model):
    """Contains session data. key_name is the session ID and pdump contains a
    pickled dictionary which maps session variables to their values."""
    # "pickled+" blob as produced by Session.__encode_data().
    pdump = db.BlobProperty()
class Session(object):
    """Manages loading, reading/writing key-value pairs, and saving of a session.
    ``sid`` - if set, then the session for that sid (if any) is loaded. Otherwise,
    sid will be loaded from the HTTP_COOKIE (if any).
    """
    # Sentinel for self.dirty: data changed, but a datastore write may be
    # skipped (cookie/memcache only) until a "real" change forces one.
    DIRTY_BUT_DONT_PERSIST_TO_DB = 1

    def __init__(self, sid=None, lifetime=DEFAULT_LIFETIME, no_datastore=False,
                 cookie_only_threshold=DEFAULT_COOKIE_ONLY_THRESH, cookie_key=None):
        self.sid = None
        self.cookie_keys = BottomLessDict()
        self.cookie_data = None
        self.data = BottomLessDict()
        self.dirty = False # has the session been changed?
        self.lifetime = lifetime
        self.no_datastore = no_datastore
        self.cookie_only_thresh = cookie_only_threshold
        self.base_key = cookie_key
        if sid:
            self.__set_sid(sid, False)
            self.data = None
        else:
            self.__read_cookie()

    @staticmethod
    def __compute_hmac(base_key, sid, text):
        """Computes the signature for text given base_key and sid."""
        key = base_key + sid
        return b64encode(hmac.new(key, text, hashlib.sha256).digest())

    def __read_cookie(self):
        """Reads the HTTP Cookie and loads the sid and data from it (if any)."""
        try:
            # check the cookie to see if a session has been started
            cookie = SimpleCookie(os.environ['HTTP_COOKIE'])
            self.cookie_keys = filter(is_gaesessions_key, cookie.keys())
            if not self.cookie_keys:
                return # no session yet
            # sort so multi-cookie payloads reassemble in the right order
            self.cookie_keys.sort()
            data = ''.join(cookie[k].value for k in self.cookie_keys)
            i = SIG_LEN + SID_LEN
            sig, sid, b64pdump = data[:SIG_LEN], data[SIG_LEN:i], data[i:]
            pdump = b64decode(b64pdump)
            actual_sig = Session.__compute_hmac(self.base_key, sid, pdump)
            if sig == actual_sig:
                self.__set_sid(sid, False)
                # check for expiration and terminate the session if it has expired
                if time.time() > self.get_expiration():
                    return self.terminate()
                if pdump:
                    self.data = self.__decode_data(pdump)
                else:
                    self.data = None # data is in memcache/db: load it on-demand
            else:
                logging.warn('cookie with invalid sig received from %s: %s' % (os.environ.get('REMOTE_ADDR'), b64pdump))
        except (CookieError, KeyError, IndexError, TypeError):
            # there is no cookie (i.e., no session) or the cookie is invalid
            self.terminate(False)

    def make_cookie_headers(self):
        """Returns a list of cookie headers to send (if any)."""
        # expire all cookies if the session has ended
        if not self.sid:
            return [EXPIRE_COOKIE_FMT % k for k in self.cookie_keys]
        if self.cookie_data is None:
            return [] # no cookie headers need to be sent
        # build the cookie header(s): includes sig, sid, and cookie_data
        sig = Session.__compute_hmac(self.base_key, self.sid, self.cookie_data)
        cv = sig + self.sid + b64encode(self.cookie_data)
        # ceil-divide the payload over as many cookies as needed
        # (integer/floor division under Python 2)
        num_cookies = 1 + (len(cv) - 1) / MAX_DATA_PER_COOKIE
        m = MAX_DATA_PER_COOKIE
        ed = datetime.datetime.fromtimestamp(self.get_expiration()).strftime(COOKIE_DATE_FMT)
        cookies = [COOKIE_FMT % (i, cv[i*m:i*m+m], ed) for i in xrange(num_cookies)]
        # expire old cookies which aren't needed anymore
        old_cookies = xrange(num_cookies, len(self.cookie_keys))
        key = COOKIE_NAME_PREFIX + '%02d'
        cookies_to_ax = [EXPIRE_COOKIE_FMT % (key % i) for i in old_cookies]
        return cookies + cookies_to_ax

    def is_active(self):
        """Returns True if this session is active (i.e., it has been assigned a
        session ID and will be or has been persisted)."""
        return self.sid is not None

    def ensure_data_loaded(self):
        """Fetch the session data if it hasn't been retrieved it yet."""
        if self.data is None and self.sid:
            self.__retrieve_data()

    def get_expiration(self):
        """Returns the timestamp at which this session will expire."""
        # sids are "<expire_ts>_<md5>", so the timestamp is the first field
        try:
            return int(self.sid.split('_')[0])
        except:
            return 0

    def __make_sid(self, expire_ts=None):
        """Returns a new session ID."""
        # make a random ID (random.randrange() is 10x faster but less secure?)
        if not expire_ts:
            expire_dt = datetime.datetime.now() + self.lifetime
            expire_ts = int(time.mktime((expire_dt).timetuple()))
        else:
            expire_ts = int(expire_ts)
        return str(expire_ts) + '_' + hashlib.md5(os.urandom(16)).hexdigest()

    @staticmethod
    def __encode_data(d):
        """Returns a "pickled+" encoding of d. d values of type db.Model are
        protobuf encoded before pickling to minimize CPU usage & data size."""
        # separate protobufs so we'll know how to decode (they are just strings)
        eP = {} # for models encoded as protobufs
        eO = {} # for everything else
        for k,v in d.iteritems():
            if isinstance(v, db.Model):
                eP[k] = db.model_to_protobuf(v)
            else:
                eO[k] = v
        return pickle.dumps((eP,eO), 2)

    @staticmethod
    def __decode_data(pdump):
        """Returns a data dictionary after decoding it from "pickled+" form."""
        eP, eO = pickle.loads(pdump)
        for k,v in eP.iteritems():
            eO[k] = db.model_from_protobuf(v)
        return eO

    def regenerate_id(self, expiration_ts=None):
        """Assigns the session a new session ID (data carries over). This
        should be called whenever a user authenticates to prevent session
        fixation attacks.
        ``expiration_ts`` - The UNIX timestamp the session will expire at. If
        omitted, the session expiration time will not be changed.
        """
        if self.sid:
            self.ensure_data_loaded() # ensure we have the data before we delete it
            if expiration_ts is None:
                expiration_ts = self.get_expiration()
            self.__set_sid(self.__make_sid(expiration_ts))
            self.dirty = True # ensure the data is written to the new session

    def start(self, expiration_ts=None):
        """Starts a new session. expiration specifies when it will expire. If
        expiration is not specified, then self.lifetime will used to
        determine the expiration date.
        Normally this method does not need to be called directly - a session is
        automatically started when the first value is added to the session.
        ``expiration_ts`` - The UNIX timestamp the session will expire at. If
        omitted, the session will expire after the default ``lifetime`` has past
        (as specified in ``SessionMiddleware``).
        """
        self.dirty = True
        self.data = {}
        self.__set_sid(self.__make_sid(expiration_ts), True)

    def terminate(self, clear_data=True):
        """Deletes the session and its data, and expires the user's cookie."""
        if clear_data:
            self.__clear_data()
        self.sid = None
        self.data = {}
        self.dirty = False
        if self.cookie_keys:
            self.cookie_data = '' # trigger the cookies to expire
        else:
            self.cookie_data = None

    def __set_sid(self, sid, make_cookie=True):
        """Sets the session ID, deleting the old session if one existed. The
        session's data will remain intact (only the session ID changes)."""
        if self.sid:
            self.__clear_data()
        self.sid = sid
        self.db_key = db.Key.from_path(SessionModel.kind(), sid)
        # set the cookie if requested
        if make_cookie:
            self.cookie_data = '' # trigger the cookie to be sent

    def __clear_data(self):
        """Deletes this session from memcache and the datastore."""
        if self.sid:
            memcache.delete(self.sid) # not really needed; it'll go away on its own
            try:
                db.delete(self.db_key)
            except:
                pass # either it wasn't in the db (maybe cookie/memcache-only) or db is down => cron will expire it

    def __retrieve_data(self):
        """Sets the data associated with this session after retrieving it from
        memcache or the datastore. Assumes self.sid is set. Checks for session
        expiration after getting the data."""
        pdump = memcache.get(self.sid)
        if pdump is None:
            # memcache lost it, go to the datastore
            if self.no_datastore:
                logging.info("can't find session data in memcache for sid=%s (using memcache only sessions)" % self.sid)
                self.terminate(False) # we lost it; just kill the session
                return
            session_model_instance = db.get(self.db_key)
            if session_model_instance:
                pdump = session_model_instance.pdump
            else:
                logging.error("can't find session data in the datastore for sid=%s" % self.sid)
                self.terminate(False) # we lost it; just kill the session
                return
        self.data = self.__decode_data(pdump)

    def save(self, persist_even_if_using_cookie=False):
        """Saves the data associated with this session IF any changes have been
        made (specifically, if any mutator methods like __setitem__ or the like
        is called).
        If the data is small enough it will be sent back to the user in a cookie
        instead of using memcache and the datastore. If `persist_even_if_using_cookie`
        evaluates to True, memcache and the datastore will also be used. If the
        no_datastore option is set, then the datastore will never be used.
        Normally this method does not need to be called directly - a session is
        automatically saved at the end of the request if any changes were made.
        """
        if not self.sid:
            return # no session is active
        if not self.dirty:
            return # nothing has changed
        dirty = self.dirty
        self.dirty = False # saving, so it won't be dirty anymore
        # do the pickling ourselves b/c we need it for the datastore anyway
        pdump = self.__encode_data(self.data)
        # persist via cookies if it is reasonably small
        if len(pdump)*4/3 <= self.cookie_only_thresh: # 4/3 b/c base64 is ~33% bigger
            self.cookie_data = pdump
            if not persist_even_if_using_cookie:
                return
        elif self.cookie_keys:
            # latest data will only be in the backend, so expire data cookies we set
            self.cookie_data = ''
        memcache.set(self.sid, pdump) # may fail if memcache is down
        # persist the session to the datastore
        if dirty is Session.DIRTY_BUT_DONT_PERSIST_TO_DB or self.no_datastore:
            return
        try:
            SessionModel(key_name=self.sid, pdump=pdump).put()
        except Exception, e:
            logging.warning("unable to persist session to datastore for sid=%s (%s)" % (self.sid,e))

    # Users may interact with the session through a dictionary-like interface.
    def clear(self):
        """Removes all data from the session (but does not terminate it)."""
        if self.sid:
            self.data = {}
            self.dirty = True

    def get(self, key, default=""):
        """Retrieves a value from the session."""
        self.ensure_data_loaded()
        return self.data.get(key, default)

    def has_key(self, key):
        """Returns True if key is set."""
        self.ensure_data_loaded()
        return self.data.has_key(key)

    def pop(self, key, default=None):
        """Removes key and returns its value, or default if key is not present."""
        self.ensure_data_loaded()
        self.dirty = True
        return self.data.pop(key, default)

    def pop_quick(self, key, default=None):
        """Removes key and returns its value, or default if key is not present.
        The change will only be persisted to memcache until another change
        necessitates a write to the datastore."""
        self.ensure_data_loaded()
        if self.dirty is False:
            self.dirty = Session.DIRTY_BUT_DONT_PERSIST_TO_DB
        return self.data.pop(key, default)

    def set_quick(self, key, value):
        """Set a value named key on this session. The change will only be
        persisted to memcache until another change necessitates a write to the
        datastore. This will start a session if one is not already active."""
        dirty = self.dirty
        self[key] = value
        if dirty is False or dirty is Session.DIRTY_BUT_DONT_PERSIST_TO_DB:
            self.dirty = Session.DIRTY_BUT_DONT_PERSIST_TO_DB

    def __getitem__(self, key):
        """Returns the value associated with key on this session."""
        self.ensure_data_loaded()
        return self.data.__getitem__(key)

    def __setitem__(self, key, value):
        """Set a value named key on this session. This will start a session if
        one is not already active."""
        self.ensure_data_loaded()
        if not self.sid:
            self.start()
        self.data.__setitem__(key, value)
        self.dirty = True

    def __delitem__(self, key):
        """Deletes the value associated with key on this session."""
        self.ensure_data_loaded()
        self.data.__delitem__(key)
        self.dirty = True

    def __iter__(self):
        """Returns an iterator over the keys (names) of the stored values."""
        self.ensure_data_loaded()
        return self.data.iterkeys()

    def __contains__(self, key):
        """Returns True if key is present on this session."""
        self.ensure_data_loaded()
        return self.data.__contains__(key)

    def __str__(self):
        """Returns a string representation of the session."""
        if self.sid:
            self.ensure_data_loaded()
            return "SID=%s %s" % (self.sid, self.data)
        else:
            return "uninitialized session"
class SessionMiddleware(object):
    """WSGI middleware that adds session support.
    ``cookie_key`` - A key used to secure cookies so users cannot modify their
    content. Keys should be at least 32 bytes (RFC2104). Tip: generate your
    key using ``os.urandom(64)`` but do this OFFLINE and copy/paste the output
    into a string which you pass in as ``cookie_key``. If you use ``os.urandom()``
    to dynamically generate your key at runtime then any existing sessions will
    become junk every time your app starts up!
    ``lifetime`` - ``datetime.timedelta`` that specifies how long a session may last. Defaults to 7 days.
    ``no_datastore`` - By default all writes also go to the datastore in case
    memcache is lost. Set to True to never use the datastore. This improves
    write performance but sessions may be occassionally lost.
    ``cookie_only_threshold`` - A size in bytes. If session data is less than this
    threshold, then session data is kept only in a secure cookie. This avoids
    memcache/datastore latency which is critical for small sessions. Larger
    sessions are kept in memcache+datastore instead. Defaults to 10KB.
    """
    def __init__(self, app, cookie_key, lifetime=DEFAULT_LIFETIME, no_datastore=False, cookie_only_threshold=DEFAULT_COOKIE_ONLY_THRESH):
        self.app = app
        self.lifetime = lifetime
        self.no_datastore = no_datastore
        self.cookie_only_thresh = cookie_only_threshold
        self.cookie_key = cookie_key
        if not self.cookie_key:
            raise ValueError("cookie_key MUST be specified")
        if len(self.cookie_key) < 32:
            raise ValueError("RFC2104 recommends you use at least a 32 character key. Try os.urandom(64) to make a key.")

    def __call__(self, environ, start_response):
        # initialize a session for the current user
        global _current_session
        _current_session = Session(lifetime=self.lifetime, no_datastore=self.no_datastore, cookie_only_threshold=self.cookie_only_thresh, cookie_key=self.cookie_key)
        # create a hook for us to insert a cookie into the response headers
        def my_start_response(status, headers, exc_info=None):
            _current_session.save() # store the session if it was changed
            for ch in _current_session.make_cookie_headers():
                headers.append(('Set-Cookie', ch))
            return start_response(status, headers, exc_info)
        # let the app do its thing
        return self.app(environ, my_start_response)
class DjangoSessionMiddleware(object):
    """Django middleware that adds session support. You must specify the
    session configuration parameters by modifying the call to ``SessionMiddleware``
    in ``DjangoSessionMiddleware.__init__()`` since Django cannot call an
    initialization method with parameters.
    """
    def __init__(self):
        # Wrap the WSGI middleware around a no-op app; only its session
        # bookkeeping and cookie-header generation are used here.
        fake_app = lambda environ, start_response : start_response
        self.wrapped_wsgi_middleware = SessionMiddleware(fake_app, cookie_key='you MUST change this')
        self.response_handler = None

    def process_request(self, request):
        self.response_handler = self.wrapped_wsgi_middleware(None, lambda status, headers, exc_info : headers)
        request.session = get_current_session() # for convenience

    def process_response(self, request, response):
        # Copy any Set-Cookie headers produced by the session layer onto the
        # Django response object.
        if self.response_handler:
            session_headers = self.response_handler(None, [], None)
            for k,v in session_headers:
                response[k] = v
            self.response_handler = None
        return response
def delete_expired_sessions():
    """Deletes expired sessions from the datastore.
    If there are more than 500 expired sessions, only 500 will be removed.
    Returns True if all expired sessions have been removed.
    """
    # Session IDs begin with their expiration timestamp, so every key that
    # sorts below "now" followed by a maximal code point is expired.
    cutoff = db.Key.from_path('SessionModel', unicode(int(time.time())) + u'\ufffd')
    query = db.Query(SessionModel, keys_only=True)
    query.filter('__key__ < ', cutoff)
    expired = query.fetch(500)
    db.delete(expired)
    logging.info('gae-sessions: deleted %d expired sessions from the datastore' % len(expired))
    return len(expired) < 500
| Python |
#!/usr/bin/env python
import codecs
import re
import jinja2
import markdown
def process_slides():
    """Compiles slides.md into a single HTML presentation via base.html."""
    with codecs.open('../../presentation-output.html', 'w', encoding='utf8') as outfile:
        md = codecs.open('slides.md', encoding='utf8').read()
        # Slides are delimited by a line containing only "---".
        md_slides = md.split('\n---\n')
        print 'Compiled %s slides.' % len(md_slides)
        slides = []
        # Process each slide separately.
        for md_slide in md_slides:
            slide = {}
            sections = md_slide.split('\n\n')
            # Extract metadata at the beginning of the slide (look for key: value)
            # pairs.
            metadata_section = sections[0]
            metadata = parse_metadata(metadata_section)
            slide.update(metadata)
            # Skip the first section only when it actually held metadata.
            remainder_index = metadata and 1 or 0
            # Get the content from the rest of the slide.
            content_section = '\n\n'.join(sections[remainder_index:])
            html = markdown.markdown(content_section)
            slide['content'] = postprocess_html(html, metadata)
            slides.append(slide)
        # The template reads `slides` (and anything else local) via locals().
        template = jinja2.Template(open('base.html').read())
        outfile.write(template.render(locals()))
def parse_metadata(section):
    """Given the first part of a slide, returns metadata associated with it."""
    metadata = {}
    for raw_line in section.split('\n'):
        # Split on the first colon only; lines without one carry no metadata.
        key, sep, value = raw_line.partition(':')
        if sep:
            metadata[key.strip()] = value.strip()
    return metadata
def postprocess_html(html, metadata):
    """Returns processed HTML to fit into the slide template format.

    When the slide's metadata sets build_lists to 'true', lists are given
    the "build" class so they reveal incrementally.
    """
    # `metadata.get(...) == 'true'` covers both the presence check and the
    # value check that were previously expressed as two separate lookups.
    if metadata.get('build_lists') == 'true':
        html = html.replace('<ul>', '<ul class="build">')
        html = html.replace('<ol>', '<ol class="build">')
    return html
# Compile the deck when run as a script.
if __name__ == '__main__':
    process_slides()
| Python |
#!/usr/bin/python
import httplib2
import os
import sys
from apiclient.discovery import build
from oauth2client.file import Storage
from oauth2client.client import flow_from_clientsecrets
from oauth2client.tools import run
# CLIENT_SECRETS_FILE, name of a file containing the OAuth 2.0 information for
# this application, including client_id and client_secret. You can acquire an
# ID/secret pair from the API Access tab on the Google APIs Console
# http://code.google.com/apis/console#access
# For more information about using OAuth2 to access Google APIs, please visit:
# https://developers.google.com/accounts/docs/OAuth2
# For more information about the client_secrets.json file format, please visit:
# https://developers.google.com/api-client-library/python/guide/aaa_client_secrets
# Please ensure that you have enabled the YouTube Data API for your project.
CLIENT_SECRETS_FILE = "client_secrets.json"
# Helpful message to display if the CLIENT_SECRETS_FILE is missing.
MISSING_CLIENT_SECRETS_MESSAGE = """
WARNING: Please configure OAuth 2.0
To make this sample run you will need to populate the client_secrets.json file
found at:
%s
with information from the APIs Console
https://code.google.com/apis/console#access
For more information about the client_secrets.json file format, please visit:
https://developers.google.com/api-client-library/python/guide/aaa_client_secrets
""" % os.path.abspath(os.path.join(os.path.dirname(__file__),
CLIENT_SECRETS_FILE))
# An OAuth 2 access scope that allows for full read/write access.
YOUTUBE_READ_WRITE_SCOPE = "https://www.googleapis.com/auth/youtube"
YOUTUBE_API_SERVICE_NAME = "youtube"
YOUTUBE_API_VERSION = "v3"
# Run the OAuth 2.0 flow, caching credentials in a file next to this script.
flow = flow_from_clientsecrets(CLIENT_SECRETS_FILE,
    message=MISSING_CLIENT_SECRETS_MESSAGE,
    scope=YOUTUBE_READ_WRITE_SCOPE)
storage = Storage("%s-oauth2.json" % sys.argv[0])
credentials = storage.get()
if credentials is None or credentials.invalid:
    credentials = run(flow, storage)
youtube = build(YOUTUBE_API_SERVICE_NAME, YOUTUBE_API_VERSION,
    http=credentials.authorize(httplib2.Http()))
# Create a private playlist on the authorized user's channel.
playlists_insert_response = youtube.playlists().insert(
    part="snippet,status",
    body=dict(
        snippet=dict(
            title="Test Playlist",
            description="A private playlist created with the YouTube API v3"
        ),
        status=dict(
            privacyStatus="private"
        )
    )
).execute()
print "New playlist id: %s" % playlists_insert_response["id"]
#!/usr/bin/python
import httplib2
import os
import sys
from apiclient.discovery import build
from oauth2client.file import Storage
from oauth2client.client import flow_from_clientsecrets
from oauth2client.tools import run
# CLIENT_SECRETS_FILE, name of a file containing the OAuth 2.0 information for
# this application, including client_id and client_secret. You can acquire an
# ID/secret pair from the API Access tab on the Google APIs Console
# http://code.google.com/apis/console#access
# For more information about using OAuth2 to access Google APIs, please visit:
# https://developers.google.com/accounts/docs/OAuth2
# For more information about the client_secrets.json file format, please visit:
# https://developers.google.com/api-client-library/python/guide/aaa_client_secrets
# Please ensure that you have enabled the YouTube Data API for your project.
CLIENT_SECRETS_FILE = "client_secrets.json"
# Helpful message to display if the CLIENT_SECRETS_FILE is missing.
MISSING_CLIENT_SECRETS_MESSAGE = """
WARNING: Please configure OAuth 2.0
To make this sample run you will need to populate the client_secrets.json file
found at:
%s
with information from the APIs Console
https://code.google.com/apis/console#access
For more information about the client_secrets.json file format, please visit:
https://developers.google.com/api-client-library/python/guide/aaa_client_secrets
""" % os.path.abspath(os.path.join(os.path.dirname(__file__),
CLIENT_SECRETS_FILE))
# A limited OAuth 2 access scope that allows for uploading files, but not other
# types of account access.
YOUTUBE_READONLY_SCOPE = "https://www.googleapis.com/auth/youtube.readonly"
YOUTUBE_API_SERVICE_NAME = "youtube"
YOUTUBE_API_VERSION = "v3"
# Run the OAuth 2.0 flow, caching credentials in a file next to this script.
flow = flow_from_clientsecrets(CLIENT_SECRETS_FILE,
    message=MISSING_CLIENT_SECRETS_MESSAGE,
    scope=YOUTUBE_READONLY_SCOPE)
storage = Storage("%s-oauth2.json" % sys.argv[0])
credentials = storage.get()
if credentials is None or credentials.invalid:
    credentials = run(flow, storage)
youtube = build(YOUTUBE_API_SERVICE_NAME, YOUTUBE_API_VERSION,
    http=credentials.authorize(httplib2.Http()))
# Look up the "uploads" playlist of each of the authorized user's channels.
channels_response = youtube.channels().list(
    mine=True,
    part="contentDetails"
).execute()
for channel in channels_response["items"]:
    uploads_list_id = channel["contentDetails"]["relatedPlaylists"]["uploads"]
    print "Videos in list %s" % uploads_list_id
    # Page through the playlist 50 items at a time; the loop ends when the
    # response carries no nextPageToken.
    next_page_token = ""
    while next_page_token is not None:
        playlistitems_response = youtube.playlistItems().list(
            playlistId=uploads_list_id,
            part="snippet",
            maxResults=50,
            pageToken=next_page_token
        ).execute()
        for playlist_item in playlistitems_response["items"]:
            title = playlist_item["snippet"]["title"]
            video_id = playlist_item["snippet"]["resourceId"]["videoId"]
            print "%s (%s)" % (title, video_id)
        next_page_token = playlistitems_response.get("tokenPagination", {}).get(
            "nextPageToken")
    print
#!/usr/bin/python
import httplib2
import os
import random
import sys
import time
from apiclient.discovery import build
from apiclient.errors import HttpError
from apiclient.http import MediaFileUpload
from oauth2client.file import Storage
from oauth2client.client import flow_from_clientsecrets
from oauth2client.tools import run
from optparse import OptionParser
# CLIENT_SECRETS_FILE, name of a file containing the OAuth 2.0 information for
# this application, including client_id and client_secret. You can acquire an
# ID/secret pair from the API Access tab on the Google APIs Console
# http://code.google.com/apis/console#access
# For more information about using OAuth2 to access Google APIs, please visit:
# https://developers.google.com/accounts/docs/OAuth2
# For more information about the client_secrets.json file format, please visit:
# https://developers.google.com/api-client-library/python/guide/aaa_client_secrets
# Please ensure that you have enabled the YouTube Data API for your project.
CLIENT_SECRETS_FILE = "client_secrets.json"
# An OAuth 2 access scope that allows for full read/write access.
YOUTUBE_SCOPE = "https://www.googleapis.com/auth/youtube"
YOUTUBE_API_SERVICE_NAME = "youtube"
YOUTUBE_API_VERSION = "v3"
# Helpful message to display if the CLIENT_SECRETS_FILE is missing.
MISSING_CLIENT_SECRETS_MESSAGE = """
WARNING: Please configure OAuth 2.0
To make this sample run you will need to populate the client_secrets.json file
found at:
%s
with information from the APIs Console
https://code.google.com/apis/console#access
For more information about the client_secrets.json file format, please visit:
https://developers.google.com/api-client-library/python/guide/aaa_client_secrets
""" % os.path.abspath(os.path.join(os.path.dirname(__file__),
CLIENT_SECRETS_FILE))
def get_authenticated_service():
    """Builds an authorized YouTube API client, running the OAuth 2.0 flow
    if cached credentials are missing or invalid."""
    flow = flow_from_clientsecrets(CLIENT_SECRETS_FILE, scope=YOUTUBE_SCOPE,
        message=MISSING_CLIENT_SECRETS_MESSAGE)
    storage = Storage("%s-oauth2.json" % sys.argv[0])
    credentials = storage.get()
    if credentials is None or credentials.invalid:
        credentials = run(flow, storage)
    return build(YOUTUBE_API_SERVICE_NAME, YOUTUBE_API_VERSION,
        http=credentials.authorize(httplib2.Http()))
def add_subscription(options):
    """Subscribes the authorized user to options.channel_id and prints the
    subscribed channel's title."""
    youtube = get_authenticated_service()
    add_subscription_response = youtube.subscriptions().insert(
        part='snippet',
        body=dict(
            snippet=dict(
                resourceId=dict(
                    kind='youtube#channel',
                    channelId=options.channel_id
                )
            )
        )).execute()
    channel_title = add_subscription_response["snippet"]["title"]
    print "A subscription to %s was added." % channel_title
# Parse --channel-id from the command line and subscribe to that channel.
if __name__ == "__main__":
    parser = OptionParser()
    parser.add_option("--channel-id", dest="channel_id", help="Channel ID",
        default="UCtVd0c0tGXuTSbU5d8cSBUg")
    (options, args) = parser.parse_args()
    add_subscription(options)
| Python |
#!/usr/bin/python
import httplib2
import os
import sys
from apiclient.discovery import build
from oauth2client.file import Storage
from oauth2client.client import flow_from_clientsecrets
from oauth2client.tools import run
from optparse import OptionParser
# CLIENT_SECRETS_FILE, name of a file containing the OAuth 2.0 information for
# this application, including client_id and client_secret. You can acquire an
# ID/secret pair from the API Access tab on the Google APIs Console
# http://code.google.com/apis/console#access
# For more information about using OAuth2 to access Google APIs, please visit:
# https://developers.google.com/accounts/docs/OAuth2
# For more information about the client_secrets.json file format, please visit:
# https://developers.google.com/api-client-library/python/guide/aaa_client_secrets
# Please ensure that you have enabled the YouTube Data API for your project.
# Name of the file holding this application's OAuth 2.0 client_id/client_secret.
CLIENT_SECRETS_FILE = "client_secrets.json"
# An OAuth 2 access scope that allows for full read/write access.
YOUTUBE_READ_WRITE_SCOPE = "https://www.googleapis.com/auth/youtube"
# Service name and version passed to apiclient.discovery.build().
YOUTUBE_API_SERVICE_NAME = "youtube"
YOUTUBE_API_VERSION = "v3"
# Helpful message to display if the CLIENT_SECRETS_FILE is missing.
MISSING_CLIENT_SECRETS_MESSAGE = """
WARNING: Please configure OAuth 2.0
To make this sample run you will need to populate the client_secrets.json file
found at:
%s
with information from the APIs Console
https://code.google.com/apis/console#access
For more information about the client_secrets.json file format, please visit:
https://developers.google.com/api-client-library/python/guide/aaa_client_secrets
""" % os.path.abspath(os.path.join(os.path.dirname(__file__),
                                   CLIENT_SECRETS_FILE))
def get_authenticated_service():
  """Run the OAuth 2.0 flow and return an authorized YouTube service object.

  Credentials are cached in a "<script>-oauth2.json" file next to the script;
  the interactive flow only runs when no valid cached credentials exist.
  """
  oauth_flow = flow_from_clientsecrets(
      CLIENT_SECRETS_FILE,
      scope=YOUTUBE_READ_WRITE_SCOPE,
      message=MISSING_CLIENT_SECRETS_MESSAGE)
  credential_store = Storage("%s-oauth2.json" % sys.argv[0])
  creds = credential_store.get()
  if creds is None or creds.invalid:
    # Missing, expired, or revoked credentials: run the interactive flow.
    creds = run(oauth_flow, credential_store)
  authorized_http = creds.authorize(httplib2.Http())
  return build(YOUTUBE_API_SERVICE_NAME, YOUTUBE_API_VERSION,
               http=authorized_http)
def upload_thumbnail(youtube, video_id, file):
  """Set the custom thumbnail of `video_id` to the image file at `file`."""
  set_request = youtube.thumbnails().set(
      videoId=video_id,
      media_body=file)
  set_request.execute()
if __name__ == "__main__":
  parser = OptionParser()
  parser.add_option("--videoid", dest="videoid",
                    help="Required; id of video whose thumbnail you're updating.")
  parser.add_option("--file", dest="file",
                    help="Required; path to thumbnail image file.")
  (options, args) = parser.parse_args()
  # Both arguments are required: print usage and stop if either is missing.
  if not options.videoid or not options.file:
    parser.print_help()
    exit()
  # Fail early with a clear message if the image file does not exist.
  if not os.path.exists(options.file):
    exit("Please specify a valid file using the --file= parameter.")
  youtube = get_authenticated_service()
  upload_thumbnail(youtube, options.videoid, options.file)
  print "The custom thumbnail was successfully set."
| Python |
#!/usr/bin/python
import httplib2
import os
import random
import sys
import time
from apiclient.discovery import build
from apiclient.errors import HttpError
from oauth2client.file import Storage
from oauth2client.client import flow_from_clientsecrets
from oauth2client.tools import run
from optparse import OptionParser
# CLIENT_SECRETS_FILE, name of a file containing the OAuth 2.0 information for
# this application, including client_id and client_secret. You can acquire an
# ID/secret pair from the API Access tab on the Google APIs Console
# http://code.google.com/apis/console#access
# For more information about using OAuth2 to access Google APIs, please visit:
# https://developers.google.com/accounts/docs/OAuth2
# For more information about the client_secrets.json file format, please visit:
# https://developers.google.com/api-client-library/python/guide/aaa_client_secrets
# Please ensure that you have enabled the YouTube Data API for your project.
# Name of the file holding this application's OAuth 2.0 client_id/client_secret.
CLIENT_SECRETS_FILE = "client_secrets.json"
# A read-only OAuth 2 access scope: sufficient for listing broadcasts, but not
# for modifying any account data. (The original comment incorrectly described
# this as an upload scope.)
YOUTUBE_READONLY_SCOPE = "https://www.googleapis.com/auth/youtube.readonly"
YOUTUBE_API_SERVICE_NAME = "youtube"
YOUTUBE_API_VERSION = "v3"
# Helpful message to display if the CLIENT_SECRETS_FILE is missing.
MISSING_CLIENT_SECRETS_MESSAGE = """
WARNING: Please configure OAuth 2.0
To make this sample run you will need to populate the client_secrets.json file
found at:
%s
with information from the APIs Console
https://code.google.com/apis/console#access
For more information about the client_secrets.json file format, please visit:
https://developers.google.com/api-client-library/python/guide/aaa_client_secrets
""" % os.path.abspath(os.path.join(os.path.dirname(__file__),
                                   CLIENT_SECRETS_FILE))
def get_authenticated_service():
  """Run the OAuth 2.0 flow and return an authorized YouTube service object.

  Credentials are cached in a "<script>-oauth2.json" file next to the script;
  the interactive flow only runs when no valid cached credentials exist.
  """
  oauth_flow = flow_from_clientsecrets(
      CLIENT_SECRETS_FILE,
      scope=YOUTUBE_READONLY_SCOPE,
      message=MISSING_CLIENT_SECRETS_MESSAGE)
  credential_store = Storage("%s-oauth2.json" % sys.argv[0])
  creds = credential_store.get()
  if creds is None or creds.invalid:
    # Missing, expired, or revoked credentials: run the interactive flow.
    creds = run(oauth_flow, credential_store)
  authorized_http = creds.authorize(httplib2.Http())
  return build(YOUTUBE_API_SERVICE_NAME, YOUTUBE_API_VERSION,
               http=authorized_http)
def list_broadcasts(options):
youtube = get_authenticated_service()
list_broadcasts_response = youtube.liveBroadcasts().list(
broadcastStatus=options.broadcastStatus,
part="id,snippet",
maxResults=options.maxResults
).execute()
if not list_broadcasts_response["items"]:
print "No broadcast was not found."
sys.exit(1)
broadcasts = []
for result in list_broadcasts_response.get("items", []):
broadcasts.append("%s (%s)" % (result["snippet"]["title"],result["id"]))
print "Broadcasts:\n", "\n".join(broadcasts), "\n"
if __name__ == "__main__":
  parser = OptionParser()
  # NOTE(review): --part is parsed but never used; list_broadcasts()
  # hard-codes part="id,snippet".
  parser.add_option("--part", dest="part", help="Response parts",
                    default="id")
  parser.add_option("--broadcast-status", dest="broadcastStatus",
                    help="Broadcast status", default="all")
  parser.add_option("--max-results", dest="maxResults",
                    help="Max results", default=25)
  (options, args) = parser.parse_args()
  list_broadcasts(options)
| Python |
#!/usr/bin/python
import httplib2
import os
import random
import sys
import time
from apiclient.discovery import build
from apiclient.errors import HttpError
from oauth2client.file import Storage
from oauth2client.client import flow_from_clientsecrets
from oauth2client.tools import run
from optparse import OptionParser
# this application, including client_id and client_secret. You can acquire an
# ID/secret pair from the API Access tab on the Google APIs Console
# http://code.google.com/apis/console#access
# For more information about using OAuth2 to access Google APIs, please visit:
# https://developers.google.com/accounts/docs/OAuth2
# For more information about the client_secrets.json file format, please visit:
# https://developers.google.com/api-client-library/python/guide/aaa_client_secrets
# Please ensure that you have enabled the YouTube Data API for your project.
# Name of the file holding this application's OAuth 2.0 client_id/client_secret.
CLIENT_SECRETS_FILE = "client_secrets.json"
# An OAuth 2 access scope that allows for full read/write access.
# NOTE(review): the constant name is misleading -- the value is the *full
# read/write* scope, which the broadcast/stream insert calls below require;
# renaming it would also have to touch get_authenticated_service().
YOUTUBE_READONLY_SCOPE = "https://www.googleapis.com/auth/youtube"
YOUTUBE_API_SERVICE_NAME = "youtube"
YOUTUBE_API_VERSION = "v3"
# Helpful message to display if the CLIENT_SECRETS_FILE is missing.
MISSING_CLIENT_SECRETS_MESSAGE = """
WARNING: Please configure OAuth 2.0
To make this sample run you will need to populate the client_secrets.json file
found at:
%s
with information from the APIs Console
https://code.google.com/apis/console#access
For more information about the client_secrets.json file format, please visit:
https://developers.google.com/api-client-library/python/guide/aaa_client_secrets
""" % os.path.abspath(os.path.join(os.path.dirname(__file__),
                                   CLIENT_SECRETS_FILE))
def get_authenticated_service():
  """Run the OAuth 2.0 flow and return an authorized YouTube service object.

  Credentials are cached in a "<script>-oauth2.json" file next to the script;
  the interactive flow only runs when no valid cached credentials exist.
  """
  oauth_flow = flow_from_clientsecrets(
      CLIENT_SECRETS_FILE,
      scope=YOUTUBE_READONLY_SCOPE,
      message=MISSING_CLIENT_SECRETS_MESSAGE)
  credential_store = Storage("%s-oauth2.json" % sys.argv[0])
  creds = credential_store.get()
  if creds is None or creds.invalid:
    # Missing, expired, or revoked credentials: run the interactive flow.
    creds = run(oauth_flow, credential_store)
  authorized_http = creds.authorize(httplib2.Http())
  return build(YOUTUBE_API_SERVICE_NAME, YOUTUBE_API_VERSION,
               http=authorized_http)
def insert_broadcast(youtube, options):
insert_broadcast_response = youtube.liveBroadcasts().insert(
part="snippet,status",
body=dict(
kind='youtube#liveBroadcast',
snippet=dict(
title=options.broadcast_title,
scheduledStartTime=options.start_time,
scheduledEndTime=options.end_time
),
status=dict(
privacyStatus=options.privacy_status
))).execute()
snippet = insert_broadcast_response["snippet"]
print "Broadcast '%s' with title '%s' was published at '%s'" % (insert_broadcast_response["id"], snippet["title"], snippet["publishedAt"])
return insert_broadcast_response["id"]
def insert_stream(youtube, options):
insert_stream_response = youtube.liveStreams().insert(
part="snippet,cdn",
body=dict(
kind='youtube#liveStream',
snippet=dict(
title=options.stream_title
),
cdn=dict(
format="1080p",
ingestionType="rtmp"
))).execute()
snippet = insert_stream_response["snippet"]
print "Stream '%s' with title '%s' was inserted" % (insert_stream_response["id"], snippet["title"])
return insert_stream_response["id"]
def bind_broadcast(youtube, broadcast_id, stream_id):
bind_broadcast_response = youtube.liveBroadcasts().bind(
part="id,contentDetails",
id=broadcast_id,
streamId=stream_id).execute()
print "Broadcast '%s' was bound to stream '%s'." % (bind_broadcast_response["id"], bind_broadcast_response["contentDetails"]["boundStreamId"])
if __name__ == "__main__":
  parser = OptionParser()
  parser.add_option("--broadcast-title", dest="broadcast_title", help="Broadcast title",
                    default="New Broadcast")
  parser.add_option("--privacy-status", dest="privacy_status",
                    help="Broadcast privacy status", default="private")
  parser.add_option("--start-time", dest="start_time",
                    help="Scheduled start time", default='2014-01-30T00:00:00.000Z')
  parser.add_option("--end-time", dest="end_time",
                    help="Scheduled end time", default='2014-01-31T00:00:00.000Z')
  parser.add_option("--stream-title", dest="stream_title", help="Stream title",
                    default="New Stream")
  (options, args) = parser.parse_args()
  # Create the broadcast and the stream, then bind them together so the
  # broadcast has a video source.
  youtube = get_authenticated_service()
  broadcast_id = insert_broadcast(youtube, options)
  stream_id = insert_stream(youtube, options)
  bind_broadcast(youtube, broadcast_id, stream_id)
| Python |
#!/usr/bin/python
from apiclient.discovery import build
from optparse import OptionParser
# Set DEVELOPER_KEY to the "API key" value from the "Access" tab of the
# Google APIs Console http://code.google.com/apis/console#access
# Please ensure that you have enabled the YouTube Data API for your project.
# NOTE(review): requests will fail until this placeholder is replaced.
DEVELOPER_KEY = "REPLACE_ME"
YOUTUBE_API_SERVICE_NAME = "youtube"
YOUTUBE_API_VERSION = "v3"
def youtube_search(options):
youtube = build(YOUTUBE_API_SERVICE_NAME, YOUTUBE_API_VERSION,
developerKey=DEVELOPER_KEY)
search_response = youtube.search().list(
q=options.q,
part="id,snippet",
maxResults=options.maxResults
).execute()
videos = []
channels = []
playlists = []
for search_result in search_response.get("items", []):
if search_result["id"]["kind"] == "youtube#video":
videos.append("%s (%s)" % (search_result["snippet"]["title"],
search_result["id"]["videoId"]))
elif search_result["id"]["kind"] == "youtube#channel":
channels.append("%s (%s)" % (search_result["snippet"]["title"],
search_result["id"]["channelId"]))
elif search_result["id"]["kind"] == "youtube#playlist":
playlists.append("%s (%s)" % (search_result["snippet"]["title"],
search_result["id"]["playlistId"]))
print "Videos:\n", "\n".join(videos), "\n"
print "Channels:\n", "\n".join(channels), "\n"
print "Playlists:\n", "\n".join(playlists), "\n"
if __name__ == "__main__":
  parser = OptionParser()
  parser.add_option("--q", dest="q", help="Search term",
                    default="Google")
  parser.add_option("--max-results", dest="maxResults",
                    help="Max results", default=25)
  (options, args) = parser.parse_args()
  youtube_search(options)
#!/usr/bin/python
import httplib2
import os
import sys
from apiclient.discovery import build
from oauth2client.file import Storage
from oauth2client.client import flow_from_clientsecrets
from oauth2client.tools import run
from optparse import OptionParser
# CLIENT_SECRETS_FILE, name of a file containing the OAuth 2.0 information for
# this application, including client_id and client_secret. You can acquire an
# ID/secret pair from the API Access tab on the Google APIs Console
# http://code.google.com/apis/console#access
# For more information about using OAuth2 to access Google APIs, please visit:
# https://developers.google.com/accounts/docs/OAuth2
# For more information about the client_secrets.json file format, please visit:
# https://developers.google.com/api-client-library/python/guide/aaa_client_secrets
# Please ensure that you have enabled the YouTube Data API for your project.
# Name of the file holding this application's OAuth 2.0 client_id/client_secret.
CLIENT_SECRETS_FILE = "client_secrets.json"
# An OAuth 2 access scope that allows for full read/write access.
YOUTUBE_READ_WRITE_SCOPE = "https://www.googleapis.com/auth/youtube"
YOUTUBE_API_SERVICE_NAME = "youtube"
YOUTUBE_API_VERSION = "v3"
# Helpful message to display if the CLIENT_SECRETS_FILE is missing.
MISSING_CLIENT_SECRETS_MESSAGE = """
WARNING: Please configure OAuth 2.0
To make this sample run you will need to populate the client_secrets.json file
found at:
%s
with information from the APIs Console
https://code.google.com/apis/console#access
For more information about the client_secrets.json file format, please visit:
https://developers.google.com/api-client-library/python/guide/aaa_client_secrets
""" % os.path.abspath(os.path.join(os.path.dirname(__file__),
                                   CLIENT_SECRETS_FILE))
def get_authenticated_service():
  """Run the OAuth 2.0 flow and return an authorized YouTube service object.

  Credentials are cached in a "<script>-oauth2.json" file next to the script;
  the interactive flow only runs when no valid cached credentials exist.
  """
  oauth_flow = flow_from_clientsecrets(
      CLIENT_SECRETS_FILE,
      scope=YOUTUBE_READ_WRITE_SCOPE,
      message=MISSING_CLIENT_SECRETS_MESSAGE)
  credential_store = Storage("%s-oauth2.json" % sys.argv[0])
  creds = credential_store.get()
  if creds is None or creds.invalid:
    # Missing, expired, or revoked credentials: run the interactive flow.
    creds = run(oauth_flow, credential_store)
  authorized_http = creds.authorize(httplib2.Http())
  return build(YOUTUBE_API_SERVICE_NAME, YOUTUBE_API_VERSION,
               http=authorized_http)
def post_bulletin(youtube, options):
body = dict(
snippet=dict(
description=options.message
)
)
if options.videoid:
body["contentDetails"] = dict(
bulletin=dict(
resourceId=dict(
kind="youtube#video",
videoId=options.videoid
)
)
)
if options.playlistid:
body["contentDetails"] = dict(
bulletin=dict(
resourceId=dict(
kind="youtube#playlist",
playlistId=options.playlistid
)
)
)
youtube.activities().insert(
part=",".join(body.keys()),
body=body
).execute()
print "The bulletin was posted to your channel."
if __name__ == "__main__":
  parser = OptionParser()
  parser.add_option("--message", dest="message",
                    help="Required text of message to post.")
  parser.add_option("--videoid", dest="videoid",
                    help="Optional ID of video to post.")
  parser.add_option("--playlistid", dest="playlistid",
                    help="Optional ID of playlist to post.")
  (options, args) = parser.parse_args()
  # You can post a message with or without an accompanying video or playlist.
  # You can't post both a video and playlist at the same time.
  if options.videoid and options.playlistid:
    parser.print_help()
    exit("\nYou cannot post a video and a playlist at the same time.")
  # The message text itself is mandatory.
  if not options.message:
    parser.print_help()
    exit("\nPlease provide a message.")
  youtube = get_authenticated_service()
  post_bulletin(youtube, options)
| Python |
#!/usr/bin/python
import httplib2
import os
import sys
from apiclient.discovery import build
from oauth2client.file import Storage
from oauth2client.client import flow_from_clientsecrets
from oauth2client.tools import run
from optparse import OptionParser
# CLIENT_SECRETS_FILE, name of a file containing the OAuth 2.0 information for
# this application, including client_id and client_secret. You can acquire an
# ID/secret pair from the API Access tab on the Google APIs Console
# http://code.google.com/apis/console#access
# For more information about using OAuth2 to access Google APIs, please visit:
# https://developers.google.com/accounts/docs/OAuth2
# For more information about the client_secrets.json file format, please visit:
# https://developers.google.com/api-client-library/python/guide/aaa_client_secrets
# Please ensure that you have enabled the YouTube Data API for your project.
# Name of the file holding this application's OAuth 2.0 client_id/client_secret.
CLIENT_SECRETS_FILE = "client_secrets.json"
# Helpful message to display if the CLIENT_SECRETS_FILE is missing.
MISSING_CLIENT_SECRETS_MESSAGE = """
WARNING: Please configure OAuth 2.0
To make this sample run you will need to populate the client_secrets.json file
found at:
%s
with information from the APIs Console
https://code.google.com/apis/console#access
For more information about the client_secrets.json file format, please visit:
https://developers.google.com/api-client-library/python/guide/aaa_client_secrets
""" % os.path.abspath(os.path.join(os.path.dirname(__file__),
                                   CLIENT_SECRETS_FILE))
# An OAuth 2 access scope that allows for full read/write access
YOUTUBE_SCOPE = "https://www.googleapis.com/auth/youtube"
YOUTUBE_API_SERVICE_NAME = "youtube"
YOUTUBE_API_VERSION = "v3"
def get_authenticated_service():
  """Run the OAuth 2.0 flow and return an authorized YouTube service object.

  Credentials are cached in a "<script>-oauth2.json" file next to the script;
  the interactive flow only runs when no valid cached credentials exist.
  """
  oauth_flow = flow_from_clientsecrets(
      CLIENT_SECRETS_FILE,
      scope=YOUTUBE_SCOPE,
      message=MISSING_CLIENT_SECRETS_MESSAGE)
  credential_store = Storage("%s-oauth2.json" % sys.argv[0])
  creds = credential_store.get()
  if creds is None or creds.invalid:
    # Missing, expired, or revoked credentials: run the interactive flow.
    creds = run(oauth_flow, credential_store)
  authorized_http = creds.authorize(httplib2.Http())
  return build(YOUTUBE_API_SERVICE_NAME, YOUTUBE_API_VERSION,
               http=authorized_http)
def add_featured_video(options):
youtube = get_authenticated_service()
add_video_request = youtube.channels().update(
part="invideoPromotion",
# Test different payloads in the API explorer:
# https://developers.google.com/youtube/v3/docs/channels/update#try-it
body={
"invideoPromotion": {
"position": {
"cornerPosition": options.position,
"type": "corner"
},
"items": [{
"type": "video",
"videoId": options.video_id
}],
"timing": {
"offsetMs": options.offset_ms,
"type": options.offset_type
}
},
"id": options.channel_id
})
add_video_response = add_video_request.execute()
print "Added featured video %s to channel %s." % (
add_video_response["invideoPromotion"]["items"][0]["videoId"],
add_video_response["id"])
# If offsetMs or position are not valid, the API will throw an error
VALID_OFFSET_TYPES = ("offsetFromEnd", "offsetFromStart",)
VALID_POSITIONS = ("topLeft", "topRight", "bottomLeft", "bottomRight",)
if __name__ == '__main__':
  parser = OptionParser()
  parser.add_option("--channel_id", dest="channel_id", help="Channel ID of the channel to add a featured video")
  parser.add_option("--video_id", dest="video_id", help="Video ID to feature on your channel")
  parser.add_option("--position", dest="position",
                    help="Position to show promotion. Options are: %s" % ", ".join(VALID_POSITIONS),
                    default="bottomLeft")
  parser.add_option("--offset_ms", dest="offset_ms",
                    help="Offset in milliseconds to show video. Default is 10000, or 10 seconds",
                    default="10000")
  parser.add_option("--offset_type", dest="offset_type",
                    help="Describes whether the offset is from the beginning or end of video playback."
                    + " Valid options are: %s" % ",".join(VALID_OFFSET_TYPES),
                    default="offsetFromEnd")
  (options, args) = parser.parse_args()
  # Require a channel ID and video ID
  if options.channel_id is None:
    exit("Please specify a valid channel ID using the --channel_id parameter.")
  elif options.video_id is None:
    exit("Please specify a valid video ID to feature using the --video_id parameter.")
  # Validate offset type and position parameters
  if options.offset_type not in VALID_OFFSET_TYPES:
    exit("offset_type must be one of: %s" % ",".join(VALID_OFFSET_TYPES))
  # Only call the API once every local validation has passed.
  if options.position not in VALID_POSITIONS:
    exit("position must be one of: %s" % ", ".join(VALID_POSITIONS))
  else:
    add_featured_video(options)
| Python |
#!/usr/bin/python
import httplib2
import os
import sys
from apiclient.discovery import build
from oauth2client.file import Storage
from oauth2client.client import flow_from_clientsecrets
from oauth2client.tools import run
from optparse import OptionParser
# CLIENT_SECRETS_FILE, name of a file containing the OAuth 2.0 information for
# this application, including client_id and client_secret. You can acquire an
# ID/secret pair from the API Access tab on the Google APIs Console
# http://code.google.com/apis/console#access
# For more information about using OAuth2 to access Google APIs, please visit:
# https://developers.google.com/accounts/docs/OAuth2
# For more information about the client_secrets.json file format, please visit:
# https://developers.google.com/api-client-library/python/guide/aaa_client_secrets
# Please ensure that you have enabled the YouTube Data API for your project.
# Name of the file holding this application's OAuth 2.0 client_id/client_secret.
CLIENT_SECRETS_FILE = "client_secrets.json"
# Helpful message to display if the CLIENT_SECRETS_FILE is missing.
MISSING_CLIENT_SECRETS_MESSAGE = """
WARNING: Please configure OAuth 2.0
To make this sample run you will need to populate the client_secrets.json file
found at:
%s
with information from the APIs Console
https://code.google.com/apis/console#access
For more information about the client_secrets.json file format, please visit:
https://developers.google.com/api-client-library/python/guide/aaa_client_secrets
""" % os.path.abspath(os.path.join(os.path.dirname(__file__),
                                   CLIENT_SECRETS_FILE))
# An OAuth 2 access scope that allows for full read/write access.
YOUTUBE_READ_WRITE_SCOPE = "https://www.googleapis.com/auth/youtube"
YOUTUBE_API_SERVICE_NAME = "youtube"
YOUTUBE_API_VERSION = "v3"
def get_authenticated_service():
  """Run the OAuth 2.0 flow and return an authorized YouTube service object.

  Credentials are cached in a "<script>-oauth2.json" file next to the script;
  the interactive flow only runs when no valid cached credentials exist.
  """
  oauth_flow = flow_from_clientsecrets(
      CLIENT_SECRETS_FILE,
      scope=YOUTUBE_READ_WRITE_SCOPE,
      message=MISSING_CLIENT_SECRETS_MESSAGE)
  credential_store = Storage("%s-oauth2.json" % sys.argv[0])
  creds = credential_store.get()
  if creds is None or creds.invalid:
    # Missing, expired, or revoked credentials: run the interactive flow.
    creds = run(oauth_flow, credential_store)
  authorized_http = creds.authorize(httplib2.Http())
  return build(YOUTUBE_API_SERVICE_NAME, YOUTUBE_API_VERSION,
               http=authorized_http)
def like_video(youtube, video_id):
channels_list_response = youtube.channels().list(
mine=True,
part="contentDetails"
).execute()
# Adding a video as a favorite or to the watch later list is done via the
# same basic process. Just read the list id of the corresponding playlist
# instead of "likes" as we're doing here.
liked_list_id = channels_list_response["items"][0]["contentDetails"]["relatedPlaylists"]["likes"]
body = dict(
snippet=dict(
playlistId=liked_list_id,
resourceId=dict(
kind="youtube#video",
videoId=video_id
)
)
)
youtube.playlistItems().insert(
part=",".join(body.keys()),
body=body
).execute()
print "%s has been liked." % video_id
if __name__ == "__main__":
  parser = OptionParser()
  # Default is a sample video id; pass --videoid to like a different video.
  parser.add_option("--videoid", dest="videoid",
                    default="L-oNKK1CrnU", help="ID of video to like.")
  (options, args) = parser.parse_args()
  youtube = get_authenticated_service()
  like_video(youtube, options.videoid)
#!/usr/bin/python
import httplib
import httplib2
import os
import random
import sys
import time
from apiclient.discovery import build
from apiclient.errors import HttpError
from apiclient.http import MediaFileUpload
from oauth2client.file import Storage
from oauth2client.client import flow_from_clientsecrets
from oauth2client.tools import run
from optparse import OptionParser
# Explicitly tell the underlying HTTP transport library not to retry, since
# we are handling retry logic ourselves.
httplib2.RETRIES = 1
# Maximum number of times to retry before giving up.
MAX_RETRIES = 10
# Always retry when these exceptions are raised.
RETRIABLE_EXCEPTIONS = (httplib2.HttpLib2Error, IOError, httplib.NotConnected,
                        httplib.IncompleteRead, httplib.ImproperConnectionState,
                        httplib.CannotSendRequest, httplib.CannotSendHeader,
                        httplib.ResponseNotReady, httplib.BadStatusLine)
# Always retry when an apiclient.errors.HttpError with one of these status
# codes is raised.
RETRIABLE_STATUS_CODES = [500, 502, 503, 504]
# CLIENT_SECRETS_FILE, name of a file containing the OAuth 2.0 information for
# this application, including client_id and client_secret. You can acquire an
# ID/secret pair from the API Access tab on the Google APIs Console
# http://code.google.com/apis/console#access
# For more information about using OAuth2 to access Google APIs, please visit:
# https://developers.google.com/accounts/docs/OAuth2
# For more information about the client_secrets.json file format, please visit:
# https://developers.google.com/api-client-library/python/guide/aaa_client_secrets
# Please ensure that you have enabled the YouTube Data API for your project.
CLIENT_SECRETS_FILE = "client_secrets.json"
# A limited OAuth 2 access scope that allows for uploading files, but not other
# types of account access.
YOUTUBE_UPLOAD_SCOPE = "https://www.googleapis.com/auth/youtube.upload"
YOUTUBE_API_SERVICE_NAME = "youtube"
YOUTUBE_API_VERSION = "v3"
# Helpful message to display if the CLIENT_SECRETS_FILE is missing.
MISSING_CLIENT_SECRETS_MESSAGE = """
WARNING: Please configure OAuth 2.0
To make this sample run you will need to populate the client_secrets.json file
found at:
%s
with information from the APIs Console
https://code.google.com/apis/console#access
For more information about the client_secrets.json file format, please visit:
https://developers.google.com/api-client-library/python/guide/aaa_client_secrets
""" % os.path.abspath(os.path.join(os.path.dirname(__file__),
                                   CLIENT_SECRETS_FILE))
def get_authenticated_service():
  """Run the OAuth 2.0 flow and return an authorized YouTube service object.

  Credentials are cached in a "<script>-oauth2.json" file next to the script;
  the interactive flow only runs when no valid cached credentials exist.
  """
  oauth_flow = flow_from_clientsecrets(
      CLIENT_SECRETS_FILE,
      scope=YOUTUBE_UPLOAD_SCOPE,
      message=MISSING_CLIENT_SECRETS_MESSAGE)
  credential_store = Storage("%s-oauth2.json" % sys.argv[0])
  creds = credential_store.get()
  if creds is None or creds.invalid:
    # Missing, expired, or revoked credentials: run the interactive flow.
    creds = run(oauth_flow, credential_store)
  authorized_http = creds.authorize(httplib2.Http())
  return build(YOUTUBE_API_SERVICE_NAME, YOUTUBE_API_VERSION,
               http=authorized_http)
def initialize_upload(options):
  """Start a resumable upload of options.file with the CLI-provided metadata."""
  youtube = get_authenticated_service()
  # --keywords defaults to "", which means "no tags" rather than [""].
  keyword_tags = options.keywords.split(",") if options.keywords else None
  metadata = {
    "snippet": {
      "title": options.title,
      "description": options.description,
      "tags": keyword_tags,
      "categoryId": options.category,
    },
    "status": {
      "privacyStatus": options.privacyStatus,
    },
  }
  # chunksize=-1 sends the file in a single request; resumable=True lets
  # resumable_upload() drive the transfer via next_chunk() with retries.
  insert_request = youtube.videos().insert(
      part="snippet,status",
      body=metadata,
      media_body=MediaFileUpload(options.file, chunksize=-1, resumable=True))
  resumable_upload(insert_request)
def resumable_upload(insert_request):
response = None
error = None
retry = 0
while response is None:
try:
print "Uploading file..."
status, response = insert_request.next_chunk()
if 'id' in response:
print "'%s' (video id: %s) was successfully uploaded." % (
options.title, response['id'])
else:
exit("The upload failed with an unexpected response: %s" % response)
except HttpError, e:
if e.resp.status in RETRIABLE_STATUS_CODES:
error = "A retriable HTTP error %d occurred:\n%s" % (e.resp.status,
e.content)
else:
raise
except RETRIABLE_EXCEPTIONS, e:
error = "A retriable error occurred: %s" % e
if error is not None:
print error
retry += 1
if retry > MAX_RETRIES:
exit("No longer attempting to retry.")
max_sleep = 2 ** retry
sleep_seconds = random.random() * max_sleep
print "Sleeping %f seconds and then retrying..." % sleep_seconds
time.sleep(sleep_seconds)
if __name__ == '__main__':
  parser = OptionParser()
  parser.add_option("--file", dest="file", help="Video file to upload")
  parser.add_option("--title", dest="title", help="Video title",
                    default="Test Title")
  parser.add_option("--description", dest="description", help="Video description",
                    default="Test Description")
  # 22 is the "People & Blogs" numeric category id in most locales --
  # TODO confirm against the videoCategories.list response for your region.
  parser.add_option("--category", dest="category", help="Video category",
                    default="22")
  parser.add_option("--keywords", dest="keywords",
                    help="Video keywords, comma separated", default="")
  parser.add_option("--privacyStatus", dest="privacyStatus", help="Video privacy status",
                    default="unlisted")
  (options, args) = parser.parse_args()
  # The video file is the only mandatory argument and must exist on disk.
  if options.file is None or not os.path.exists(options.file):
    exit("Please specify a valid file using the --file= parameter.")
  else:
    initialize_upload(options)
| Python |
#!/usr/bin/python
import httplib2
import os
import random
import sys
import time
from apiclient.discovery import build
from apiclient.errors import HttpError
from oauth2client.file import Storage
from oauth2client.client import flow_from_clientsecrets
from oauth2client.tools import run
from optparse import OptionParser
# CLIENT_SECRETS_FILE, name of a file containing the OAuth 2.0 information for
# this application, including client_id and client_secret. You can acquire an
# ID/secret pair from the API Access tab on the Google APIs Console
# http://code.google.com/apis/console#access
# For more information about using OAuth2 to access Google APIs, please visit:
# https://developers.google.com/accounts/docs/OAuth2
# For more information about the client_secrets.json file format, please visit:
# https://developers.google.com/api-client-library/python/guide/aaa_client_secrets
# Please ensure that you have enabled the YouTube Data API for your project.
# Name of the file holding this application's OAuth 2.0 client_id/client_secret.
CLIENT_SECRETS_FILE = "client_secrets.json"
# A read-only OAuth 2 access scope: sufficient for listing streams, but not
# for modifying any account data. (The original comment incorrectly described
# this as an upload scope.)
YOUTUBE_READONLY_SCOPE = "https://www.googleapis.com/auth/youtube.readonly"
YOUTUBE_API_SERVICE_NAME = "youtube"
YOUTUBE_API_VERSION = "v3"
# Helpful message to display if the CLIENT_SECRETS_FILE is missing.
MISSING_CLIENT_SECRETS_MESSAGE = """
WARNING: Please configure OAuth 2.0
To make this sample run you will need to populate the client_secrets.json file
found at:
%s
with information from the APIs Console
https://code.google.com/apis/console#access
For more information about the client_secrets.json file format, please visit:
https://developers.google.com/api-client-library/python/guide/aaa_client_secrets
""" % os.path.abspath(os.path.join(os.path.dirname(__file__),
                                   CLIENT_SECRETS_FILE))
def get_authenticated_service():
  """Run the OAuth 2.0 flow and return an authorized YouTube service object.

  Credentials are cached in a "<script>-oauth2.json" file next to the script;
  the interactive flow only runs when no valid cached credentials exist.
  """
  oauth_flow = flow_from_clientsecrets(
      CLIENT_SECRETS_FILE,
      scope=YOUTUBE_READONLY_SCOPE,
      message=MISSING_CLIENT_SECRETS_MESSAGE)
  credential_store = Storage("%s-oauth2.json" % sys.argv[0])
  creds = credential_store.get()
  if creds is None or creds.invalid:
    # Missing, expired, or revoked credentials: run the interactive flow.
    creds = run(oauth_flow, credential_store)
  authorized_http = creds.authorize(httplib2.Http())
  return build(YOUTUBE_API_SERVICE_NAME, YOUTUBE_API_VERSION,
               http=authorized_http)
def list_streams(options):
youtube = get_authenticated_service()
list_streams_response = youtube.liveStreams().list(
part="id,snippet",
mine="true",
maxResults=options.maxResults
).execute()
if not list_streams_response["items"]:
print "No stream was found."
sys.exit(1)
streams = []
for result in list_streams_response.get("items", []):
streams.append("%s (%s)" % (result["snippet"]["title"],result["id"]))
print "Streams:\n", "\n".join(streams), "\n"
if __name__ == "__main__":
  parser = OptionParser()
  parser.add_option("--max-results", dest="maxResults",
                    help="Max results", default=25)
  (options, args) = parser.parse_args()
  list_streams(options)
| Python |
#!/usr/bin/python
import httplib2
import os
import random
import sys
import time
from apiclient.discovery import build
from apiclient.errors import HttpError
from oauth2client.file import Storage
from oauth2client.client import flow_from_clientsecrets
from oauth2client.tools import run
from optparse import OptionParser
# CLIENT_SECRETS_FILE, name of a file containing the OAuth 2.0 information for
# this application, including client_id and client_secret. You can acquire an
# ID/secret pair from the API Access tab on the Google APIs Console
# http://code.google.com/apis/console#access
# For more information about using OAuth2 to access Google APIs, please visit:
# https://developers.google.com/accounts/docs/OAuth2
# For more information about the client_secrets.json file format, please visit:
# https://developers.google.com/api-client-library/python/guide/aaa_client_secrets
# Please ensure that you have enabled the YouTube Data API for your project.
# Name of the file holding this application's OAuth 2.0 client_id/client_secret.
CLIENT_SECRETS_FILE = "client_secrets.json"
# An OAuth 2 access scope that allows for full read/write access.
YOUTUBE_SCOPE = "https://www.googleapis.com/auth/youtube"
YOUTUBE_API_SERVICE_NAME = "youtube"
YOUTUBE_API_VERSION = "v3"
# Helpful message to display if the CLIENT_SECRETS_FILE is missing.
MISSING_CLIENT_SECRETS_MESSAGE = """
WARNING: Please configure OAuth 2.0
To make this sample run you will need to populate the client_secrets.json file
found at:
%s
with information from the APIs Console
https://code.google.com/apis/console#access
For more information about the client_secrets.json file format, please visit:
https://developers.google.com/api-client-library/python/guide/aaa_client_secrets
""" % os.path.abspath(os.path.join(os.path.dirname(__file__),
                                   CLIENT_SECRETS_FILE))
def get_authenticated_service():
  """Authorize via OAuth 2.0 and return a YouTube Data API service object.

  Credentials are cached in a file named after the script; when they are
  missing or invalid, the interactive OAuth flow is run to refresh them.
  """
  oauth_flow = flow_from_clientsecrets(
      CLIENT_SECRETS_FILE,
      scope=YOUTUBE_SCOPE,
      message=MISSING_CLIENT_SECRETS_MESSAGE)
  credential_store = Storage("%s-oauth2.json" % sys.argv[0])
  credentials = credential_store.get()
  if credentials is None or credentials.invalid:
    credentials = run(oauth_flow, credential_store)
  authorized_http = credentials.authorize(httplib2.Http())
  return build(YOUTUBE_API_SERVICE_NAME, YOUTUBE_API_VERSION,
               http=authorized_http)
def update_video(options):
youtube = get_authenticated_service()
videos_list_response = youtube.videos().list(
id=options.videoid,
part='snippet'
).execute()
if not videos_list_response["items"]:
print "Video '%s' was not found." % options.videoid
sys.exit(1)
videos_list_snippet = videos_list_response["items"][0]["snippet"]
if "tags" not in videos_list_snippet:
videos_list_snippet["tags"] = []
videos_list_snippet["tags"].append(options.tag)
videos_update_response = youtube.videos().update(
part='snippet',
body=dict(
snippet=videos_list_snippet,
id=options.videoid
)).execute()
video_title = videos_update_response["snippet"]["title"]
print "Tag '%s' was added to video '%s'." % (options.tag, video_title)
if __name__ == "__main__":
  # Parse --videoid/--tag and add the tag to the given video.
  opt_parser = OptionParser()
  opt_parser.add_option("--videoid", dest="videoid",
    help="ID of video to update.")
  opt_parser.add_option("--tag", dest="tag", default="youtube",
    help="Additional tag to add to video.")
  opts, _ = opt_parser.parse_args()
  update_video(opts)
| Python |
#!/usr/bin/python
from datetime import datetime, timedelta
import httplib2
import os
import sys
from apiclient.discovery import build
from oauth2client.file import Storage
from oauth2client.client import flow_from_clientsecrets
from oauth2client.tools import run
from optparse import OptionParser
# CLIENT_SECRETS_FILE, name of a file containing the OAuth 2.0 information for
# this application, including client_id and client_secret. You can acquire an
# ID/secret pair from the API Access tab on the Google APIs Console
# http://code.google.com/apis/console#access
# For more information about using OAuth2 to access Google APIs, please visit:
# https://developers.google.com/accounts/docs/OAuth2
# For more information about the client_secrets.json file format, please visit:
# https://developers.google.com/api-client-library/python/guide/aaa_client_secrets
# Please ensure that you have enabled the YouTube Data & Analytics APIs for your project.
# Path (relative to this script) of the OAuth 2.0 client information file.
CLIENT_SECRETS_FILE = "client_secrets.json"
# We will require read-only access to the YouTube Data and Analytics API.
YOUTUBE_SCOPES = ["https://www.googleapis.com/auth/youtube.readonly",
  "https://www.googleapis.com/auth/yt-analytics.readonly"]
# Service names/versions passed to apiclient.discovery.build() for the two
# APIs this script queries.
YOUTUBE_API_SERVICE_NAME = "youtube"
YOUTUBE_API_VERSION = "v3"
YOUTUBE_ANALYTICS_API_SERVICE_NAME = "youtubeAnalytics"
YOUTUBE_ANALYTICS_API_VERSION = "v1"
# Helpful message to display if the CLIENT_SECRETS_FILE is missing.
MISSING_CLIENT_SECRETS_MESSAGE = """
WARNING: Please configure OAuth 2.0
To make this sample run you will need to populate the client_secrets.json file
found at:
%s
with information from the APIs Console
https://code.google.com/apis/console#access
For more information about the client_secrets.json file format, please visit:
https://developers.google.com/api-client-library/python/guide/aaa_client_secrets
""" % os.path.abspath(os.path.join(os.path.dirname(__file__),
                                   CLIENT_SECRETS_FILE))
now = datetime.now()
one_day_ago = (now - timedelta(days=1)).strftime("%Y-%m-%d")
one_week_ago = (now - timedelta(days=7)).strftime("%Y-%m-%d")
parser = OptionParser()
parser.add_option("--metrics", dest="metrics", help="Report metrics",
default="views,comments,favoritesAdded,favoritesRemoved,likes,dislikes,shares")
parser.add_option("--dimensions", dest="dimensions", help="Report dimensions",
default="video")
parser.add_option("--start-date", dest="start_date",
help="Start date, in YYYY-MM-DD format", default=one_week_ago)
parser.add_option("--end-date", dest="end_date",
help="End date, in YYYY-MM-DD format", default=one_day_ago)
parser.add_option("--start-index", dest="start_index", help="Start index",
default=1, type="int")
parser.add_option("--max-results", dest="max_results", help="Max results",
default=10, type="int")
parser.add_option("--sort", dest="sort", help="Sort order", default="-views")
(options, args) = parser.parse_args()
flow = flow_from_clientsecrets(CLIENT_SECRETS_FILE,
message=MISSING_CLIENT_SECRETS_MESSAGE,
scope=" ".join(YOUTUBE_SCOPES))
storage = Storage("%s-oauth2.json" % sys.argv[0])
credentials = storage.get()
if credentials is None or credentials.invalid:
credentials = run(flow, storage)
http = credentials.authorize(httplib2.Http())
youtube = build(YOUTUBE_API_SERVICE_NAME, YOUTUBE_API_VERSION, http=http)
youtube_analytics = build(YOUTUBE_ANALYTICS_API_SERVICE_NAME,
YOUTUBE_ANALYTICS_API_VERSION, http=http)
channels_response = youtube.channels().list(
mine="",
part="id"
).execute()
for channel in channels_response.get("items", []):
channel_id = channel["id"]
analytics_response = youtube_analytics.reports().query(
ids="channel==%s" % channel_id,
metrics=options.metrics,
dimensions=options.dimensions,
start_date=options.start_date,
end_date=options.end_date,
start_index=options.start_index,
max_results=options.max_results,
sort=options.sort
).execute()
print "Analytics Data for Channel %s" % channel_id
for column_header in analytics_response.get("columnHeaders", []):
print "%-20s" % column_header["name"],
print
for row in analytics_response.get("rows", []):
for value in row:
print "%-20s" % value,
print
| Python |
#!/usr/bin/python
from apiclient.discovery import build
from optparse import OptionParser
import json
import urllib
# Set DEVELOPER_KEY to the "API key" value from the "Access" tab of the
# Google APIs Console http://code.google.com/apis/console#access
# Please ensure that you have enabled the YouTube Data API and Freebase API
# for your project.
DEVELOPER_KEY = "REPLACE_ME"
# Service name and version passed to apiclient.discovery.build().
YOUTUBE_API_SERVICE_NAME = "youtube"
YOUTUBE_API_VERSION = "v3"
# Template for the Freebase search endpoint; %s receives the urlencoded query.
FREEBASE_SEARCH_URL = "https://www.googleapis.com/freebase/v1/search?%s"
def get_topic_id(options):
freebase_params = dict(query=options.query, key=DEVELOPER_KEY)
freebase_url = FREEBASE_SEARCH_URL % urllib.urlencode(freebase_params)
freebase_response = json.loads(urllib.urlopen(freebase_url).read())
if len(freebase_response["result"]) == 0:
exit("No matching terms were found in Freebase.")
mids = []
index = 1
print "The following topics were found:"
for result in freebase_response["result"]:
mids.append(result["mid"])
print " %2d. %s (%s)" % (index, result.get("name", "Unknown"),
result.get("notable", {}).get("name", "Unknown"))
index += 1
mid = None
while mid is None:
index = raw_input("Enter a topic number to find related YouTube %ss: " %
options.type)
try:
mid = mids[int(index) - 1]
except ValueError:
pass
return mid
def youtube_search(mid, options):
youtube = build(YOUTUBE_API_SERVICE_NAME, YOUTUBE_API_VERSION,
developerKey=DEVELOPER_KEY)
search_response = youtube.search().list(
topicId=mid,
type=options.type,
part="id,snippet",
maxResults=options.maxResults
).execute()
for search_result in search_response.get("items", []):
if search_result["id"]["kind"] == "youtube#video":
print "%s (%s)" % (search_result["snippet"]["title"],
search_result["id"]["videoId"])
elif search_result["id"]["kind"] == "youtube#channel":
print "%s (%s)" % (search_result["snippet"]["title"],
search_result["id"]["channelId"])
elif search_result["id"]["kind"] == "youtube#playlist":
print "%s (%s)" % (search_result["snippet"]["title"],
search_result["id"]["playlistId"])
if __name__ == "__main__":
  # Resolve a Freebase topic interactively, then search YouTube with it.
  opt_parser = OptionParser()
  opt_parser.add_option("--query", dest="query", default="Google",
    help="Freebase search term")
  opt_parser.add_option("--max-results", dest="maxResults", default=25,
    help="Max YouTube results")
  opt_parser.add_option("--type", dest="type", default="channel",
    help="YouTube result type: video, playlist, or channel")
  opts, _ = opt_parser.parse_args()
  youtube_search(get_topic_id(opts), opts)
#!/usr/bin/python2.6
import gflags
import httplib2
import os
import sys
import time
from apiclient.discovery import build
from apiclient.errors import HttpError
from apiclient.http import MediaFileUpload
from oauth2client.file import Storage
from oauth2client.client import flow_from_clientsecrets
from oauth2client.tools import run
FLAGS = gflags.FLAGS

# Command-line flags describing the video file to upload and its metadata.
gflags.DEFINE_string('file', None, 'Video file to upload')
gflags.DEFINE_string('title', 'Test title', 'Video title')
gflags.DEFINE_string('description', 'Test description', 'Video description')
gflags.DEFINE_string('category', 'VIDEO_CATEGORY_PEOPLE', 'Video category')
gflags.DEFINE_string('keywords', '', 'Video keywords, comma separated')
gflags.DEFINE_string('privacyStatus', 'PRIVACY_UNLISTED', 'Video privacy status')
def main(argv):
# Let the gflags module process the command-line arguments
try:
argv = FLAGS(argv)
except gflags.FlagsError, e:
print '%s\nUsage: %s ARGS\n%s' % (e, argv[0], FLAGS)
sys.exit(1)
print FLAGS.file
if FLAGS.file is None or not os.path.exists(FLAGS.file):
sys.exit('Please specify a valid file using the --file= parameter.')
flow = flow_from_clientsecrets('v3_client_secrets.json',
scope='https://www.googleapis.com/auth/youtube.upload')
storage = Storage('ru-oauth2.dat')
credentials = storage.get()
if credentials is None or credentials.invalid:
credentials = run(flow, storage)
youtube = build('youtube', 'v3alpha',
http=credentials.authorize(httplib2.Http()))
insert_request = youtube.videos().insert(
body = dict(
snippet = dict(
title=FLAGS.title,
description=FLAGS.description,
tags=FLAGS.keywords.split(','),
categoryId=FLAGS.category
),
status = dict(
privacyStatus=FLAGS.privacyStatus
)
),
media_body = MediaFileUpload(FLAGS.file, chunksize=-1, resumable=True)
)
insert_request.headers['Slug'] = 'test_file'
response = None
backoff = 1
while response is None:
try:
status, response = insert_request.next_chunk()
print '"%s" (video ID: %s) was successfully uploaded.' % (FLAGS.title,
response['id'])
except HttpError, e:
print 'An HTTP error %d occurred:\n%s' % (e.resp.status, e.content)
if e.resp.status in [500, 502, 503, 504]:
backoff *= 2
if backoff > 900:
exit('No longer attempting to retry.')
print 'Sleeping %d seconds and then retrying ...' % (backoff)
time.sleep(backoff)
# Script entry point: upload the video described by the command-line flags.
if __name__ == '__main__':
  main(sys.argv)
| Python |
#!/usr/bin/python2.6
#
# Simple http server to emulate api.playfoursquare.com
import logging
import shutil
import sys
import urlparse
import SimpleHTTPServer
import BaseHTTPServer
class RequestHandler(BaseHTTPServer.BaseHTTPRequestHandler):
  """Handle playfoursquare.com requests, for testing.

  Every known request path is answered with a canned XML capture file;
  unknown paths get a 404. POST is handled identically to GET.
  """

  # Request path -> capture file served as the response body.
  _RESPONSE_PATHS = {
    '/v1/venue': '../captures/api/v1/venue.xml',
    '/v1/addvenue': '../captures/api/v1/venue.xml',
    '/v1/venues': '../captures/api/v1/venues.xml',
    '/v1/user': '../captures/api/v1/user.xml',
    '/v1/checkcity': '../captures/api/v1/checkcity.xml',
    '/v1/checkins': '../captures/api/v1/checkins.xml',
    '/v1/cities': '../captures/api/v1/cities.xml',
    '/v1/switchcity': '../captures/api/v1/switchcity.xml',
    '/v1/tips': '../captures/api/v1/tips.xml',
    '/v1/checkin': '../captures/api/v1/checkin.xml',
    '/history/12345.rss': '../captures/api/v1/feed.xml',
  }

  def do_GET(self):
    """Serve the canned response for self.path (404 for unknown paths)."""
    logging.warn('do_GET: %s, %s', self.command, self.path)
    url = urlparse.urlparse(self.path)
    logging.warn('do_GET: %s', url)
    # NOTE: the original also built query_keys = [pair[0] for pair in
    # parse_qs(url.query)], which iterated the dict's KEYS and took each
    # key's first character — a bug; the value was never used, so both
    # lines are dropped.
    response = self.handle_url(url)
    if response is not None:
      self.send_200()
      try:
        shutil.copyfileobj(response, self.wfile)
      finally:
        # BUG FIX: the capture file handle was previously leaked.
        response.close()
      self.wfile.close()

  do_POST = do_GET

  def handle_url(self, url):
    """Return an open capture file for url.path, or send a 404 and return
    None when the path is not recognized."""
    path = self._RESPONSE_PATHS.get(url.path)
    if path is None:
      self.send_error(404)
      return None
    logging.warn('Using: %s' % path)
    return open(path)

  def send_200(self):
    """Send a 200 response header with a text/xml content type."""
    self.send_response(200)
    self.send_header('Content-type', 'text/xml')
    self.end_headers()
def main():
if len(sys.argv) > 1:
port = int(sys.argv[1])
else:
port = 8080
server_address = ('0.0.0.0', port)
httpd = BaseHTTPServer.HTTPServer(server_address, RequestHandler)
sa = httpd.socket.getsockname()
print "Serving HTTP on", sa[0], "port", sa[1], "..."
httpd.serve_forever()
# Script entry point: start the emulation server.
if __name__ == '__main__':
  main()
| Python |
Subsets and Splits
SQL Console for ajibawa-2023/Python-Code-Large
Provides a useful breakdown of language distribution in the training data, showing which languages have the most samples and helping identify potential imbalances across different language groups.