code stringlengths 1 1.72M | language stringclasses 1
value |
|---|---|
#
# Copyright (C) 2010 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
from command import Command
from git_command import GitCommand
from git_refs import GitRefs, HEAD, R_HEADS, R_TAGS, R_PUB
from error import GitError
class Rebase(Command):
  common = True
  helpSummary = "Rebase local branches on upstream branch"
  helpUsage = """
%prog {[<project>...] | -i <project>...}
"""
  helpDescription = """
'%prog' uses git rebase to move local changes in the current topic branch to
the HEAD of the upstream history, useful when you have made commits in a topic
branch but need to incorporate new upstream changes "underneath" them.
"""

  def _Options(self, p):
    p.add_option('-i', '--interactive',
                 dest="interactive", action="store_true",
                 help="interactive rebase (single project only)")
    p.add_option('-f', '--force-rebase',
                 dest='force_rebase', action='store_true',
                 help='Pass --force-rebase to git rebase')
    p.add_option('--no-ff',
                 dest='no_ff', action='store_true',
                 help='Pass --no-ff to git rebase')
    p.add_option('-q', '--quiet',
                 dest='quiet', action='store_true',
                 help='Pass --quiet to git rebase')
    p.add_option('--autosquash',
                 dest='autosquash', action='store_true',
                 help='Pass --autosquash to git rebase')
    p.add_option('--whitespace',
                 dest='whitespace', action='store', metavar='WS',
                 help='Pass --whitespace to git rebase')

  def Execute(self, opt, args):
    """Rebase each selected project's current branch onto its upstream.

    Returns -1 on the first failure (or precondition violation when a
    single project was named); otherwise falls through with None.
    """
    all = self.GetProjects(args)
    one_project = len(all) == 1

    if opt.interactive and not one_project:
      print >>sys.stderr, 'error: interactive rebase not supported with multiple projects'
      return -1

    for project in all:
      cb = project.CurrentBranch
      if not cb:
        if one_project:
          # Bug fix: the message previously read "detatched".
          print >>sys.stderr, "error: project %s has a detached HEAD" % project.relpath
          return -1
        # ignore branches with detached HEADs
        continue

      upbranch = project.GetBranch(cb)
      if not upbranch.LocalMerge:
        if one_project:
          print >>sys.stderr, "error: project %s does not track any remote branches" % project.relpath
          return -1
        # ignore branches without remotes
        continue

      # Rebuild the git rebase argument vector for each project.
      args = ["rebase"]

      if opt.whitespace:
        args.append('--whitespace=%s' % opt.whitespace)

      if opt.quiet:
        args.append('--quiet')

      if opt.force_rebase:
        args.append('--force-rebase')

      if opt.no_ff:
        args.append('--no-ff')

      if opt.autosquash:
        args.append('--autosquash')

      if opt.interactive:
        args.append("-i")

      args.append(upbranch.LocalMerge)

      print >>sys.stderr, '# %s: rebasing %s -> %s' % \
        (project.relpath, cb, upbranch.LocalMerge)

      if GitCommand(project, args).Wait() != 0:
        return -1
| Python |
#
# Copyright (C) 2008 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import re
import sys
from formatter import AbstractFormatter, DumbWriter
from color import Coloring
from command import PagedCommand, MirrorSafeCommand
class Help(PagedCommand, MirrorSafeCommand):
  common = False
  helpSummary = "Display detailed help on a command"
  helpUsage = """
%prog [--all|command]
"""
  helpDescription = """
Displays detailed usage information about a command.
"""

  def _PrintAllCommands(self):
    # List every registered command, aligned, with its one-line summary.
    print 'usage: repo COMMAND [ARGS]'
    print """
The complete list of recognized repo commands are:
"""
    commandNames = self.commands.keys()
    commandNames.sort()

    # Width of the widest command name, for column alignment.
    maxlen = 0
    for name in commandNames:
      maxlen = max(maxlen, len(name))
    fmt = ' %%-%ds %%s' % maxlen

    for name in commandNames:
      command = self.commands[name]
      try:
        summary = command.helpSummary.strip()
      except AttributeError:
        # Commands without a helpSummary are still listed, just blank.
        summary = ''
      print fmt % (name, summary)
    print """
See 'repo help <command>' for more information on a specific command.
"""

  def _PrintCommonCommands(self):
    # Like _PrintAllCommands, but restricted to commands marked common.
    print 'usage: repo COMMAND [ARGS]'
    print """
The most commonly used repo commands are:
"""
    commandNames = [name
                    for name in self.commands.keys()
                    if self.commands[name].common]
    commandNames.sort()

    maxlen = 0
    for name in commandNames:
      maxlen = max(maxlen, len(name))
    fmt = ' %%-%ds %%s' % maxlen

    for name in commandNames:
      command = self.commands[name]
      try:
        summary = command.helpSummary.strip()
      except AttributeError:
        summary = ''
      print fmt % (name, summary)
    print """
See 'repo help <command>' for more information on a specific command.
See 'repo help --all' for a complete list of recognized commands.
"""

  def _PrintCommandHelp(self, cmd):
    # Render the Summary, option help, and Description sections for cmd.
    class _Out(Coloring):
      def __init__(self, gc):
        Coloring.__init__(self, gc, 'help')
        self.heading = self.printer('heading', attr='bold')

        self.wrap = AbstractFormatter(DumbWriter())

      def _PrintSection(self, heading, bodyAttr):
        # Pull the section text off the command; silently skip if absent.
        try:
          body = getattr(cmd, bodyAttr)
        except AttributeError:
          return
        if body == '' or body is None:
          return

        self.nl()

        self.heading('%s', heading)
        self.nl()

        self.heading('%s', ''.ljust(len(heading), '-'))
        self.nl()

        me = 'repo %s' % cmd.NAME
        body = body.strip()
        body = body.replace('%prog', me)

        # Matches an asciidoc-style underlined title ("Title\n-----").
        asciidoc_hdr = re.compile(r'^\n?([^\n]{1,})\n([=~-]{2,})$')
        for para in body.split("\n\n"):
          if para.startswith(' '):
            # Indented paragraphs are preformatted; emit verbatim.
            self.write('%s', para)
            self.nl()
            self.nl()
            continue

          m = asciidoc_hdr.match(para)
          if m:
            title = m.group(1)
            type = m.group(2)
            if type[0] in ('=', '-'):
              # '=' and '-' underlines are top-level headings.
              p = self.heading
            else:
              # Other underline chars ('~') render as indented headings.
              def _p(fmt, *args):
                self.write(' ')
                self.heading(fmt, *args)
              p = _p

            p('%s', title)
            self.nl()
            p('%s', ''.ljust(len(title),type[0]))
            self.nl()
            continue

          # Ordinary prose paragraph: reflow to the terminal width.
          self.wrap.add_flowing_data(para)
          self.wrap.end_paragraph(1)
        self.wrap.end_paragraph(0)

    out = _Out(self.manifest.globalConfig)
    out._PrintSection('Summary', 'helpSummary')
    cmd.OptionParser.print_help()
    out._PrintSection('Description', 'helpDescription')

  def _Options(self, p):
    p.add_option('-a', '--all',
                 dest='show_all', action='store_true',
                 help='show the complete list of commands')

  def Execute(self, opt, args):
    if len(args) == 0:
      if opt.show_all:
        self._PrintAllCommands()
      else:
        self._PrintCommonCommands()

    elif len(args) == 1:
      name = args[0]

      try:
        cmd = self.commands[name]
      except KeyError:
        print >>sys.stderr, "repo: '%s' is not a repo command." % name
        sys.exit(1)

      # The command needs repodir set before its help text is rendered.
      cmd.repodir = self.repodir
      self._PrintCommandHelp(cmd)

    else:
      # More than one argument: show help about the help command itself.
      self._PrintCommandHelp(self)
| Python |
#
# Copyright (C) 2008 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
import os
REPO_TRACE = 'REPO_TRACE'
try:
_TRACE = os.environ[REPO_TRACE] == '1'
except KeyError:
_TRACE = False
def IsTrace():
  # True if command tracing is currently enabled.
  return _TRACE

def SetTrace():
  # Enable command tracing for the rest of this process.
  global _TRACE
  _TRACE = True

def Trace(fmt, *args):
  # Emit a trace line to stderr, but only when tracing is enabled.
  if IsTrace():
    print >>sys.stderr, fmt % args
| Python |
# Copyright (C) 2008 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import errno
import filecmp
import os
import re
import shutil
import stat
import sys
import urllib2
from color import Coloring
from git_command import GitCommand
from git_config import GitConfig, IsId
from error import GitError, ImportError, UploadError
from error import ManifestInvalidRevisionError
from git_refs import GitRefs, HEAD, R_HEADS, R_TAGS, R_PUB
def _lwrite(path, content):
lock = '%s.lock' % path
fd = open(lock, 'wb')
try:
fd.write(content)
finally:
fd.close()
try:
os.rename(lock, path)
except OSError:
os.remove(lock)
raise
def _error(fmt, *args):
  # Print a formatted 'error: ...' message to stderr (non-fatal).
  msg = fmt % args
  print >>sys.stderr, 'error: %s' % msg
def not_rev(r):
  """Return the rev-list exclusion form ('^<rev>') of revision `r`."""
  return '^%s' % r
def sq(r):
  """Single-quote `r` for safe interpolation into a shell command line.

  Embedded single quotes are escaped with the standard POSIX idiom:
  close the quote, emit an escaped quote, reopen ('  ->  '\''  ).

  Bug fix: the original replacement literal was "'\''", which in Python
  is just three quote characters ("'''") because \' is an escaped quote;
  a quote in the input was silently dropped from the shell word.  The
  literal must contain a real backslash: "'\\''".
  """
  return "'" + r.replace("'", "'\\''") + "'"
# Cached result of repo_hooks(); computed on first call.
hook_list = None

def repo_hooks():
  """Return absolute paths of the stock hook scripts shipped with repo.

  The 'hooks' directory next to this module is scanned once and the
  result memoized in the module-level hook_list.
  """
  global hook_list
  if hook_list is None:
    here = os.path.abspath(os.path.dirname(__file__))
    hooks_dir = os.path.join(here, 'hooks')
    hook_list = [os.path.join(hooks_dir, f) for f in os.listdir(hooks_dir)]
  return hook_list
def relpath(dst, src):
  """Return a relative path from the directory containing `src` to `dst`.

  Both arguments are absolute, '/'-separated paths; `src` is a file
  whose directory is the starting point of the relative walk.
  """
  src = os.path.dirname(src)
  top = os.path.commonprefix([dst, src])
  if top.endswith('/'):
    top = top[:-1]
  else:
    # commonprefix is character-wise; back up to a whole directory.
    top = os.path.dirname(top)

  rel = ''
  walk = src
  while walk != top:
    # One '../' for every directory between src's dir and the common root.
    rel += '../'
    walk = os.path.dirname(walk)
  return rel + dst[len(top) + 1:]
class DownloadedChange(object):
  """A Gerrit change/patch-set that was fetched into this project."""

  # Lazily computed by the commits property.
  _commit_cache = None

  def __init__(self, project, base, change_id, ps_id, commit):
    self.project = project
    self.base = base
    self.change_id = change_id
    self.ps_id = ps_id
    self.commit = commit

  @property
  def commits(self):
    # One-line-per-commit listing of what this change adds over 'base',
    # oldest first; cached after the first git invocation.
    if self._commit_cache is None:
      self._commit_cache = self.project.bare_git.rev_list(
        '--abbrev=8',
        '--abbrev-commit',
        '--pretty=oneline',
        '--reverse',
        '--date-order',
        not_rev(self.base),
        self.commit,
        '--')
    return self._commit_cache
class ReviewableBranch(object):
  """A local topic branch whose commits can be uploaded for review."""

  # Lazily computed by the commits property.
  _commit_cache = None

  def __init__(self, project, branch, base):
    self.project = project
    self.branch = branch
    self.base = base

  @property
  def name(self):
    return self.branch.name

  @property
  def commits(self):
    # One-line-per-commit listing of what would be uploaded (branch
    # head minus 'base'), oldest first; cached after first use.
    if self._commit_cache is None:
      self._commit_cache = self.project.bare_git.rev_list(
        '--abbrev=8',
        '--abbrev-commit',
        '--pretty=oneline',
        '--reverse',
        '--date-order',
        not_rev(self.base),
        R_HEADS + self.name,
        '--')
    return self._commit_cache

  @property
  def unabbrev_commits(self):
    # Map of abbreviated (first 8 chars) -> full commit ids on the branch.
    r = dict()
    for commit in self.project.bare_git.rev_list(
        not_rev(self.base),
        R_HEADS + self.name,
        '--'):
      r[commit[0:8]] = commit
    return r

  @property
  def date(self):
    # Commit date of the branch head, as formatted by git log.
    return self.project.bare_git.log(
      '--pretty=format:%cd',
      '-n', '1',
      R_HEADS + self.name,
      '--')

  def UploadForReview(self, people, auto_topic=False):
    # Delegate to the owning project's upload machinery.
    self.project.UploadForReview(self.name,
                                 people,
                                 auto_topic=auto_topic)

  def GetPublishedRefs(self):
    # Ask the review server which refs/changes/* it already holds;
    # returns {sha: ref}.  Malformed ls-remote lines are skipped.
    refs = {}
    output = self.project.bare_git.ls_remote(
      self.branch.remote.SshReviewUrl(self.project.UserEmail),
      'refs/changes/*')
    for line in output.split('\n'):
      try:
        (sha, ref) = line.split()
        refs[sha] = ref
      except ValueError:
        pass
    return refs
class StatusColoring(Coloring):
  """Colored printers used by the 'repo status' display."""

  def __init__(self, config):
    Coloring.__init__(self, config, 'status')
    self.project = self.printer('header', attr = 'bold')
    self.branch = self.printer('header', attr = 'bold')
    self.nobranch = self.printer('nobranch', fg = 'red')
    self.important = self.printer('important', fg = 'red')

    self.added = self.printer('added', fg = 'green')
    self.changed = self.printer('changed', fg = 'red')
    self.untracked = self.printer('untracked', fg = 'red')
class DiffColoring(Coloring):
  """Colored printer used by the 'repo diff' display."""

  def __init__(self, config):
    Coloring.__init__(self, config, 'diff')
    self.project = self.printer('header', attr = 'bold')
class _CopyFile:
def __init__(self, src, dest, abssrc, absdest):
self.src = src
self.dest = dest
self.abs_src = abssrc
self.abs_dest = absdest
def _Copy(self):
src = self.abs_src
dest = self.abs_dest
# copy file if it does not exist or is out of date
if not os.path.exists(dest) or not filecmp.cmp(src, dest):
try:
# remove existing file first, since it might be read-only
if os.path.exists(dest):
os.remove(dest)
else:
dir = os.path.dirname(dest)
if not os.path.isdir(dir):
os.makedirs(dir)
shutil.copy(src, dest)
# make the file read-only
mode = os.stat(dest)[stat.ST_MODE]
mode = mode & ~(stat.S_IWUSR | stat.S_IWGRP | stat.S_IWOTH)
os.chmod(dest, mode)
except IOError:
_error('Cannot copy file %s to %s', src, dest)
class RemoteSpec(object):
  """Immutable description of a git remote: name, fetch URL, review URL."""

  def __init__(self, name, url=None, review=None):
    self.name = name
    self.url = url
    self.review = review
class Project(object):
  def __init__(self,
               manifest,
               name,
               remote,
               gitdir,
               worktree,
               relpath,
               revisionExpr,
               revisionId):
    """Initialize a Project from its manifest description.

    gitdir and worktree are normalized to forward slashes; worktree is
    None for mirror (bare) projects.  revisionExpr is the symbolic
    revision from the manifest; revisionId, when known, pins a SHA-1.
    """
    self.manifest = manifest
    self.name = name
    self.remote = remote
    self.gitdir = gitdir.replace('\\', '/')
    if worktree:
      self.worktree = worktree.replace('\\', '/')
    else:
      self.worktree = None
    self.relpath = relpath
    self.revisionExpr = revisionExpr

    if revisionId is None \
        and revisionExpr \
        and IsId(revisionExpr):
      # A manifest revision that is already a SHA-1 pins the project.
      self.revisionId = revisionExpr
    else:
      self.revisionId = revisionId

    self.snapshots = {}
    self.copyfiles = []
    self.config = GitConfig.ForRepository(
                    gitdir = self.gitdir,
                    defaults = self.manifest.globalConfig)

    # work_git operates on the work tree; bare_git on the git dir only.
    if self.worktree:
      self.work_git = self._GitGetByExec(self, bare=False)
    else:
      self.work_git = None
    self.bare_git = self._GitGetByExec(self, bare=True)
    self.bare_ref = GitRefs(gitdir)
  @property
  def Exists(self):
    """True if this project's git repository exists on disk."""
    return os.path.isdir(self.gitdir)
  @property
  def CurrentBranch(self):
    """Obtain the name of the currently checked out branch.
       The branch name omits the 'refs/heads/' prefix.
       None is returned if the project is on a detached HEAD.
    """
    b = self.work_git.GetHead()
    if b.startswith(R_HEADS):
      return b[len(R_HEADS):]
    return None
  def IsRebaseInProgress(self):
    """True if a rebase or am session was left unfinished in the work tree.

    Checks the marker directories git itself uses ('rebase-apply',
    'rebase-merge') plus the legacy '.dotest' directory.
    """
    w = self.worktree
    g = os.path.join(w, '.git')
    return os.path.exists(os.path.join(g, 'rebase-apply')) \
        or os.path.exists(os.path.join(g, 'rebase-merge')) \
        or os.path.exists(os.path.join(w, '.dotest'))
  def IsDirty(self, consider_untracked=True):
    """Is the working directory modified in some way?

    Checks staged (index vs HEAD) and unstaged (work tree vs index)
    changes; untracked files count as dirt unless consider_untracked
    is False.
    """
    # Refresh the index first so stat-only differences do not count.
    self.work_git.update_index('-q',
                               '--unmerged',
                               '--ignore-missing',
                               '--refresh')
    if self.work_git.DiffZ('diff-index','-M','--cached',HEAD):
      return True
    if self.work_git.DiffZ('diff-files'):
      return True
    if consider_untracked and self.work_git.LsOthers():
      return True
    return False
  # Cached by _LoadUserIdentity(); None until first accessed.
  _userident_name = None
  _userident_email = None

  @property
  def UserName(self):
    """Obtain the user's personal name.
    """
    if self._userident_name is None:
      self._LoadUserIdentity()
    return self._userident_name

  @property
  def UserEmail(self):
    """Obtain the user's email address. This is very likely
       to be their Gerrit login.
    """
    if self._userident_email is None:
      self._LoadUserIdentity()
    return self._userident_email
  def _LoadUserIdentity(self):
    # Parse "Name <email> ..." out of git var GIT_COMMITTER_IDENT and
    # cache both halves; fall back to empty strings if it can't parse.
    u = self.bare_git.var('GIT_COMMITTER_IDENT')
    m = re.compile("^(.*) <([^>]*)> ").match(u)
    if m:
      self._userident_name = m.group(1)
      self._userident_email = m.group(2)
    else:
      self._userident_name = ''
      self._userident_email = ''
  def GetRemote(self, name):
    """Get the configuration for a single remote.
    """
    return self.config.GetRemote(name)

  def GetBranch(self, name):
    """Get the configuration for a single branch.
    """
    return self.config.GetBranch(name)
  def GetBranches(self):
    """Get all existing local branches.

    Returns a dict of branch name -> branch config object, annotated
    with .current, .published (last uploaded id or None) and .revision.
    """
    current = self.CurrentBranch
    all = self._allrefs
    heads = {}
    pubd = {}

    # First pass: collect every refs/heads/* branch.
    for name, id in all.iteritems():
      if name.startswith(R_HEADS):
        name = name[len(R_HEADS):]
        b = self.GetBranch(name)
        b.current = name == current
        b.published = None
        b.revision = id
        heads[name] = b

    # Second pass: attach refs/published/* markers to their branches.
    for name, id in all.iteritems():
      if name.startswith(R_PUB):
        name = name[len(R_PUB):]
        b = heads.get(name)
        if b:
          b.published = id

    return heads
## Status Display ##
  def HasChanges(self):
    """Returns true if there are uncommitted changes.

    An unfinished rebase, staged or unstaged edits, and untracked
    files all count as changes.
    """
    # Refresh the index so stale stat info does not report false dirt.
    self.work_git.update_index('-q',
                               '--unmerged',
                               '--ignore-missing',
                               '--refresh')
    if self.IsRebaseInProgress():
      return True
    if self.work_git.DiffZ('diff-index', '--cached', HEAD):
      return True
    if self.work_git.DiffZ('diff-files'):
      return True
    if self.work_git.LsOthers():
      return True
    return False
  def PrintWorkTreeStatus(self):
    """Prints the status of the repository to stdout.

    Returns 'CLEAN' if nothing needed printing, 'DIRTY' otherwise,
    or None when the work tree is missing entirely.
    """
    if not os.path.isdir(self.worktree):
      print ''
      print 'project %s/' % self.relpath
      print ' missing (run "repo sync")'
      return

    # Refresh the index so stat-only differences are not reported.
    self.work_git.update_index('-q',
                               '--unmerged',
                               '--ignore-missing',
                               '--refresh')
    rb = self.IsRebaseInProgress()
    di = self.work_git.DiffZ('diff-index', '-M', '--cached', HEAD)
    df = self.work_git.DiffZ('diff-files')
    do = self.work_git.LsOthers()
    if not rb and not di and not df and not do:
      return 'CLEAN'

    out = StatusColoring(self.config)
    out.project('project %-40s', self.relpath + '/')

    branch = self.CurrentBranch
    if branch is None:
      out.nobranch('(*** NO BRANCH ***)')
    else:
      out.branch('branch %s', branch)
    out.nl()

    if rb:
      out.important('prior sync failed; rebase still in progress')
      out.nl()

    # Union of all paths touched in the index, work tree, or untracked.
    paths = list()
    paths.extend(di.keys())
    paths.extend(df.keys())
    paths.extend(do)

    paths = list(set(paths))
    paths.sort()

    for p in paths:
      try: i = di[p]
      except KeyError: i = None

      try: f = df[p]
      except KeyError: f = None

      # Two-letter status: index state (upper), work tree state (lower).
      if i: i_status = i.status.upper()
      else: i_status = '-'

      if f: f_status = f.status.lower()
      else: f_status = '-'

      if i and i.src_path:
        # Rename/copy detected by -M: show old => new with similarity.
        line = ' %s%s\t%s => %s (%s%%)' % (i_status, f_status,
                                           i.src_path, p, i.level)
      else:
        line = ' %s%s\t%s' % (i_status, f_status, p)

      if i and not f:
        out.added('%s', line)
      elif (i and f) or (not i and f):
        out.changed('%s', line)
      elif not i and not f:
        out.untracked('%s', line)
      else:
        out.write('%s', line)
      out.nl()
    return 'DIRTY'
  def PrintWorkTreeDiff(self):
    """Prints the diff of the work tree against HEAD to stdout.

    A colored project header is printed before the first diff line,
    and only if there is any diff output at all.
    """
    out = DiffColoring(self.config)
    cmd = ['diff']
    if out.is_on:
      cmd.append('--color')
    cmd.append(HEAD)
    cmd.append('--')
    p = GitCommand(self,
                   cmd,
                   capture_stdout = True,
                   capture_stderr = True)
    has_diff = False
    for line in p.process.stdout:
      if not has_diff:
        out.nl()
        out.project('project %s/' % self.relpath)
        out.nl()
        has_diff = True
      # Strip the trailing newline; print adds its own.
      print line[:-1]
    p.Wait()
## Publish / Upload ##
  def WasPublished(self, branch, all=None):
    """Was the branch published (uploaded) for code review?
       If so, returns the SHA-1 hash of the last published
       state for the branch.

    'all' may be a pre-loaded ref dict to avoid a git invocation.
    """
    key = R_PUB + branch
    if all is None:
      try:
        return self.bare_git.rev_parse(key)
      except GitError:
        return None
    else:
      try:
        return all[key]
      except KeyError:
        return None
  def CleanPublishedCache(self, all=None):
    """Prunes any stale published refs.

    A refs/published/* entry is stale once its matching refs/heads/*
    branch no longer exists.
    """
    if all is None:
      all = self._allrefs
    heads = set()
    canrm = {}
    for name, id in all.iteritems():
      if name.startswith(R_HEADS):
        heads.add(name)
      elif name.startswith(R_PUB):
        canrm[name] = id

    for name, id in canrm.iteritems():
      n = name[len(R_PUB):]
      if R_HEADS + n not in heads:
        self.bare_git.DeleteRef(name, id)
  def GetUploadableBranches(self):
    """List any branches which can be uploaded for review.

    A branch whose head equals its last-published id is skipped;
    the rest are filtered through GetUploadableBranch().
    """
    heads = {}
    pubed = {}

    for name, id in self._allrefs.iteritems():
      if name.startswith(R_HEADS):
        heads[name[len(R_HEADS):]] = id
      elif name.startswith(R_PUB):
        pubed[name[len(R_PUB):]] = id

    ready = []
    for branch, id in heads.iteritems():
      if branch in pubed and pubed[branch] == id:
        # Head unchanged since the last upload; nothing new to send.
        continue

      rb = self.GetUploadableBranch(branch)
      if rb:
        ready.append(rb)
    return ready
  def GetUploadableBranch(self, branch_name):
    """Get a single uploadable branch, or None.

    The branch must track a remote and have at least one commit not
    yet in the tracked upstream.
    """
    branch = self.GetBranch(branch_name)
    base = branch.LocalMerge
    if branch.LocalMerge:
      rb = ReviewableBranch(self, branch, base)
      if rb.commits:
        return rb
    return None
  def UploadForReview(self, branch=None,
                      people=([],[]),
                      auto_topic=False):
    """Uploads the named branch for code review.

    branch defaults to the current branch; people is a (reviewers, cc)
    pair of email lists; auto_topic appends the branch name as the
    Gerrit topic.  Raises GitError on precondition failures and
    UploadError if the push itself fails or the protocol is unsupported.
    """
    if branch is None:
      branch = self.CurrentBranch
    if branch is None:
      raise GitError('not currently on a branch')

    branch = self.GetBranch(branch)
    if not branch.LocalMerge:
      raise GitError('branch %s does not track a remote' % branch.name)
    if not branch.remote.review:
      raise GitError('remote %s has no review url' % branch.remote.name)

    dest_branch = branch.merge
    if not dest_branch.startswith(R_HEADS):
      dest_branch = R_HEADS + dest_branch

    if not branch.remote.projectname:
      # Remember which Gerrit project this remote maps to.
      branch.remote.projectname = self.name
      branch.remote.Save()

    if branch.remote.ReviewProtocol == 'ssh':
      if dest_branch.startswith(R_HEADS):
        # refs/for/<name> wants the short branch name.
        dest_branch = dest_branch[len(R_HEADS):]

      # Build the custom receive-pack invocation carrying reviewer/cc
      # arguments; values are shell-quoted via sq().
      rp = ['gerrit receive-pack']
      for e in people[0]:
        rp.append('--reviewer=%s' % sq(e))
      for e in people[1]:
        rp.append('--cc=%s' % sq(e))

      ref_spec = '%s:refs/for/%s' % (R_HEADS + branch.name, dest_branch)
      if auto_topic:
        ref_spec = ref_spec + '/' + branch.name

      cmd = ['push']
      cmd.append('--receive-pack=%s' % " ".join(rp))
      cmd.append(branch.remote.SshReviewUrl(self.UserEmail))
      cmd.append(ref_spec)

      if GitCommand(self, cmd, bare = True).Wait() != 0:
        raise UploadError('Upload failed')

    else:
      raise UploadError('Unsupported protocol %s' \
        % branch.remote.review)

    # Record the uploaded state under refs/published/<branch>.
    msg = "posted to %s for %s" % (branch.remote.review, dest_branch)
    self.bare_git.UpdateRef(R_PUB + branch.name,
                            R_HEADS + branch.name,
                            message = msg)
## Sync ##
  def Sync_NetworkHalf(self, quiet=False):
    """Perform only the network IO portion of the sync process.
       Local working directory/branch state is not affected.
    """
    is_new = not self.Exists
    if is_new:
      if not quiet:
        print >>sys.stderr
        print >>sys.stderr, 'Initializing project %s ...' % self.name
      self._InitGitDir()

    self._InitRemote()
    if not self._RemoteFetch(initial=is_new, quiet=quiet):
      return False

    #Check that the requested ref was found after fetch
    #
    try:
      self.GetRevisionId()
    except ManifestInvalidRevisionError:
      # if the ref is a tag. We can try fetching
      # the tag manually as a last resort
      #
      rev = self.revisionExpr
      if rev.startswith(R_TAGS):
        self._RemoteFetch(None, rev[len(R_TAGS):], quiet=quiet)

    if self.worktree:
      # Update the m/<branch> convenience refs for this client.
      self.manifest.SetMRefs(self)
    else:
      self._InitMirrorHead()
      try:
        # Mirrors do not need the FETCH_HEAD leftover from the fetch.
        os.remove(os.path.join(self.gitdir, 'FETCH_HEAD'))
      except OSError:
        pass
    return True
  def PostRepoUpgrade(self):
    # Refresh stock hook links after the repo tool itself was upgraded.
    self._InitHooks()

  def _CopyFiles(self):
    # Apply every <copyfile> rule recorded for this project.
    for file in self.copyfiles:
      file._Copy()
  def GetRevisionId(self, all=None):
    """Resolve the manifest revision to a commit SHA-1.

    Uses the pinned revisionId when present; otherwise resolves the
    remote-tracking form of revisionExpr, consulting the optional
    pre-loaded ref dict 'all' before invoking git.  Raises
    ManifestInvalidRevisionError if the revision cannot be found.
    """
    if self.revisionId:
      return self.revisionId

    rem = self.GetRemote(self.remote.name)
    rev = rem.ToLocal(self.revisionExpr)

    if all is not None and rev in all:
      return all[rev]

    try:
      return self.bare_git.rev_parse('--verify', '%s^0' % rev)
    except GitError:
      raise ManifestInvalidRevisionError(
        'revision %s in %s not found' % (self.revisionExpr,
                                         self.name))
  def Sync_LocalHalf(self, syncbuf):
    """Perform only the local IO portion of the sync process.
       Network access is not required.

    Results (messages, failures, deferred fast-forwards/rebases) are
    accumulated in syncbuf rather than returned.
    """
    self._InitWorkTree()
    all = self.bare_ref.all
    self.CleanPublishedCache(all)

    revid = self.GetRevisionId(all)
    head = self.work_git.GetHead()
    if head.startswith(R_HEADS):
      branch = head[len(R_HEADS):]
      try:
        head = all[head]
      except KeyError:
        # Symbolic HEAD points at an unborn branch.
        head = None
    else:
      branch = None

    if branch is None or syncbuf.detach_head:
      # Currently on a detached HEAD.  The user is assumed to
      # not have any local modifications worth worrying about.
      #
      if self.IsRebaseInProgress():
        syncbuf.fail(self, _PriorSyncFailedError())
        return

      if head == revid:
        # No changes; don't do anything further.
        #
        return

      lost = self._revlist(not_rev(revid), HEAD)
      if lost:
        syncbuf.info(self, "discarding %d commits", len(lost))
      try:
        self._Checkout(revid, quiet=True)
      except GitError, e:
        syncbuf.fail(self, e)
        return
      self._CopyFiles()
      return

    if head == revid:
      # No changes; don't do anything further.
      #
      return

    branch = self.GetBranch(branch)

    if not branch.LocalMerge:
      # The current branch has no tracking configuration.
      # Jump off it to a detached HEAD.
      #
      syncbuf.info(self,
                   "leaving %s; does not track upstream",
                   branch.name)
      try:
        self._Checkout(revid, quiet=True)
      except GitError, e:
        syncbuf.fail(self, e)
        return
      self._CopyFiles()
      return

    upstream_gain = self._revlist(not_rev(HEAD), revid)
    pub = self.WasPublished(branch.name, all)
    if pub:
      not_merged = self._revlist(not_rev(revid), pub)
      if not_merged:
        if upstream_gain:
          # The user has published this branch and some of those
          # commits are not yet merged upstream.  We do not want
          # to rewrite the published commits so we punt.
          #
          syncbuf.fail(self,
                       "branch %s is published (but not merged) and is now %d commits behind"
                       % (branch.name, len(upstream_gain)))
        return
      elif pub == head:
        # All published commits are merged, and thus we are a
        # strict subset.  We can fast-forward safely.
        #
        def _doff():
          self._FastForward(revid)
          self._CopyFiles()
        syncbuf.later1(self, _doff)
        return

    # Examine the local commits not in the remote.  Find the
    # last one attributed to this user, if any.
    #
    local_changes = self._revlist(not_rev(revid), HEAD, format='%H %ce')
    last_mine = None
    cnt_mine = 0
    for commit in local_changes:
      commit_id, committer_email = commit.split(' ', 1)
      if committer_email == self.UserEmail:
        last_mine = commit_id
        cnt_mine += 1

    if not upstream_gain and cnt_mine == len(local_changes):
      # Nothing new upstream and every local commit is ours; leave it.
      return

    if self.IsDirty(consider_untracked=False):
      syncbuf.fail(self, _DirtyError())
      return

    # If the upstream switched on us, warn the user.
    #
    if branch.merge != self.revisionExpr:
      if branch.merge and self.revisionExpr:
        syncbuf.info(self,
                     'manifest switched %s...%s',
                     branch.merge,
                     self.revisionExpr)
      elif branch.merge:
        syncbuf.info(self,
                     'manifest no longer tracks %s',
                     branch.merge)

    if cnt_mine < len(local_changes):
      # Upstream rebased.  Not everything in HEAD
      # was created by this user.
      #
      syncbuf.info(self,
                   "discarding %d commits removed from upstream",
                   len(local_changes) - cnt_mine)

    branch.remote = self.GetRemote(self.remote.name)
    branch.merge = self.revisionExpr
    branch.Save()

    if cnt_mine > 0:
      # Replay only this user's commits onto the new upstream.
      def _dorebase():
        self._Rebase(upstream = '%s^1' % last_mine, onto = revid)
        self._CopyFiles()
      syncbuf.later2(self, _dorebase)
    elif local_changes:
      # All local commits came from upstream's old history; drop them.
      try:
        self._ResetHard(revid)
        self._CopyFiles()
      except GitError, e:
        syncbuf.fail(self, e)
        return
    else:
      def _doff():
        self._FastForward(revid)
        self._CopyFiles()
      syncbuf.later1(self, _doff)
  def AddCopyFile(self, src, dest, absdest):
    """Record a <copyfile> rule to be applied by _CopyFiles()."""
    # dest should already be an absolute path, but src is project relative
    # make src an absolute path
    abssrc = os.path.join(self.worktree, src)
    self.copyfiles.append(_CopyFile(src, dest, abssrc, absdest))
  def DownloadPatchSet(self, change_id, patch_id):
    """Download a single patch set of a single change to FETCH_HEAD.

    Returns a DownloadedChange on success, None if the fetch failed.
    """
    remote = self.GetRemote(self.remote.name)

    cmd = ['fetch', remote.name]
    # Gerrit stores changes under refs/changes/<last-2-digits>/<change>/<ps>.
    cmd.append('refs/changes/%2.2d/%d/%d' \
               % (change_id % 100, change_id, patch_id))
    cmd.extend(map(lambda x: str(x), remote.fetch))
    if GitCommand(self, cmd, bare=True).Wait() != 0:
      return None
    return DownloadedChange(self,
                            self.GetRevisionId(),
                            change_id,
                            patch_id,
                            self.bare_git.rev_parse('FETCH_HEAD'))
## Branch Management ##
  def StartBranch(self, name):
    """Create a new branch off the manifest's revision.

    Returns True on success.  If HEAD already equals the manifest
    revision, the branch ref and HEAD are written directly (avoiding
    a git checkout); otherwise 'git checkout -b' is used.
    """
    head = self.work_git.GetHead()
    if head == (R_HEADS + name):
      # Already on the requested branch.
      return True

    all = self.bare_ref.all
    if (R_HEADS + name) in all:
      # Branch exists; just switch to it.
      return GitCommand(self,
                        ['checkout', name, '--'],
                        capture_stdout = True,
                        capture_stderr = True).Wait() == 0

    branch = self.GetBranch(name)
    branch.remote = self.GetRemote(self.remote.name)
    branch.merge = self.revisionExpr

    revid = self.GetRevisionId(all)
    if head.startswith(R_HEADS):
      try:
        head = all[head]
      except KeyError:
        head = None

    if revid and head and revid == head:
      # Fast path: HEAD already sits on the target revision, so the
      # branch can be created by writing the ref files directly.
      ref = os.path.join(self.gitdir, R_HEADS + name)
      try:
        os.makedirs(os.path.dirname(ref))
      except OSError:
        pass
      _lwrite(ref, '%s\n' % revid)
      _lwrite(os.path.join(self.worktree, '.git', HEAD),
              'ref: %s%s\n' % (R_HEADS, name))
      branch.Save()
      return True

    if GitCommand(self,
                  ['checkout', '-b', branch.name, revid],
                  capture_stdout = True,
                  capture_stderr = True).Wait() == 0:
      branch.Save()
      return True
    return False
  def CheckoutBranch(self, name):
    """Checkout a local topic branch.

    Returns True on success, False if the branch does not exist or
    the checkout failed.
    """
    rev = R_HEADS + name
    head = self.work_git.GetHead()
    if head == rev:
      # Already on the branch
      #
      return True

    all = self.bare_ref.all
    try:
      revid = all[rev]
    except KeyError:
      # Branch does not exist in this project
      #
      return False

    if head.startswith(R_HEADS):
      try:
        head = all[head]
      except KeyError:
        head = None

    if head == revid:
      # Same revision; just update HEAD to point to the new
      # target branch, but otherwise take no other action.
      #
      _lwrite(os.path.join(self.worktree, '.git', HEAD),
              'ref: %s%s\n' % (R_HEADS, name))
      return True

    return GitCommand(self,
                      ['checkout', name, '--'],
                      capture_stdout = True,
                      capture_stderr = True).Wait() == 0
  def AbandonBranch(self, name):
    """Destroy a local topic branch.

    Returns True if the branch is gone (or never existed).
    """
    rev = R_HEADS + name
    all = self.bare_ref.all
    if rev not in all:
      # Doesn't exist; assume already abandoned.
      #
      return True

    head = self.work_git.GetHead()
    if head == rev:
      # We can't destroy the branch while we are sitting
      # on it.  Switch to a detached HEAD.
      #
      head = all[head]

      revid = self.GetRevisionId(all)
      if head == revid:
        # Already at the manifest revision; detach by writing HEAD.
        _lwrite(os.path.join(self.worktree, '.git', HEAD),
                '%s\n' % revid)
      else:
        self._Checkout(revid, quiet=True)

    return GitCommand(self,
                      ['branch', '-D', name],
                      capture_stdout = True,
                      capture_stderr = True).Wait() == 0
  def PruneHeads(self):
    """Prune any topic branches already merged into upstream.

    Returns the list of branches (as ReviewableBranch) that could NOT
    be safely deleted and were therefore kept.
    """
    cb = self.CurrentBranch
    kill = []
    left = self._allrefs
    for name in left.keys():
      if name.startswith(R_HEADS):
        name = name[len(R_HEADS):]
        if cb is None or name != cb:
          kill.append(name)

    rev = self.GetRevisionId(left)
    if cb is not None \
       and not self._revlist(HEAD + '...' + rev) \
       and not self.IsDirty(consider_untracked = False):
      # Current branch is fully merged and clean; it can go too.
      self.work_git.DetachHead(HEAD)
      kill.append(cb)

    if kill:
      old = self.bare_git.GetHead()
      if old is None:
        # Bare repo HEAD may be unborn; park it on an impossible name
        # so it can be restored afterwards.
        old = 'refs/heads/please_never_use_this_as_a_branch_name'

      try:
        # Detach so 'git branch -d' can delete the current branch;
        # -d (not -D) makes git refuse to drop unmerged work.
        self.bare_git.DetachHead(rev)

        b = ['branch', '-d']
        b.extend(kill)
        b = GitCommand(self, b, bare=True,
                       capture_stdout=True,
                       capture_stderr=True)
        b.Wait()
      finally:
        self.bare_git.SetHead(old)
        left = self._allrefs

      for branch in kill:
        if (R_HEADS + branch) not in left:
          # At least one branch was deleted; drop stale published refs.
          self.CleanPublishedCache()
          break

    if cb and cb not in kill:
      kill.append(cb)
    kill.sort()

    kept = []
    for branch in kill:
      if (R_HEADS + branch) in left:
        # git refused to delete it (unmerged work); report it back.
        branch = self.GetBranch(branch)
        base = branch.LocalMerge
        if not base:
          base = rev
        kept.append(ReviewableBranch(self, branch, base))
    return kept
## Direct Git Commands ##
  def _RemoteFetch(self, name=None, tag=None,
                   initial=False,
                   quiet=False):
    """Fetch from the named remote (default: the project's remote).

    When 'initial' is True and a reference repository is configured
    via objects/info/alternates, the reference's refs are temporarily
    written into packed-refs so the fetch can reuse its objects; the
    temporary refs are removed again afterwards.
    """
    if not name:
      name = self.remote.name

    ssh_proxy = False
    if self.GetRemote(name).PreConnectFetch():
      ssh_proxy = True

    if initial:
      # Look for a reference repository recorded in alternates.
      alt = os.path.join(self.gitdir, 'objects/info/alternates')
      try:
        fd = open(alt, 'rb')
        try:
          ref_dir = fd.readline()
          if ref_dir and ref_dir.endswith('\n'):
            ref_dir = ref_dir[:-1]
        finally:
          fd.close()
      except IOError, e:
        ref_dir = None

      if ref_dir and 'objects' == os.path.basename(ref_dir):
        ref_dir = os.path.dirname(ref_dir)
        packed_refs = os.path.join(self.gitdir, 'packed-refs')

        remote = self.GetRemote(name)
        all = self.bare_ref.all
        ids = set(all.values())
        tmp = set()

        for r, id in GitRefs(ref_dir).all.iteritems():
          if r not in all:
            if r.startswith(R_TAGS) or remote.WritesTo(r):
              # Adopt the reference's ref under its own name.
              all[r] = id
              ids.add(id)
              continue

          if id in ids:
            continue

          # Unrelated ref: expose its object under a temporary name so
          # the fetch negotiation can still see it.
          r = 'refs/_alt/%s' % id
          all[r] = id
          ids.add(id)
          tmp.add(r)

        ref_names = list(all.keys())
        ref_names.sort()

        tmp_packed = ''
        old_packed = ''

        for r in ref_names:
          line = '%s %s\n' % (all[r], r)
          tmp_packed += line
          if r not in tmp:
            old_packed += line

        _lwrite(packed_refs, tmp_packed)

      else:
        ref_dir = None

    cmd = ['fetch']
    if quiet:
      cmd.append('--quiet')
    if not self.worktree:
      # Mirrors may legitimately update the ref HEAD points at.
      cmd.append('--update-head-ok')
    cmd.append(name)
    if tag is not None:
      cmd.append('tag')
      cmd.append(tag)

    ok = GitCommand(self,
                    cmd,
                    bare = True,
                    ssh_proxy = ssh_proxy).Wait() == 0

    if initial:
      if ref_dir:
        # Restore packed-refs without the temporary refs/_alt entries.
        if old_packed != '':
          _lwrite(packed_refs, old_packed)
        else:
          os.remove(packed_refs)
      self.bare_git.pack_refs('--all', '--prune')
    return ok
  def _Checkout(self, rev, quiet=False):
    """git checkout <rev>; raises GitError on failure.

    The failure is only raised when the repository actually has refs;
    an empty (just-initialized) repository fails silently.
    """
    cmd = ['checkout']
    if quiet:
      cmd.append('-q')
    cmd.append(rev)
    cmd.append('--')
    if GitCommand(self, cmd).Wait() != 0:
      if self._allrefs:
        raise GitError('%s checkout %s ' % (self.name, rev))
  def _ResetHard(self, rev, quiet=True):
    """git reset --hard <rev>; raises GitError on failure."""
    cmd = ['reset', '--hard']
    if quiet:
      cmd.append('-q')
    cmd.append(rev)
    if GitCommand(self, cmd).Wait() != 0:
      raise GitError('%s reset --hard %s ' % (self.name, rev))
def _Rebase(self, upstream, onto = None):
cmd = ['rebase']
if onto is not None:
cmd.extend(['--onto', onto])
cmd.append(upstream)
if GitCommand(self, cmd).Wait() != 0:
raise GitError('%s rebase %s ' % (self.name, upstream))
def _FastForward(self, head):
cmd = ['merge', head]
if GitCommand(self, cmd).Wait() != 0:
raise GitError('%s merge %s ' % (self.name, head))
  def _InitGitDir(self):
    """Create and configure this project's bare git directory.

    No-op when the gitdir already exists.  Otherwise: runs git init,
    wires objects/info/alternates to a configured reference mirror,
    sets core.bare, installs the stock hooks, and copies user identity
    settings from the manifest project's configuration.
    """
    if not os.path.exists(self.gitdir):
      os.makedirs(self.gitdir)
      self.bare_git.init()

      mp = self.manifest.manifestProject
      ref_dir = mp.config.GetString('repo.reference')

      if ref_dir:
        # Prefer a bare mirror of this project under the reference
        # directory; otherwise fall back to the reference checkout's
        # own .repo/projects gitdir, if either exists.
        mirror_git = os.path.join(ref_dir, self.name + '.git')
        repo_git = os.path.join(ref_dir, '.repo', 'projects',
                                self.relpath + '.git')

        if os.path.exists(mirror_git):
          ref_dir = mirror_git

        elif os.path.exists(repo_git):
          ref_dir = repo_git

        else:
          ref_dir = None

        if ref_dir:
          # Borrow objects from the reference repository.
          _lwrite(os.path.join(self.gitdir, 'objects/info/alternates'),
                  os.path.join(ref_dir, 'objects') + '\n')

      if self.manifest.IsMirror:
        self.config.SetString('core.bare', 'true')
      else:
        self.config.SetString('core.bare', None)

      # Remove any hooks git init may have created before installing
      # the stock repo hook set.
      hooks = self._gitdir_path('hooks')
      try:
        to_rm = os.listdir(hooks)
      except OSError:
        to_rm = []
      for old_hook in to_rm:
        os.remove(os.path.join(hooks, old_hook))
      self._InitHooks()

      m = self.manifest.manifestProject.config
      for key in ['user.name', 'user.email']:
        if m.Has(key, include_defaults = False):
          self.config.SetString(key, m.GetString(key))
def _InitHooks(self):
hooks = self._gitdir_path('hooks')
if not os.path.exists(hooks):
os.makedirs(hooks)
for stock_hook in repo_hooks():
name = os.path.basename(stock_hook)
if name in ('commit-msg') and not self.remote.review:
# Don't install a Gerrit Code Review hook if this
# project does not appear to use it for reviews.
#
continue
dst = os.path.join(hooks, name)
if os.path.islink(dst):
continue
if os.path.exists(dst):
if filecmp.cmp(stock_hook, dst, shallow=False):
os.remove(dst)
else:
_error("%s: Not replacing %s hook", self.relpath, name)
continue
try:
os.symlink(relpath(stock_hook, dst), dst)
except OSError, e:
if e.errno == errno.EPERM:
raise GitError('filesystem must support symlinks')
else:
raise
def _InitRemote(self):
if self.remote.url:
remote = self.GetRemote(self.remote.name)
remote.url = self.remote.url
remote.review = self.remote.review
remote.projectname = self.name
if self.worktree:
remote.ResetFetch(mirror=False)
else:
remote.ResetFetch(mirror=True)
remote.Save()
  def _InitMirrorHead(self):
    # Point a mirror's HEAD at the manifest revision (see _InitAnyMRef).
    self._InitAnyMRef(HEAD)
  def _InitAnyMRef(self, ref):
    """Initialize ref (e.g. HEAD) to the manifest's revision.

    A pinned revisionId produces a detached update-ref; otherwise a
    symbolic ref is pointed at the remote tracking branch for
    revisionExpr.
    """
    cur = self.bare_ref.symref(ref)

    if self.revisionId:
      # NOTE(review): the "cur != ''" half of this test forces a
      # re-detach whenever ref is currently symbolic, regardless of
      # where it points; preserved as-is.
      if cur != '' or self.bare_ref.get(ref) != self.revisionId:
        msg = 'manifest set to %s' % self.revisionId
        dst = self.revisionId + '^0'
        self.bare_git.UpdateRef(ref, dst, message = msg, detach = True)
    else:
      remote = self.GetRemote(self.remote.name)
      dst = remote.ToLocal(self.revisionExpr)
      if cur != dst:
        msg = 'manifest set to %s' % self.revisionExpr
        self.bare_git.symbolic_ref('-m', msg, ref, dst)
  def _LinkWorkTree(self, relink=False):
    """Populate worktree/.git with symlinks into the real gitdir.

    With relink=True existing links are removed and recreated.  A
    regular file or directory in the way raises GitError rather than
    being clobbered; a filesystem without symlink support also raises
    GitError.
    """
    dotgit = os.path.join(self.worktree, '.git')
    if not relink:
      os.makedirs(dotgit)

    for name in ['config',
                 'description',
                 'hooks',
                 'info',
                 'logs',
                 'objects',
                 'packed-refs',
                 'refs',
                 'rr-cache',
                 'svn']:
      try:
        src = os.path.join(self.gitdir, name)
        dst = os.path.join(dotgit, name)
        if relink:
          os.remove(dst)
        if os.path.islink(dst) or not os.path.exists(dst):
          # Use a relative link so the checkout can be moved together
          # with its gitdir.
          os.symlink(relpath(src, dst), dst)
        else:
          raise GitError('cannot overwrite a local work tree')
      except OSError, e:
        if e.errno == errno.EPERM:
          raise GitError('filesystem must support symlinks')
        else:
          raise
  def _InitWorkTree(self):
    """Create the work tree and check out the manifest revision.

    No-op when worktree/.git already exists.
    """
    dotgit = os.path.join(self.worktree, '.git')
    if not os.path.exists(dotgit):
      self._LinkWorkTree()

      # Detach HEAD at the pinned revision, then populate the work
      # tree from it (verbose read-tree so the user sees progress).
      _lwrite(os.path.join(dotgit, HEAD), '%s\n' % self.GetRevisionId())

      cmd = ['read-tree', '--reset', '-u']
      cmd.append('-v')
      cmd.append(HEAD)
      if GitCommand(self, cmd).Wait() != 0:
        raise GitError("cannot initialize work tree")
      self._CopyFiles()
def _gitdir_path(self, path):
return os.path.join(self.gitdir, path)
def _revlist(self, *args, **kw):
a = []
a.extend(args)
a.append('--')
return self.work_git.rev_list(*a, **kw)
  @property
  def _allrefs(self):
    # Every ref (loose and packed) known in the bare repository.
    return self.bare_ref.all
  class _GitGetByExec(object):
    """Runs git subprocesses on behalf of a project.

    Bound as project.work_git (work tree) and project.bare_git (bare);
    any unknown attribute access is turned into the corresponding git
    subcommand via __getattr__.
    """
    def __init__(self, project, bare):
      self._project = project
      self._bare = bare  # True: run against the bare gitdir, not the work tree

    def LsOthers(self):
      """Return untracked, unignored paths (git ls-files --others -z)."""
      p = GitCommand(self._project,
                     ['ls-files',
                      '-z',
                      '--others',
                      '--exclude-standard'],
                     bare = False,
                     capture_stdout = True,
                     capture_stderr = True)
      if p.Wait() == 0:
        out = p.stdout
        if out:
          # -z output is NUL-separated with a trailing NUL.
          return out[:-1].split("\0")
      return []

    def DiffZ(self, name, *args):
      """Run a diff-style command with -z and parse its raw records.

      Returns a dict mapping path -> _Info with old/new mode, old/new
      object id and status; src_path is filled in for renames/copies.
      Returns an empty dict when there is no output.
      """
      cmd = [name]
      cmd.append('-z')
      cmd.extend(args)
      p = GitCommand(self._project,
                     cmd,
                     bare = False,
                     capture_stdout = True,
                     capture_stderr = True)
      try:
        out = p.process.stdout.read()
        r = {}
        if out:
          # Records alternate: ":old new oid nid S" NUL path NUL
          # (plus a second path for renames/copies).
          out = iter(out[:-1].split('\0'))
          while out:
            try:
              info = out.next()
              path = out.next()
            except StopIteration:
              break

            class _Info(object):
              def __init__(self, path, omode, nmode, oid, nid, state):
                self.path = path
                self.src_path = None
                self.old_mode = omode
                self.new_mode = nmode
                self.old_id = oid
                self.new_id = nid

                if len(state) == 1:
                  self.status = state
                  self.level = None
                else:
                  # e.g. 'R095': status letter plus similarity score,
                  # with leading zeros stripped.
                  self.status = state[:1]
                  self.level = state[1:]
                  while self.level.startswith('0'):
                    self.level = self.level[1:]

            info = info[1:].split(' ')
            info =_Info(path, *info)
            if info.status in ('R', 'C'):
              # Renames/copies carry the destination path next.
              info.src_path = info.path
              info.path = out.next()
            r[info.path] = info
        return r
      finally:
        p.Wait()

    def GetHead(self):
      """Return the content of HEAD: a ref name or a detached commit id."""
      if self._bare:
        path = os.path.join(self._project.gitdir, HEAD)
      else:
        path = os.path.join(self._project.worktree, '.git', HEAD)
      fd = open(path, 'rb')
      try:
        line = fd.read()
      finally:
        fd.close()
      if line.startswith('ref: '):
        return line[5:-1]
      return line[:-1]

    def SetHead(self, ref, message=None):
      """Point HEAD (symbolically) at ref, optionally logging message."""
      cmdv = []
      if message is not None:
        cmdv.extend(['-m', message])
      cmdv.append(HEAD)
      cmdv.append(ref)
      self.symbolic_ref(*cmdv)

    def DetachHead(self, new, message=None):
      """Detach HEAD at commit new, optionally logging message."""
      cmdv = ['--no-deref']
      if message is not None:
        cmdv.extend(['-m', message])
      cmdv.append(HEAD)
      cmdv.append(new)
      self.update_ref(*cmdv)

    def UpdateRef(self, name, new, old=None,
                  message=None,
                  detach=False):
      """Update ref name to new (optionally verifying old value first)."""
      cmdv = []
      if message is not None:
        cmdv.extend(['-m', message])
      if detach:
        cmdv.append('--no-deref')
      cmdv.append(name)
      cmdv.append(new)
      if old is not None:
        cmdv.append(old)
      self.update_ref(*cmdv)

    def DeleteRef(self, name, old=None):
      """Delete ref name and drop it from the cached ref table."""
      if not old:
        old = self.rev_parse(name)
      self.update_ref('-d', name, old)
      self._project.bare_ref.deleted(name)

    def rev_list(self, *args, **kw):
      """Run rev-list (or log with a format) and return output lines.

      Raises GitError when the subprocess fails.
      """
      if 'format' in kw:
        cmdv = ['log', '--pretty=format:%s' % kw['format']]
      else:
        cmdv = ['rev-list']
      cmdv.extend(args)
      p = GitCommand(self._project,
                     cmdv,
                     bare = self._bare,
                     capture_stdout = True,
                     capture_stderr = True)
      r = []
      for line in p.process.stdout:
        if line[-1] == '\n':
          line = line[:-1]
        r.append(line)
      if p.Wait() != 0:
        raise GitError('%s rev-list %s: %s' % (
                       self._project.name,
                       str(args),
                       p.stderr))
      return r

    def __getattr__(self, name):
      """Translate attribute access into a git subcommand.

      e.g. bare_git.pack_refs('--all') runs 'git pack-refs --all'
      (underscores become dashes).  The runner raises GitError on a
      nonzero exit; a single-line result has its newline stripped.
      """
      name = name.replace('_', '-')
      def runner(*args):
        cmdv = [name]
        cmdv.extend(args)
        p = GitCommand(self._project,
                       cmdv,
                       bare = self._bare,
                       capture_stdout = True,
                       capture_stderr = True)
        if p.Wait() != 0:
          raise GitError('%s %s: %s' % (
                         self._project.name,
                         name,
                         p.stderr))
        r = p.stdout
        if r.endswith('\n') and r.index('\n') == len(r) - 1:
          return r[:-1]
        return r
      return runner
class _PriorSyncFailedError(Exception):
def __str__(self):
return 'prior sync failed; rebase still in progress'
class _DirtyError(Exception):
def __str__(self):
return 'contains uncommitted changes'
class _InfoMessage(object):
def __init__(self, project, text):
self.project = project
self.text = text
def Print(self, syncbuf):
syncbuf.out.info('%s/: %s', self.project.relpath, self.text)
syncbuf.out.nl()
class _Failure(object):
def __init__(self, project, why):
self.project = project
self.why = why
def Print(self, syncbuf):
syncbuf.out.fail('error: %s/: %s',
self.project.relpath,
str(self.why))
syncbuf.out.nl()
class _Later(object):
  """A deferred per-project action (e.g. a rebase) to run after fetching."""

  def __init__(self, project, action):
    self.project = project
    self.action = action

  def Run(self, syncbuf):
    """Execute the action; True on success, False when it raises GitError."""
    out = syncbuf.out
    out.project('project %s/', self.project.relpath)
    out.nl()
    try:
      self.action()
      out.nl()
      return True
    except GitError:
      out.nl()
      return False
class _SyncColoring(Coloring):
  """Color scheme for 'repo sync' output (the color.reposync section)."""

  def __init__(self, config):
    Coloring.__init__(self, config, 'reposync')
    # Named printers used by the sync message objects above.
    self.project = self.printer('header', attr = 'bold')
    self.info = self.printer('info')
    self.fail = self.printer('fail', fg='red')
class SyncBuffer(object):
  """Accumulates output and deferred work produced during a sync.

  Messages and failures are buffered and flushed together; 'later'
  actions run in two ordered queues once fetching completes.  The
  'clean' flag records whether everything succeeded.
  """

  def __init__(self, config, detach_head=False):
    self._messages = []
    self._failures = []
    self._later_queue1 = []
    self._later_queue2 = []

    self.out = _SyncColoring(config)
    self.out.redirect(sys.stderr)

    self.detach_head = detach_head
    self.clean = True

  def info(self, project, fmt, *args):
    """Queue an informational message for project."""
    self._messages.append(_InfoMessage(project, fmt % args))

  def fail(self, project, err=None):
    """Queue a failure for project and mark the sync unclean."""
    self._failures.append(_Failure(project, err))
    self.clean = False

  def later1(self, project, what):
    """Queue 'what' in the first deferred queue."""
    self._later_queue1.append(_Later(project, what))

  def later2(self, project, what):
    """Queue 'what' in the second deferred queue."""
    self._later_queue2.append(_Later(project, what))

  def Finish(self):
    """Flush messages, run deferred work, and report overall success."""
    self._PrintMessages()
    self._RunLater()
    self._PrintMessages()
    return self.clean

  def _RunLater(self):
    # Stop at the first queue that fails.
    for queue_name in ['_later_queue1', '_later_queue2']:
      if not self._RunQueue(queue_name):
        return

  def _RunQueue(self, queue):
    """Run one deferred queue; abort and report False on first failure."""
    for task in getattr(self, queue):
      if not task.Run(self):
        self.clean = False
        return False
    setattr(self, queue, [])
    return True

  def _PrintMessages(self):
    for item in self._messages:
      item.Print(self)
    for item in self._failures:
      item.Print(self)
    self._messages = []
    self._failures = []
class MetaProject(Project):
  """A special project housed under .repo (the repo and manifest
  repositories themselves).
  """
  def __init__(self, manifest, name, gitdir, worktree, relpath=None):
    # Fix: removed the unused local 'repodir = manifest.repodir'.
    if relpath is None:
      relpath = '.repo/%s' % name
    Project.__init__(self,
                     manifest = manifest,
                     name = name,
                     gitdir = gitdir,
                     worktree = worktree,
                     remote = RemoteSpec('origin'),
                     relpath = relpath,
                     revisionExpr = 'refs/heads/master',
                     revisionId = None)

  def PreSync(self):
    """Adopt the checked-out branch's merge target and remote, so the
    coming sync follows local branch configuration.
    """
    if self.Exists:
      cb = self.CurrentBranch
      if cb:
        cb = self.GetBranch(cb)
        if cb.merge:
          self.revisionExpr = cb.merge
          self.revisionId = None
        if cb.remote and cb.remote.name:
          self.remote.name = cb.remote.name

  @property
  def LastFetch(self):
    # mtime of FETCH_HEAD; 0 when this project was never fetched.
    try:
      fh = os.path.join(self.gitdir, 'FETCH_HEAD')
      return os.path.getmtime(fh)
    except OSError:
      return 0

  @property
  def HasChanges(self):
    """Has the remote received new commits not yet checked out?
    """
    if not self.remote or not self.revisionExpr:
      return False

    all = self.bare_ref.all
    revid = self.GetRevisionId(all)
    head = self.work_git.GetHead()
    if head.startswith(R_HEADS):
      try:
        head = all[head]
      except KeyError:
        head = None

    if revid == head:
      return False
    elif self._revlist(not_rev(HEAD), revid):
      # The manifest revision contains commits HEAD does not have.
      return True
    return False
| Python |
#
# Copyright (C) 2008 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import re
import sys
import subprocess
import tempfile
from error import EditorError
class Editor(object):
  """Manages the user's preferred text editor."""

  _editor = None       # cached editor command, resolved lazily
  globalConfig = None  # GitConfig consulted for core.editor

  @classmethod
  def _GetEditor(cls):
    # Resolve the editor command once and cache it for later calls.
    if cls._editor is None:
      cls._editor = cls._SelectEditor()
    return cls._editor

  @classmethod
  def _SelectEditor(cls):
    # Precedence: GIT_EDITOR, core.editor, VISUAL, EDITOR, then vi.
    e = os.getenv('GIT_EDITOR')
    if e:
      return e

    if cls.globalConfig:
      e = cls.globalConfig.GetString('core.editor')
      if e:
        return e

    e = os.getenv('VISUAL')
    if e:
      return e

    e = os.getenv('EDITOR')
    if e:
      return e

    if os.getenv('TERM') == 'dumb':
      print >>sys.stderr,\
"""No editor specified in GIT_EDITOR, core.editor, VISUAL or EDITOR.
Tried to fall back to vi but terminal is dumb. Please configure at
least one of these before using this command."""
      sys.exit(1)

    return 'vi'

  @classmethod
  def EditString(cls, data):
    """Open the user's editor on *data* in a temporary file.

    Args:
      data: the text to edit.

    Returns:
      The edited text read back from the temporary file.

    Raises:
      EditorError: the editor could not be launched or exited nonzero.
    """
    editor = cls._GetEditor()
    if editor == ':':
      # ':' is the conventional no-op editor: return input unchanged.
      return data

    fd, path = tempfile.mkstemp()
    try:
      os.write(fd, data)
      os.close(fd)
      fd = None

      if re.compile("^.*[$ \t'].*$").match(editor):
        # The editor setting contains shell metacharacters; run it
        # through sh so user-specified quoting/arguments are honored.
        args = [editor + ' "$@"', 'sh']
        shell = True
      else:
        args = [editor]
        shell = False
      args.append(path)

      try:
        rc = subprocess.Popen(args, shell=shell).wait()
      except OSError, e:
        raise EditorError('editor failed, %s: %s %s'
          % (str(e), editor, path))
      if rc != 0:
        raise EditorError('editor failed with exit status %d: %s %s'
          % (rc, editor, path))

      fd2 = open(path)
      try:
        return fd2.read()
      finally:
        fd2.close()
    finally:
      # Clean up the temp file even when the editor failed early.
      if fd:
        os.close(fd)
      os.remove(path)
| Python |
#
# Copyright (C) 2009 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import sys
from trace import Trace
# Well-known git ref names and namespace prefixes.
HEAD = 'HEAD'
R_HEADS = 'refs/heads/'           # local branches
R_TAGS = 'refs/tags/'             # tags
R_PUB = 'refs/published/'         # repo's record of uploaded branches
class GitRefs(object):
  """Read-only, cached view of the refs in a git directory.

  Loose and packed refs are loaded into memory and transparently
  reloaded when the mtimes of the underlying files change.
  """

  def __init__(self, gitdir):
    self._gitdir = gitdir
    self._phyref = None  # ref name -> object id (after symref resolution)
    self._symref = None  # ref name -> target ref name
    self._mtime = {}     # source path fragment -> mtime at load time

  @property
  def all(self):
    """Dict mapping every known ref name to its object id."""
    self._EnsureLoaded()
    return self._phyref

  def get(self, name):
    """Object id for ref name, or '' when the ref does not exist."""
    try:
      return self.all[name]
    except KeyError:
      return ''

  def deleted(self, name):
    """Drop a ref the caller just deleted on disk from the caches."""
    if self._phyref is not None:
      if name in self._phyref:
        del self._phyref[name]

      if name in self._symref:
        del self._symref[name]

      if name in self._mtime:
        del self._mtime[name]

  def symref(self, name):
    """Target ref of symbolic ref name, or '' when not a symref."""
    try:
      self._EnsureLoaded()
      return self._symref[name]
    except KeyError:
      return ''

  def _EnsureLoaded(self):
    if self._phyref is None or self._NeedUpdate():
      self._LoadAll()

  def _NeedUpdate(self):
    # Stale if any previously-read source changed mtime or vanished.
    Trace(': scan refs %s', self._gitdir)

    for name, mtime in self._mtime.iteritems():
      try:
        if mtime != os.path.getmtime(os.path.join(self._gitdir, name)):
          return True
      except OSError:
        return True
    return False

  def _LoadAll(self):
    Trace(': load refs %s', self._gitdir)

    self._phyref = {}
    self._symref = {}
    self._mtime = {}

    self._ReadPackedRefs()
    self._ReadLoose('refs/')
    self._ReadLoose1(os.path.join(self._gitdir, HEAD), HEAD)

    # Resolve chains of symbolic refs down to object ids; the pass
    # limit guards against reference cycles.
    scan = self._symref
    attempts = 0
    while scan and attempts < 5:
      scan_next = {}
      for name, dest in scan.iteritems():
        if dest in self._phyref:
          self._phyref[name] = self._phyref[dest]
        else:
          scan_next[name] = dest
      scan = scan_next
      attempts += 1

  def _ReadPackedRefs(self):
    path = os.path.join(self._gitdir, 'packed-refs')
    try:
      fd = open(path, 'rb')
      mtime = os.path.getmtime(path)
    except IOError:
      return
    except OSError:
      return
    try:
      for line in fd:
        if line[0] == '#':
          continue  # header comment
        if line[0] == '^':
          continue  # peeled-tag line; the ref itself came just before
        line = line[:-1]
        p = line.split(' ')
        id = p[0]
        name = p[1]
        self._phyref[name] = id
    finally:
      fd.close()
    self._mtime['packed-refs'] = mtime

  def _ReadLoose(self, prefix):
    base = os.path.join(self._gitdir, prefix)
    for name in os.listdir(base):
      p = os.path.join(base, name)
      if os.path.isdir(p):
        # Track the directory mtime so added/removed refs invalidate
        # the cache, then recurse.
        self._mtime[prefix] = os.path.getmtime(base)
        self._ReadLoose(prefix + name + '/')
      elif name.endswith('.lock'):
        pass  # in-progress ref update; ignore
      else:
        self._ReadLoose1(p, prefix + name)

  def _ReadLoose1(self, path, name):
    try:
      fd = open(path, 'rb')
      mtime = os.path.getmtime(path)
    except OSError:
      return
    except IOError:
      return
    try:
      id = fd.readline()
    finally:
      fd.close()

    if not id:
      return
    id = id[:-1]

    if id.startswith('ref: '):
      self._symref[name] = id[5:]
    else:
      self._phyref[name] = id
    self._mtime[name] = mtime
| Python |
#
# Copyright (C) 2008 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
class ManifestParseError(Exception):
  """Raised when the manifest file cannot be parsed."""
class ManifestInvalidRevisionError(Exception):
  """Raised when a project's revision value in the manifest is invalid."""
class EditorError(Exception):
  """Unspecified error from the user's text editor."""

  def __init__(self, reason):
    self.reason = reason

  def __str__(self):
    return self.reason
class GitError(Exception):
  """Unspecified internal error raised while driving git."""

  def __init__(self, command):
    self.command = command

  def __str__(self):
    return self.command
class ImportError(Exception):
  """An import from a non-Git format cannot be performed.

  NOTE(review): this deliberately keeps its historical name even though
  it shadows the builtin ImportError inside this module; renaming it
  would break existing callers.
  """

  def __init__(self, reason):
    self.reason = reason

  def __str__(self):
    return self.reason
class UploadError(Exception):
  """A bundle upload to Gerrit did not succeed."""

  def __init__(self, reason):
    self.reason = reason

  def __str__(self):
    return self.reason
class NoSuchProjectError(Exception):
  """A specified project does not exist in the work tree.
  """
  def __init__(self, name=None):
    self.name = name

  def __str__(self):
    # Bug fix: this previously read 'self.Name', which does not exist
    # and raised AttributeError whenever the message was rendered.
    if self.name is None:
      return 'in current directory'
    return self.name
class RepoChangedException(Exception):
  """Thrown if 'repo sync' results in repo updating its internal
     repo or manifest repositories.  In this special case we must
     use exec to re-execute repo with the new code and manifest.
  """
  def __init__(self, extra_args=None):
    # Bug fix: the old 'extra_args=[]' default was a shared mutable
    # list; mutating one exception's extra_args leaked into every
    # later instance.  Each instance now gets its own list.
    self.extra_args = extra_args if extra_args is not None else []
| Python |
#
# Copyright (C) 2008 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import optparse
import sys
import manifest_loader
from error import NoSuchProjectError
class Command(object):
  """Base class for any command line action in repo.
  """

  common = False      # True: command is listed in the common help summary
  _optparse = None    # lazily constructed OptionParser

  def WantPager(self, opt):
    # Subclasses override to request pager output for their display.
    return False

  @property
  def OptionParser(self):
    # Built on first use; NAME/helpUsage come from the subclass.
    if self._optparse is None:
      try:
        me = 'repo %s' % self.NAME
        usage = self.helpUsage.strip().replace('%prog', me)
      except AttributeError:
        usage = 'repo %s' % self.NAME
      self._optparse = optparse.OptionParser(usage = usage)
      self._Options(self._optparse)
    return self._optparse

  def _Options(self, p):
    """Initialize the option parser.
    """

  def Usage(self):
    """Display usage and terminate.
    """
    self.OptionParser.print_usage()
    sys.exit(1)

  def Execute(self, opt, args):
    """Perform the action, after option parsing is complete.
    """
    raise NotImplementedError

  @property
  def manifest(self):
    return self.GetManifest()

  def GetManifest(self, reparse=False, type=None):
    # 'type' shadows the builtin, but is part of the public signature.
    return manifest_loader.GetManifest(self.repodir,
                                       reparse=reparse,
                                       type=type)

  def GetProjects(self, args, missing_ok=False):
    """A list of projects that match the arguments.

    With no args, every project (existing, unless missing_ok) is
    returned.  Otherwise each arg is resolved first as a project name,
    then as an exact work-tree path, then by walking up the path's
    parent directories.  Raises NoSuchProjectError when an arg matches
    nothing, or matches a project that does not exist locally while
    missing_ok is False.  Results are sorted by relpath.
    """
    all = self.manifest.projects
    mp = self.manifest.manifestProject

    # A manifest checked out at the top level also acts as a project.
    if mp.relpath == '.':
      all = dict(all)
      all[mp.name] = mp

    result = []

    if not args:
      for project in all.values():
        if missing_ok or project.Exists:
          result.append(project)
    else:
      by_path = None

      for arg in args:
        project = all.get(arg)

        if not project:
          path = os.path.abspath(arg).replace('\\', '/')

          # Build the worktree->project index lazily, only when a
          # name lookup has already failed.
          if not by_path:
            by_path = dict()
            for p in all.values():
              by_path[p.worktree] = p

          try:
            project = by_path[path]
          except KeyError:
            # Walk up parent directories until the tree root (or the
            # filesystem root, when oldpath stops changing).
            oldpath = None
            while path \
              and path != oldpath \
              and path != self.manifest.topdir:
              try:
                project = by_path[path]
                break
              except KeyError:
                oldpath = path
                path = os.path.dirname(path)

        if not project:
          raise NoSuchProjectError(arg)
        if not missing_ok and not project.Exists:
          raise NoSuchProjectError(arg)

        result.append(project)

    def _getpath(x):
      return x.relpath
    result.sort(key=_getpath)
    return result
class InteractiveCommand(Command):
  """A command that interacts with the user on the tty; it therefore
  refuses pager output even when the user asks for it.
  """
  def WantPager(self, opt):
    return False
class PagedCommand(Command):
  """A command whose output is typically longer than one screen, and
  therefore defaults to being shown through a pager.
  """
  def WantPager(self, opt):
    return True
class MirrorSafeCommand(object):
  """Marker base class: the command may run inside a mirror and does
  not require a working directory.
  """
| Python |
#
# Copyright (C) 2008 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import sys
import pager
from git_config import GitConfig
# ANSI color and attribute codes; -1 marks "not set".
COLORS = {None: -1,
          'normal': -1,
          'black': 0,
          'red': 1,
          'green': 2,
          'yellow': 3,
          'blue': 4,
          'magenta': 5,
          'cyan': 6,
          'white': 7}

ATTRS = {None: -1,
         'bold': 1,
         'dim': 2,
         'ul': 4,
         'blink': 5,
         'reverse': 7}

RESET = "\033[m"


def is_color(s):
  """True when s names a color known to COLORS."""
  return s in COLORS


def is_attr(s):
  """True when s names a text attribute known to ATTRS."""
  return s in ATTRS
def _Color(fg = None, bg = None, attr = None):
  """Build an ANSI SGR escape sequence for the named fg/bg/attr.

  Returns the empty string when none of the three is set.
  """
  fg = COLORS[fg]
  bg = COLORS[bg]
  attr = ATTRS[attr]

  if attr < 0 and fg < 0 and bg < 0:
    return ''

  parts = []
  if attr >= 0:
    parts.append(chr(ord('0') + attr))
  if fg >= 0:
    if fg < 8:
      parts.append('3%c' % (ord('0') + fg))
    else:
      parts.append('38;5;%d' % fg)
  if bg >= 0:
    if bg < 8:
      parts.append('4%c' % (ord('0') + bg))
    else:
      parts.append('48;5;%d' % bg)
  return "\033[" + ';'.join(parts) + 'm'
class Coloring(object):
  """Colorized output helper driven by git color.* configuration.

  Consults color.<type> (falling back to color.ui) to decide whether
  ANSI escapes should be emitted, and builds printer/colorer callables
  for named slots such as 'header' or 'fail'.
  """

  def __init__(self, config, type):
    self._section = 'color.%s' % type
    self._config = config
    self._out = sys.stdout

    on = self._config.GetString(self._section)
    if on is None:
      on = self._config.GetString('color.ui')

    if on == 'auto':
      # Only colorize interactive output (or when our pager is active).
      if pager.active or os.isatty(1):
        self._on = True
      else:
        self._on = False
    elif on in ('true', 'always'):
      self._on = True
    else:
      self._on = False

  def redirect(self, out):
    # Send subsequent output to a different file-like object.
    self._out = out

  @property
  def is_on(self):
    return self._on

  def write(self, fmt, *args):
    self._out.write(fmt % args)

  def flush(self):
    self._out.flush()

  def nl(self):
    self._out.write('\n')

  def printer(self, opt=None, fg=None, bg=None, attr=None):
    # Like colorer(), but the returned function writes instead of
    # returning the colored string.
    s = self
    c = self.colorer(opt, fg, bg, attr)
    def f(fmt, *args):
      s._out.write(c(fmt, *args))
    return f

  def colorer(self, opt=None, fg=None, bg=None, attr=None):
    # Returns a formatting callable; when color is off it is a plain
    # '%'-formatter with no escapes.
    if self._on:
      c = self._parse(opt, fg, bg, attr)
      def f(fmt, *args):
        str = fmt % args
        return ''.join([c, str, RESET])
      return f
    else:
      def f(fmt, *args):
        return fmt % args
      return f

  def _parse(self, opt, fg, bg, attr):
    # Resolve color.<section>.<opt> from config; the fg/bg/attr
    # arguments serve as defaults when the user set nothing.
    if not opt:
      return _Color(fg, bg, attr)

    v = self._config.GetString('%s.%s' % (self._section, opt))
    if v is None:
      return _Color(fg, bg, attr)

    v = v.strip().lower()
    if v == "reset":
      return RESET
    elif v == '':
      return _Color(fg, bg, attr)

    # NOTE(review): have_fg is never set True below, so a second color
    # word in the config overwrites fg instead of becoming bg;
    # preserved as-is to avoid changing user-visible behavior.
    have_fg = False
    for a in v.split(' '):
      if is_color(a):
        if have_fg: bg = a
        else: fg = a
      elif is_attr(a):
        attr = a

    return _Color(fg, bg, attr)
| Python |
#
# Copyright (C) 2009 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from manifest_submodule import SubmoduleManifest
from manifest_xml import XmlManifest
def ParseManifest(repodir, type=None):
  """Construct a manifest object for repodir.

  An explicit manifest class may be passed as type; otherwise a
  SubmoduleManifest is used when the repodir matches one, falling back
  to XmlManifest.
  """
  if type:
    return type(repodir)
  if SubmoduleManifest.Is(repodir):
    return SubmoduleManifest(repodir)
  return XmlManifest(repodir)
_manifest = None  # process-wide cached manifest instance

def GetManifest(repodir, reparse=False, type=None):
  """Return the cached manifest for repodir, (re)parsing when needed.

  Reparses when nothing is cached yet, when reparse is True, or when
  the cached instance is not of the requested type.
  """
  global _manifest
  if _manifest is None \
  or reparse \
  or (type and _manifest.__class__ != type):
    _manifest = ParseManifest(repodir, type=type)
  return _manifest
| Python |
# SCons build script: builds the 'fideo' shared library from src/*.cpp.
Import ('env')
env.Append(CXXFLAGS=['--std=c++0x'])
name = 'fideo'
inc = env.Dir('.')
ext_inc = []
src = env.Glob('src/*.cpp')
# External components the library depends on / links against.
deps = ['etilico', 'mili', 'biopp', 'stl-debug', 'unafold', 'rnahybrid', 'vienna1.8.5', 'intarna', 'vienna2.0.7', 'inforna']
env.CreateSharedLibrary(name, inc, ext_inc, src, deps)
| Python |
# SCons build script: builds the 'fideo' test binary from local *.cpp
# using gtest/gmock.
Import ('env')
name = 'fideo'
inc = env.Dir('.')
src = env.Glob('*.cpp')
deps = ['etilico', 'biopp', 'gtest_main', 'gtest', 'gmock']
env.CreateTest(name, inc, src, deps)
| Python |
#!/usr/bin/env python
#
# Copyright (C) 2010 ZXing authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Translate a string from English to all locales used in the Barcode
# Scanner Android project
#
# Author: Neha Pandey
from urllib2 import urlopen
from urllib import urlencode
import sys
def translate (in_lang, out_lang, input):
    """Translate the input from in_lang to out_lang using Google Translate"""
    # Build the AJAX language API request URL.
    langpair = '%s|%s' % (in_lang, out_lang)
    base = 'http://ajax.googleapis.com/ajax/services/language/translate?'
    query = urlencode ((('v',1.0),
                        ('q',input),
                        ('langpair',langpair),) )
    url = base + query

    # Fetch the raw JSON response from the service.
    content = urlopen(url).read()

    # Snip the translatedText value out of the JSON by hand.
    start_index = content.find('"translatedText":"') + 18
    tail = content [start_index:]
    end_index = tail.find('"}, "')
    return tail[:end_index]
# All the languages to translate to
language_list = ['en', 'ar', 'cs', 'da', 'de', 'es',
'fi', 'fr', 'hu', 'it', 'ja', 'nl',
'pl', 'pt', 'ru', 'sv', 'zh-CN',
'zh-TW']
if (len(sys.argv) < 3):
print "Usage: %s name String to translate" % sys.argv[0]
print "Sample: %s ask-banana Give me a banana" % sys.argv[0]
import sys
sys.exit (-1);
# First argument is the name of the string
string_name = sys.argv[1]
# Remaining arguments is the string to be translated in English
input_string =' '.join(sys.argv[2:])
# Translate all languages
for i in range(len(language_list)) :
translation = translate ('en', language_list[i], input_string)
xml_string = '<string name="' + string_name + '">' + \
translation + '</string>'
print language_list[i], xml_string
| Python |
#!/usr/bin/python
# Copyright 2011 Google, Inc. All Rights Reserved.
# simple script to walk source tree looking for third-party licenses
# dumps resulting html page to stdout
import os, re, mimetypes, sys
# read source directories to scan from command line
# read source directories to scan from command line
SOURCE = sys.argv[1:]

# regex to find /* */ style comment blocks
COMMENT_BLOCK = re.compile(r"(/\*.+?\*/)", re.MULTILINE | re.DOTALL)

# regex used to detect if comment block is a license
COMMENT_LICENSE = re.compile(r"(license)", re.IGNORECASE)
COMMENT_COPYRIGHT = re.compile(r"(copyright)", re.IGNORECASE)

# MIME types whose files are skipped during the comment scan.
EXCLUDE_TYPES = [
    "application/xml",
    "image/png",
    ]

# list of known licenses; keys are derived by stripping all whitespace and
# forcing to lowercase to help combine multiple files that have same license.
KNOWN_LICENSES = {}
class License:
    """One distinct license text plus the files it applies to."""

    def __init__(self, license_text):
        self.license_text = license_text
        self.filenames = []

    def add_file(self, filename):
        """Record filename as covered by this license, skipping duplicates."""
        if filename not in self.filenames:
            self.filenames.append(filename)
LICENSE_KEY = re.compile(r"[^\w]")

def find_license(license_text):
    """Return the canonical License object for license_text.

    The lookup key strips every non-word character and lowercases the
    text, so trivially reformatted copies of the same license collapse
    into one entry in KNOWN_LICENSES.
    """
    # TODO(alice): a lot these licenses are almost identical Apache licenses.
    # Most of them differ in origin/modifications. Consider combining similar
    # licenses.
    key = LICENSE_KEY.sub("", license_text).lower()
    if key not in KNOWN_LICENSES:
        KNOWN_LICENSES[key] = License(license_text)
    return KNOWN_LICENSES[key]
def discover_license(exact_path, filename):
    """Scan one file and register any license text found in KNOWN_LICENSES.

    A file literally named *LICENSE is taken wholesale as the license of
    the filename it prefixes.  For any other file, /* */ comment blocks
    containing both "license" and "copyright" are collected.
    """
    # when filename ends with LICENSE, assume applies to filename prefixed
    if filename.endswith("LICENSE"):
        with open(exact_path) as file:
            license_text = file.read()
        target_filename = filename[:-len("LICENSE")]
        if target_filename.endswith("."): target_filename = target_filename[:-1]
        find_license(license_text).add_file(target_filename)
        return None

    # try searching for license blocks in raw file
    # Bug fix: mimetypes.guess_type() returns a (type, encoding) tuple,
    # so comparing the whole tuple against EXCLUDE_TYPES (a list of type
    # strings) could never match and the exclusion filter was a no-op.
    # Use just the type string.
    mimetype = mimetypes.guess_type(filename)[0]
    if mimetype in EXCLUDE_TYPES: return None

    with open(exact_path) as file:
        raw_file = file.read()

    # include comments that have both "license" and "copyright" in the text
    for comment in COMMENT_BLOCK.finditer(raw_file):
        comment = comment.group(1)
        if COMMENT_LICENSE.search(comment) is None: continue
        if COMMENT_COPYRIGHT.search(comment) is None: continue
        find_license(comment).add_file(filename)
# Walk every requested source tree and harvest license texts.
for source in SOURCE:
  for root, dirs, files in os.walk(source):
    for name in files:
      discover_license(os.path.join(root, name), name)

# Emit a simple HTML report: for each distinct license, the list of
# covered files followed by the license text itself.
print "<html><head><style> body { font-family: sans-serif; } pre { background-color: #eeeeee; padding: 1em; white-space: pre-wrap; } </style></head><body>"
for license in KNOWN_LICENSES.values():
  print "<h3>Notices for files:</h3><ul>"
  filenames = license.filenames
  filenames.sort()
  for filename in filenames:
    print "<li>%s</li>" % (filename)
  print "</ul>"
  print "<pre>%s</pre>" % license.license_text
print "</body></html>"
| Python |
# module is created to emulate some kind of data driven testing approach
# add module path to PYTHONPATH ta make it working
# Template record for one movie entry in the php4dvd "Add movie" form.
# Every known form field defaults to None, meaning "leave the field
# untouched"; test cases copy this dict and fill in what they need.
movieDataDict_default = {"name":None, "year":None, "imdbid":None, "aka":None, "duration":None, "rating":None,
                         "format":None, "own":None, "seen":None, "loaned":None, "loanname":None, "loandate":None,
                         "cover":None, "trailer":None, "notes":None, "taglines":None, "plotoutline":None, "plots":None,
                         "languages":None, "subtitles":None, "audio":None, "video":None, "country":None, "genres":None,
                         "director":None, "writer":None, "producer":None, "music":None, "cast":None}
def getTestDataTC1():
    """Movie record with name, year, IMDb id and trailer URL filled in."""
    data = movieDataDict_default.copy()
    data['name'] = "The Movie"
    data['year'] = "2013"
    data['imdbid'] = "2099759"
    data['trailer'] = "http://my.test.com"
    return data
def getTestDataTC2():
    """Movie record with only the name filled in (no year)."""
    data = movieDataDict_default.copy()
    data['name'] = "The Movie2"
    return data
def getTestDataTC3():
    """Movie record with name and year filled in."""
    data = movieDataDict_default.copy()
    data['name'] = "Test Movie"
    data['year'] = "2000"
    return data
# add module path to PYTHONPATH ta make it working
from selenium import webdriver
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import Select
from selenium.common.exceptions import NoSuchElementException
from selenium.webdriver.common.keys import Keys
from selenium.webdriver.support.wait import WebDriverWait
from selenium.webdriver.support.expected_conditions import *
# Title attributes of the php4dvd toolbar icons, used when asserting which
# actions the navigation panel currently offers (see checkNavigationPanel).
addIcon = "Add movie"
updateIcon = "Update all"
exportIcon = "Export"
saveIcon = "Save"
ownIcon = "Own"
seenIcon = "Seen"
editIcon = "Edit"
removeIcon = "Remove"
visitIcon = "Visit IMDb"
trailerIcon = "View trailer"
coverIcon = "Download cover"
def setUpTest(TestCase):
    """Start a Firefox session and initialise per-test state on TestCase."""
    driver = webdriver.Firefox()
    driver.implicitly_wait(10)
    TestCase.driver = driver
    TestCase.base_url = "http://localhost/php4dvd/"
    TestCase.verificationErrors = []
def tearDownTest(TestCase):
    """Close the browser, then fail if any soft-verification errors accrued."""
    errors = TestCase.verificationErrors
    TestCase.driver.quit()
    TestCase.assertEqual([], errors)
def accept_alert_and_get_its_text(TestCase):
    """Accept the currently open JS alert and return the message it displayed."""
    alert = TestCase.driver.switch_to_alert()
    message = alert.text
    alert.accept()
    return message
def cancel_alert_and_get_its_text(TestCase):
    """Dismiss the currently open JS alert and return the message it displayed."""
    alert = TestCase.driver.switch_to_alert()
    message = alert.text
    alert.dismiss()
    return message
def is_element_present(where, how, what):
    """Return True if `where` (a driver or element) can locate (how, what).

    Fix: the original used the Python-2-only `except NoSuchElementException, e`
    form (with the bound name unused); the bare except clause below is valid on
    both Python 2 and 3.
    """
    try:
        where.find_element(by=how, value=what)
    except NoSuchElementException:
        return False
    return True
def logIn(TestCase, username, password):
    """Open the login page, submit the credentials and return the catalogue movie list."""
    driver = TestCase.driver
    driver.get(TestCase.base_url)
    user_field = driver.find_element_by_id("username")
    user_field.clear()
    user_field.send_keys(username)
    pass_field = driver.find_element_by_name("password")
    pass_field.clear()
    pass_field.send_keys(password)
    driver.find_element_by_name("submit").click()
    # verifyCatalogue also asserts the post-login page chrome is intact.
    return verifyCatalogue(TestCase)
def clickAddMovie(TestCase):
    """Click the 'Add movie' toolbar icon and verify the movie form opens."""
    TestCase.driver.find_element_by_css_selector('img[alt="Add movie"]').click()
    # While the add form is open, only the Save icon should be offered.
    TestCase.assertTrue(checkNavigationPanel(TestCase, [saveIcon]))
    # The 'Movie information' form is rendered as #updateform.
    TestCase.assertTrue(is_element_present(TestCase.driver, By.ID, "updateform"))
def clickRemoveMovie(TestCase):
    """Click the 'Remove' icon, accept the confirmation alert, return the movie list."""
    TestCase.driver.find_element_by_css_selector('img[alt="Remove"]').click()
    # NOTE(review): [\s\S] allows exactly one character after "this" -- confirm
    # this still matches the full alert text produced by the app.
    TestCase.assertRegexpMatches(accept_alert_and_get_its_text(TestCase),
                                 r"^Are you sure you want to remove this[\s\S]$")
    return verifyCatalogue(TestCase)
def checkNavigationPanel(TestCase, presentIconList):
    """Return True iff the navigation panel shows exactly presentIconList (in order)."""
    panel = TestCase.driver.find_element_by_css_selector("section > nav")
    # Compare the title attributes of the displayed icons against the expectation.
    titles = [img.get_attribute("title")
              for img in panel.find_elements_by_xpath("//div/a/img")]
    return titles == presentIconList
def verifyCatalogue(TestCase):
    """Assert the catalogue page chrome is intact and return the displayed movie titles."""
    # The catalogue view offers exactly Add / Update all / Export.
    TestCase.assertTrue(checkNavigationPanel(TestCase, [addIcon, updateIcon, exportIcon]))
    # The search box is rendered as #q.
    TestCase.assertTrue(is_element_present(TestCase.driver, By.ID, "q"))
    tiles = TestCase.driver.find_elements_by_xpath("//section/div[3]/a/div/div[2]")
    return [tile.text for tile in tiles]
def moviePopulateValues(TestCase):
    """Fill the 'Add movie' / 'Edit movie' form from TestCase.movieDataDict.

    Fields whose value is None are left untouched.  own/seen/loaned map to
    <field>_yes / <field>_no radio buttons; loanname/loandate are hidden
    unless 'loaned' is set, so they are filled afterwards.

    Fixes two defects: the 'no' radio buttons were looked up on TestCase
    instead of TestCase.driver, and text inputs were never cleared because
    clear was referenced without being called.
    """
    for name, value in TestCase.movieDataDict.items():
        if value is None:
            continue  # leave this form field as-is
        if name in ["own", "seen", "loaned"]:
            # Radio button pairs named <field>_yes / <field>_no.
            if value:
                TestCase.driver.find_element_by_id(name + "_yes").click()
            else:
                TestCase.driver.find_element_by_id(name + "_no").click()
        elif name not in ["loanname", "loandate"]:
            field = TestCase.driver.find_element_by_name(name)
            field.clear()
            field.send_keys(value)
    # Hidden text boxes become visible only when 'loaned' is set.
    if TestCase.movieDataDict["loaned"]:
        for hidden in ("loanname", "loandate"):
            value = TestCase.movieDataDict[hidden]
            if value is not None:
                field = TestCase.driver.find_element_by_name(hidden)
                field.clear()
                field.send_keys(value)
def movieSave(TestCase, bySaveIcon):
    """Save the 'Add movie' / 'Edit movie' form and verify the resulting movie page.

    If bySaveIcon is True the form is saved via the Save toolbar icon,
    otherwise via the Submit button.  Afterwards the movie detail page is
    checked against TestCase.movieDataDict: IMDb link, trailer link, cover
    image, the "Name (Year)" heading, and the expected toolbar icons.
    """
    if bySaveIcon:
        TestCase.driver.find_element_by_css_selector("img[alt=\"Save\"]").click()
    else:
        TestCase.driver.find_element_by_id("submit").click()
    # here goes verification of entered information
    # currently only some parameters are verified
    nav = TestCase.driver.find_element_by_css_selector("section > nav")
    # create list of icons to check against navigation panel
    iconList = []
    # check IMDb number if it was provided
    if TestCase.movieDataDict["imdbid"]:
        # check Visit IMDb icon's href
        link = nav.find_element_by_xpath("//li/div/div/a").get_attribute("href")
        TestCase.assertTrue(link == "http://www.imdb.com/title/tt"+TestCase.movieDataDict["imdbid"]+"/")
        # add icon to the list
        iconList.append(visitIcon)
    # check Trailer URL if it was provided
    if TestCase.movieDataDict["trailer"]:
        # The trailer link sits in the 2nd <li> only when the IMDb icon occupies the 1st.
        xpath = "//li[2]/div/div/a" if iconList else "//li/div/div/a"
        link = nav.find_element_by_xpath(xpath).get_attribute("href")
        # NOTE(review): the app appears to append a lightbox query string to the
        # trailer URL -- confirm against the php4dvd markup if this assert breaks.
        TestCase.assertTrue(link == TestCase.movieDataDict["trailer"]+"/?iframe=true&width=100%&height=100%")
        # add icon to the list
        iconList.append(trailerIcon)
    # check Cover if it was provided
    if TestCase.movieDataDict["cover"]:
        # check the cover image tag is present
        TestCase.assertTrue(is_element_present(TestCase.driver, By.XPATH,
                            "/html/body/div/div/div/section/div/div/div/img"))
        # add icon to the list
        iconList.append(coverIcon)
    # check movie name and year rendered as "Name (Year)"
    h2 = TestCase.driver.find_element_by_xpath("/html/body/div/div/div/section/div/div/div[2]/h2")
    TestCase.assertTrue(h2.text == TestCase.movieDataDict["name"]+" ("+TestCase.movieDataDict["year"]+")")
    # check navigation menu: the per-movie icons always follow the optional ones
    iconList += [ownIcon, seenIcon, editIcon, removeIcon]
    TestCase.assertTrue(checkNavigationPanel(TestCase, iconList))
def movieSaveInvalid(TestCase, bySaveIcon):
    """Save the movie form while a required field is empty and verify the
    'This field is required' validation labels appear.

    bySaveIcon=True saves via the Save toolbar icon, otherwise via Submit."""
    save_control = (TestCase.driver.find_element_by_css_selector('img[alt="Save"]')
                    if bySaveIcon
                    else TestCase.driver.find_element_by_id("submit"))
    save_control.click()
    required = "This field is required"
    if not TestCase.movieDataDict["name"]:
        label = TestCase.driver.find_element_by_xpath("//table/tbody/tr[2]/td[2]/label")
        TestCase.assertTrue(label.text == required)
    if not TestCase.movieDataDict["year"]:
        label = TestCase.driver.find_element_by_xpath("//table/tbody/tr[4]/td[2]/label")
        TestCase.assertTrue(label.text == required)
def locateMovieByName(TestCase):
    """Return the first movie tile whose title equals TestCase.movieToRemove.

    Returns None when no tile matches.  (The original fell off the loop and
    returned whatever element happened to be last -- or raised NameError on
    an empty catalogue.)
    """
    elements = TestCase.driver.find_elements_by_xpath("//section/div[3]/a/div/div[2]")
    for element in elements:
        if element.text == TestCase.movieToRemove:
            return element
    return None
def goHomeMenu(TestCase):
    """Return to the catalogue via the 'Home' menu link and return its movie list."""
    home_link = TestCase.driver.find_element_by_link_text("Home")
    home_link.click()
    return verifyCatalogue(TestCase)
def addMovieToCatalogue(TestCase):
    """Add the movie described by TestCase.movieDataDict with minimal checks;
    return the catalogue movie list afterwards."""
    goHomeMenu(TestCase)
    clickAddMovie(TestCase)
    moviePopulateValues(TestCase)
    # Submit-button save path; movieSave performs the post-save verification.
    movieSave(TestCase, bySaveIcon=False)
    return goHomeMenu(TestCase)
def searchForMovie(TestCase):
    """Type TestCase.movieToSearch into the search box and return the resulting movie list."""
    search_box = TestCase.driver.find_element_by_id("q")
    search_box.clear()
    if TestCase.movieToSearch:
        search_box.send_keys(TestCase.movieToSearch)
    search_box.send_keys(Keys.RETURN)
    wait = WebDriverWait(TestCase.driver, 10)
    # The AJAX search first shows a progress bar, then re-renders the results grid.
    TestCase.assertTrue(wait.until(visibility_of_element_located((By.ID, "loading"))))
    TestCase.assertTrue(wait.until(visibility_of_element_located((By.ID, "results"))))
    return verifyCatalogue(TestCase)
| Python |
def testit():
    """Run all tests by invoking the py.test command-line runner.

    The import is local so merely importing this module doesn't require py.
    """
    import py
    py.test.cmdline.main()
| Python |
#!/usr/bin/env python
# # Copyright (c) 2010 Harry Delmolino
# # Permission is hereby granted, free of charge, to any person obtaining a copy
# # of this software and associated documentation files (the "Software"), to deal
# # in the Software without restriction, including without limitation the rights
# # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# # copies of the Software, and to permit persons to whom the Software is
# # furnished to do so, subject to the following conditions:
# # The above copyright notice and this permission notice shall be included in
# # all copies or substantial portions of the Software.
# # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# # THE SOFTWARE.
from PyQt4 import QtCore, QtGui
import os
import urllib
class ImageViewer(QtGui.QMainWindow):
    """Minimal maximized image viewer.

    Images come from self.imgList (local paths or URLs); pressing 'N' pops
    the next entry off the front of the queue and displays it scaled to fit
    the window.
    """

    def __init__(self):
        super(ImageViewer, self).__init__()
        self.imgList = []  # queue of image paths/URLs still to be shown
        self.imageLabel = QtGui.QLabel()
        self.imageLabel.setAlignment(QtCore.Qt.AlignCenter)
        self.setCentralWidget(self.imageLabel)
        self.setWindowTitle("Image Viewer")
        self.showMaximized()

    def download(self, url):
        """Fetch url into the current directory and return the local file name."""
        fileName = url.split('/')[-1]
        urllib.urlretrieve(url, fileName)
        return fileName

    def open(self, url=None):
        """Return an absolute local path for url, downloading it first if needed."""
        if os.path.isfile(url):
            return os.path.abspath(url)
        return os.path.abspath(self.download(url))

    def load(self, url=None):
        """Return a QPixmap for url scaled to fit the label, or None on failure."""
        fileName = self.open(url)
        if not fileName:
            return None
        image = QtGui.QImage(fileName)
        if image.isNull():
            QtGui.QMessageBox.information(self, "Image Viewer", "Cannot load %s." % fileName)
            return None
        label_size = self.imageLabel.size()
        if image.size().width() > label_size.width() or image.size().height() > label_size.height():
            # Downscale to fit; FastTransformation favours speed over quality.
            return QtGui.QPixmap.fromImage(image).scaled(
                label_size.width(), label_size.height(),
                QtCore.Qt.KeepAspectRatio, QtCore.Qt.FastTransformation)
        return QtGui.QPixmap.fromImage(image)

    def nextPic(self, list=None):
        """Display the next queued image, taken from `list` (default: self.imgList).

        Fixes: the `list` argument was previously ignored (immediately
        overwritten) and is now honoured -- callers passing nothing see no
        change; setPixmap is no longer called with the None that load()
        returns on failure.
        """
        if list is None:
            list = self.imgList
        if not list:
            QtGui.QMessageBox.information(self, "Image Viewer", "No image to load.")
            return
        pixmap = self.load(list.pop(0))
        if pixmap is not None:
            self.imageLabel.setPixmap(pixmap)

    def keyPressEvent(self, event):
        """'N' advances to the next image; all key events are accepted, others ignored."""
        if type(event) == QtGui.QKeyEvent:
            if event.key() == ord('N'):
                self.nextPic()
            event.accept()
        else:
            event.ignore()
| Python |
#!/usr/bin/env python
# # Copyright (c) 2010 Harry Delmolino
# # Permission is hereby granted, free of charge, to any person obtaining a copy
# # of this software and associated documentation files (the "Software"), to deal
# # in the Software without restriction, including without limitation the rights
# # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# # copies of the Software, and to permit persons to whom the Software is
# # furnished to do so, subject to the following conditions:
# # The above copyright notice and this permission notice shall be included in
# # all copies or substantial portions of the Software.
# # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# # THE SOFTWARE.
from PyQt4 import QtCore, QtGui
import os
import urllib
class ImageViewer(QtGui.QMainWindow):
    """Minimal maximized image viewer.

    Images come from self.imgList (local paths or URLs); pressing 'N' pops
    the next entry off the front of the queue and displays it scaled to fit
    the window.
    """

    def __init__(self):
        super(ImageViewer, self).__init__()
        self.imgList = []  # queue of image paths/URLs still to be shown
        self.imageLabel = QtGui.QLabel()
        self.imageLabel.setAlignment(QtCore.Qt.AlignCenter)
        self.setCentralWidget(self.imageLabel)
        self.setWindowTitle("Image Viewer")
        self.showMaximized()

    def download(self, url):
        """Fetch url into the current directory and return the local file name."""
        fileName = url.split('/')[-1]
        urllib.urlretrieve(url, fileName)
        return fileName

    def open(self, url=None):
        """Return an absolute local path for url, downloading it first if needed."""
        if os.path.isfile(url):
            return os.path.abspath(url)
        return os.path.abspath(self.download(url))

    def load(self, url=None):
        """Return a QPixmap for url scaled to fit the label, or None on failure."""
        fileName = self.open(url)
        if not fileName:
            return None
        image = QtGui.QImage(fileName)
        if image.isNull():
            QtGui.QMessageBox.information(self, "Image Viewer", "Cannot load %s." % fileName)
            return None
        label_size = self.imageLabel.size()
        if image.size().width() > label_size.width() or image.size().height() > label_size.height():
            # Downscale to fit; FastTransformation favours speed over quality.
            return QtGui.QPixmap.fromImage(image).scaled(
                label_size.width(), label_size.height(),
                QtCore.Qt.KeepAspectRatio, QtCore.Qt.FastTransformation)
        return QtGui.QPixmap.fromImage(image)

    def nextPic(self, list=None):
        """Display the next queued image, taken from `list` (default: self.imgList).

        Fixes: the `list` argument was previously ignored (immediately
        overwritten) and is now honoured -- callers passing nothing see no
        change; setPixmap is no longer called with the None that load()
        returns on failure.
        """
        if list is None:
            list = self.imgList
        if not list:
            QtGui.QMessageBox.information(self, "Image Viewer", "No image to load.")
            return
        pixmap = self.load(list.pop(0))
        if pixmap is not None:
            self.imageLabel.setPixmap(pixmap)

    def keyPressEvent(self, event):
        """'N' advances to the next image; all key events are accepted, others ignored."""
        if type(event) == QtGui.QKeyEvent:
            if event.key() == ord('N'):
                self.nextPic()
            event.accept()
        else:
            event.ignore()
| Python |
#!/usr/bin/env python
# # Copyright (c) 2010 Harry Delmolino
# # Permission is hereby granted, free of charge, to any person obtaining a copy
# # of this software and associated documentation files (the "Software"), to deal
# # in the Software without restriction, including without limitation the rights
# # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# # copies of the Software, and to permit persons to whom the Software is
# # furnished to do so, subject to the following conditions:
# # The above copyright notice and this permission notice shall be included in
# # all copies or substantial portions of the Software.
# # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# # THE SOFTWARE.
from PyQt4 import QtCore, QtGui
from viewer import ImageViewer
import sys
def main():
    """Create the Qt application, seed the viewer's image queue and run the event loop."""
    app = QtGui.QApplication(sys.argv)
    viewer = ImageViewer()
    viewer.imgList = ["http://i.imgur.com/7wQEll.jpg", "http://i.imgur.com/l8XmY.jpg", "http://i.imgur.com/G7rli.jpg", "http://i.imgur.com/Jugtg.jpg"]
    sys.exit(app.exec_())
if __name__ == '__main__':
    main()
#!/usr/bin/env python
# # Copyright (c) 2010 Harry Delmolino
# # Permission is hereby granted, free of charge, to any person obtaining a copy
# # of this software and associated documentation files (the "Software"), to deal
# # in the Software without restriction, including without limitation the rights
# # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# # copies of the Software, and to permit persons to whom the Software is
# # furnished to do so, subject to the following conditions:
# # The above copyright notice and this permission notice shall be included in
# # all copies or substantial portions of the Software.
# # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# # THE SOFTWARE.
from PyQt4 import QtCore, QtGui
from viewer import ImageViewer
import sys
def main():
    """Create the Qt application, seed the viewer's image queue and run the event loop."""
    app = QtGui.QApplication(sys.argv)
    viewer = ImageViewer()
    viewer.imgList = ["http://i.imgur.com/7wQEll.jpg", "http://i.imgur.com/l8XmY.jpg", "http://i.imgur.com/G7rli.jpg", "http://i.imgur.com/Jugtg.jpg"]
    sys.exit(app.exec_())
if __name__ == '__main__':
    main()
#!/usr/bin/env python
from distutils.core import setup
#from setuptools import setup, find_packages
# Packaging metadata for Figit (distutils).  To migrate to setuptools, swap
# the import for `from setuptools import setup, find_packages`, use
# packages=find_packages(), and declare install_requires=['paramiko>=1.6.4']
# for the SSH channel.
setup(
    name='Figit',
    version='0.1',
    description='Config file version control via ssh for the impatient',
    author='Jake Davis',
    author_email='mrsalty0@gmail.com',
    url='http://code.google.com/p/figit/',
    license='GPL',
    packages=['Figit', 'Figit.channels', 'Figit.vcs'],
    scripts=['bin/figit.py'],
)
| Python |
#!/usr/bin/env python
from distutils.core import setup
#from setuptools import setup, find_packages
# Packaging metadata for Figit (distutils).  To migrate to setuptools, swap
# the import for `from setuptools import setup, find_packages`, use
# packages=find_packages(), and declare install_requires=['paramiko>=1.6.4']
# for the SSH channel.
setup(
    name='Figit',
    version='0.1',
    description='Config file version control via ssh for the impatient',
    author='Jake Davis',
    author_email='mrsalty0@gmail.com',
    url='http://code.google.com/p/figit/',
    license='GPL',
    packages=['Figit', 'Figit.channels', 'Figit.vcs'],
    scripts=['bin/figit.py'],
)
| Python |
# Local filesystem access channel for Figit.
# import os
# import sha
# class LocalChannel:
# """
# A Class with functions for examining and manipulating a source
# directory that resides on the local file system.
# """
# # Until someone thinks of a better way, we'll brute force permissions
# # using sudo whenever the first write attempt fails.
# import pwd
# import grp
# from shutil import copyfile
# from stat import S_ISDIR
# from commands import getoutput as go
# def __init__(self):
# pass
# def ls(self, rpath):
# return os.listdir(rpath)
# def digest(rpath, sudopw='No'):
# """Returns a sha hash of the file content of remote path."""
# try:
# fobj = open(rpath)
# chmodded = False
# except IOError:
# # Force world readableness
# go("echo %s | sudo chmod o+r %s" % (sudopw, rpath))
# chmodded = True
# fobj = open(rpath)
# m = sha.new()
# while True:
# d = fobj.read(8096)
# if not d:
# break
# m.update(d)
# fobj.close()
# if chmodded == True:
# # Undo world readableness
# go("echo %s | sudo chmod o-r %s" % (sudopw, rpath))
# return m.hexdigest()
#
# def stats(self, rpath, sudopw='No'):
# """Returns (uid, gid, mode, filetype, sha1hash) for rpath."""
# info = os.stat(rpath)
# uid = pwd.getpwuid(info[4])[0]
# gid = grp.getgrgid(info[5])[0]
# mode = oct(info[0])[-4:]
# if S_ISDIR(info.st_mode):
# filetype = 'directory'
# else:
# filetype = 'file'
# newdigest = self.digest(rpath, sudopw)
# return (uid, gid, mode, filetype, newdigest)
# def chmod(self, rpath, ownership, mode, sudopw='No'):
# """Set file ownership and mode on the remote path."""
# out = go("echo %s | sudo chown %s %s" % (sudopw, ownership, rpath))
# out += go("echo %s | sudo chmod %s %s" % (sudopw, mode, rpath))
# return out
#
# def get(self, rpath, localpath, sudopw='No'):
# """download a file"""
# try:
# copyfile(rpath, localpath)
# except IOError:
# # Force world readableness
# go("echo %s | sudo chmod o+r %s" % (sudopw, rpath))
# copyfile(rpath, localpath)
# # Undo world readableness
# go("echo %s | sudo chmod o-r %s" % (sudopw, rpath))
#
# def put(self, localpath, rpath, sudopw='No'):
# """Copy a file to the remote directory."""
# try:
# copyfile(localpath, rpath)
# except IOError:
# # force world readableness
# go("echo %s | sudo chmod o+r %s" % (sudopw, rpath))
# copyfile(localpath, rpath)
# # undo world readableness
# go("echo %s | sudo chmod o-r %s" % (sudopw, rpath))
# def rename(self, rpath, sudopw='No'):
# """Rename a file with a '.bak' extension."""
# try:
# os.rename(rpath, rpath+'.bak')
# except IOError:
# # force world readableness
# go("echo %s | sudo chmod o+r %s" % (sudopw, rpath))
# os.rename(rpath, rpath+'.bak')
| Python |
# SSH remote filesystem access channel for Figit.
import os
import sha
import getpass
from paramiko import SSHClient, SSHException
from paramiko.util import log_to_file, load_host_keys
class SSHChannel:
    """
    A Class with functions for examining and manipulating a source directory
    that resides on a remote system, over SSH/SFTP (paramiko).

    Until someone thinks of a better way, we brute force permissions by
    piping the sudo password to sudo whenever the first read/write attempt
    fails.
    """

    def __init__(self, user, host, port, sudopw):
        """Connect to host:port as user: key auth first, then sudopw, then prompt."""
        log_to_file('/var/tmp/figit-ssh.log')  # TODO: make this os agnostic.
        self.port = int(port)
        self.sudopw = sudopw
        self.client = SSHClient()
        self.client.load_system_host_keys()
        try:  # 1st try an ssh key
            self.client.connect(host, port, user)
            # Versions of paramiko prior to 1.7 fail here. If you have v1.6.4
            # or earlier, specifying key_filename should fix the problem.
            # key_filename=os.path.expanduser('~/.ssh/id_rsa')
        except SSHException:
            try:  # next try the sudo password
                password = sudopw
                self.client.connect(host, port, user, password)
            except SSHException:  # finally ask the user for a password.
                password = getpass.getpass("Enter ssh password for %s: " % user)
                self.client.connect(host, port, user, password)
        self.sftp = self.client.open_sftp()

    def run(self, command):
        """Run a command on the remote host.

        Returns stderr lines if any were produced, otherwise stdout lines.
        """
        stdin, stdout, stderr = self.client.exec_command(command)
        errors = stderr.readlines()
        out = stdout.readlines()
        stdin.close()
        stdout.close()
        stderr.close()
        if errors != []:
            return errors
        else:
            return out

    def ls(self, rpath):
        """List the contents of remote directory rpath."""
        return self.sftp.listdir(rpath)

    def digest(self, rpath):
        """Return a sha1 hex digest of the file content of remote path rpath.

        An unreadable file is temporarily made world-readable via sudo and
        restored afterwards.
        """
        try:
            fobj = self.sftp.open(rpath)
            chmodded = False
        except IOError:
            # Force world readableness
            self.run("echo %s | sudo chmod o+r %s" % (self.sudopw, rpath))
            chmodded = True
            fobj = self.sftp.open(rpath)
        m = sha.new()
        while True:
            d = fobj.read(8096)
            if not d:
                break
            m.update(d)
        fobj.close()
        if chmodded == True:
            # Undo world readableness
            self.run("echo %s | sudo chmod o-r %s" % (self.sudopw, rpath))
        return m.hexdigest()

    def stats(self, rpath):
        """Return (uid, gid, mode, filetype, sha1hash) for rpath.

        uid/gid are names (not numeric ids) so they transfer across machines.
        """
        newdigest = self.digest(rpath)
        stdin, stdout, stderr = self.client.exec_command(
            """stat -c %U:%G:%a:%F """ + rpath)
        uid, gid, mode, filetype = stdout.read().split(':')
        stdin.close()
        stdout.close()
        stderr.close()
        # The stat shell command often returns a three digit mode, e.g. "644"
        # so zfill is used to pad the result with leading zeros, e.g. "0644".
        return (uid, gid, mode.zfill(4), filetype.strip(), newdigest)

    def chmod(self, rpath, ownership, mode):
        """Set file ownership and permissions mode on remote path."""
        out = self.run("echo %s | sudo chown %s %s"
                       % (self.sudopw, ownership, rpath))
        out += self.run("echo %s | sudo chmod %s %s"
                        % (self.sudopw, mode, rpath))
        return out

    def get(self, rpath, localpath):
        """Download a file, escalating to sudo-granted read access if needed."""
        try:
            self.sftp.get(rpath, localpath)
        except IOError:
            # Force world readableness
            out = self.run("echo %s | sudo chmod o+r %s" % (self.sudopw, rpath))
            self.sftp.get(rpath, localpath)
            # Undo world readableness
            out = self.run("echo %s | sudo chmod o-r %s" % (self.sudopw, rpath))

    def put(self, localpath, rpath, ownership, mode):
        """Upload a file to the remote host, then apply ownership and mode.

        Missing remote file/directory are created via sudo; when the plain
        SFTP upload is refused, the file (and if necessary its parent
        directory) permissions are loosened for the upload and restored.
        """
        try:  # make sure the file exists
            self.sftp.normalize(rpath)
        except IOError:
            try:  # make sure the directory exists before creating file.
                self.sftp.normalize(os.path.dirname(rpath))
            except IOError:
                self.run("echo %s | sudo mkdir %s" % (self.sudopw,
                                                      os.path.dirname(rpath)))
            self.run("echo %s | sudo touch %s" % (self.sudopw, rpath))
        try:
            self.sftp.put(localpath, rpath)
        except IOError:
            # Remember permissions of parent directory, then upload file.
            dmode = self.run("stat -c %a " + os.path.dirname(rpath))[0].strip()
            # BUG FIX: the original compared the mode's last character (a str)
            # to the int 7 (`dmode[-1] < 7`), which is never true under
            # Python 2's mixed-type ordering (and a TypeError on Python 3),
            # so the parent directory was never opened up.
            if int(dmode[-1]) < 7:
                self.run("echo %s | sudo chmod %s %s" % (self.sudopw, 'o+rwx',
                                                         os.path.dirname(rpath)))
                dirmodded = True
            else:
                dirmodded = False
            self.run("echo %s | sudo chmod %s %s" % (self.sudopw, '666', rpath))
            self.sftp.put(localpath, rpath)
            if dirmodded:  # Change parent mode back to what it was.
                self.run("echo %s | sudo chmod %s %s" % (self.sudopw, dmode,
                                                         os.path.dirname(rpath)))
        return self.chmod(rpath, ownership, mode)

    def rename(self, rpath):
        """Rename a remote file to rpath + '.bak', falling back to sudo mv."""
        try:
            self.sftp.rename(rpath, rpath+'.bak')
        except IOError:
            return self.run("echo %s | sudo mv %s %s"
                            % (self.sudopw, rpath, rpath+'.bak'))

    def close(self):
        """Close the underlying SSH connection."""
        self.client.close()
| Python |
# Some common funtions used by Figit.
import sys
import os
import sha
from os.path import join, sep
from fnmatch import fnmatch
def quit(msg="\nDone"):
    """Best-effort cleanup (commit manifest, close SSH, restore dev branch), then exit.

    NOTE(review): M, channel, V and DEVBRANCH are expected to exist as globals
    set by the calling script; none are defined in this module.  The bare
    except deliberately makes cleanup best-effort, but it also hides real
    errors -- confirm before tightening.
    """
    try:
        M.commit()
        channel.client.close()
        V.checkout(DEVBRANCH)
    except: pass
    print msg
    sys.exit(0)
def get_message(prompt):
    """Prompt the user for a line of text, stripping any quote characters.

    Quotes are removed because the message is later embedded in shell/VCS
    command strings.
    """
    return raw_input(prompt).replace('"', '').replace("'", '')
def fixpath(filename, src, wd):
    """
    Return a three key dictionary of the following strings for filename:
        wp = fully qualified working directory path.
        sp = fully qualified source path (minus "user@hostname:").
        rp = relative path from base of wd or src.

    A filename that starts with neither src nor wd is treated as relative to
    wd.  Raises ValueError when the file cannot be mapped under wd.  (The
    original `raise ('PathError', msg)` raised a tuple, which is itself a
    TypeError at runtime.)

    NOTE(review): str.replace swaps every occurrence of src/wd, not just the
    leading one -- fine as long as the base paths don't repeat inside
    filenames.
    """
    if filename.startswith(src):
        return {'wp': filename.replace(src, wd),
                'sp': filename,
                'rp': filename.replace(src + sep, '')}
    if filename.startswith(wd):
        return {'wp': filename,
                'sp': filename.replace(wd, src),
                'rp': filename.replace(wd + sep, '')}
    # Fall back: interpret it as a path relative to the working directory.
    filename = os.path.abspath(join(wd, filename))
    if filename.startswith(wd):
        return {'wp': filename,
                'sp': filename.replace(wd, src),
                'rp': filename.replace(wd + sep, '')}
    raise ValueError("%s is not a valid working directory path." % filename)
def list2string(filelist):
    """
    Convert a list object (presumably containing filenames) to a single
    space-separated string for use as a shell command line argument.

    Uses str.join instead of the original quadratic loop of string
    concatenations followed by rstrip.
    """
    return ' '.join(filelist)
def getconf():
    """Read .figit/config in the cwd; return {'src','port','wd','user','hosts'}.

    Expected layout: a "wd:<path>" line, a "src:[user@host[:port]:]<path>"
    line, then zero or more "host:<name>" lines.

    Fixes: the three-colon (explicit port) branch sliced the already-sliced
    src value again (`src[4:-1]`, copied from the readline slice), mangling
    user@host:port:path configs; `port` is now also defaulted for local
    (colon-free) src paths instead of raising NameError.
    """
    # TODO: Add provision for calling from a subdirectory of wd.
    # TODO: Add more error checking.
    with open(join(os.getcwd(), ".figit", "config"), 'r') as confile:
        wd_config_line = confile.readline()
        assert wd_config_line.startswith('wd:')
        wd = wd_config_line[3:-1]
        src_config_line = confile.readline()
        assert src_config_line.startswith('src:')
        src = src_config_line[4:-1]
        if len(src.split(':')) == 2:
            port = 22
        elif len(src.split(':')) == 3:
            # user@host:port:path -- fold the explicit port out of src.
            l = src.split(':')
            src = l[0] + ':' + l[2]
            port = int(l[1])
        else:
            port = 22  # local path with no host part; port is unused
        hostlist = []
        for h in confile.readlines():
            assert h.startswith('host:')
            hostlist.append(h.split(':')[1].rstrip())
    if src.count(':') == 1:
        usersrchost, src = src.split(':')
        user, srchost = usersrchost.split('@')
    else:
        user, srchost = None, None
    hostlist.insert(0, srchost)  # Source host should always be first.
    return {'src': src, 'port': port, 'wd': wd, 'user': user, 'hosts': hostlist}
def wddigest(filename):
    """Return the SHA-1 hex digest of a working-directory file's content.

    Fixes: the file is now opened in binary mode (text mode would hash
    platform-translated bytes), closed deterministically via `with`, and the
    digest uses hashlib.sha1 -- byte-identical output to the deprecated
    sha.new() the module previously used.
    """
    import hashlib  # local import; the module-level `sha` is the deprecated Py2 spelling
    m = hashlib.sha1()
    with open(filename, 'rb') as fobj:
        while True:
            chunk = fobj.read(8096)
            if not chunk:
                break
            m.update(chunk)
    return m.hexdigest()
def wddiff(VCS, IGNORE):
    """
    Compare WD files and INSTALLBRANCH files.

    Returns the files reported changed by the VCS, minus any matching a Figit
    IGNORE pattern (those should not be distributed).

    Fixes: the original appended every file and then called list.remove once
    per matching pattern, raising ValueError when a file matched two IGNORE
    patterns; prints are parenthesized so the module parses on Python 3 too.
    """
    assert VCS.INSTALLBRANCH != VCS.branch()
    print("Diffing %s and %s" % (VCS.branch(), VCS.INSTALLBRANCH))
    changedfiles = VCS.diff()
    print("CHANGEDFILES: %s" % changedfiles)
    cflist = []
    for f in changedfiles:
        if any(fnmatch(f, pattern) for pattern in IGNORE):
            print("Removing %s from change list." % f)
            continue
        cflist.append(f)
    return cflist
def remotediff(manifest, src, wd, channel, filenames):
    """
    Compares new sha hashes of the files on the distribution host against the
    sha hashes in the current manifest. Returns a list of files that have
    changed on the distribution host.

    Files missing on the remote host are silently skipped (not reported as
    changed).
    """
    changedfiles = []
    for f in filenames:
        fp = fixpath(f, src, wd)
        # The last field of a manifest entry is the recorded content digest.
        olddigest = manifest.manifest[fp['rp']][-1]
        try:
            # An IOError here probably means the file doesn't exist.
            channel.sftp.normalize(fp['sp'])
        except IOError:
            continue
        newdigest = channel.digest(fp['sp'])
        if olddigest == newdigest:
            print "Remote copy of %s appears consistent." % fp['rp']
        else:
            changedfiles.append(fp['sp'])
    return changedfiles
def get_channel(user, host, port, sudopw):
    """Return the appropriate channel class depending on the value of host.

    Currently only the SSH channel is functional; the local-filesystem branch
    below stays disabled until channels/local.py works.
    """
    # TODO: update the local.py so that we have a working local channel.
    # if host is None:
    #     from Figit.channels import local
    #     return local.LocalChannel()
    # else:
    #     from Figit.channels import ssh
    #     return ssh.SSHChannel(user, host, port, sudopw)
    from Figit.channels import ssh
    return ssh.SSHChannel(user, host, port, sudopw)
def get_vcs(vcsname, wd, INSTALLBRANCH):
    """Return the VCS backend matching vcsname ('git' or 'bzr', case-insensitive)."""
    backend = vcsname.lower()
    if backend == 'git':
        from Figit.vcs import git
        return git.Git(wd, INSTALLBRANCH)
    if backend == 'bzr':
        from Figit.vcs import bzr
        return bzr.Bzr(wd, INSTALLBRANCH)
    # CVS, SVN, ... are not implemented; unknown names fall through to None.
    pass
| Python |
# Class for managing the manifest repository file.
from os import rename
from os.path import join
class Manifest:
    """
    Class for managing the manifest repository file.
    Always pass relative paths to these functions. [see: utils.fixpath()]

    Since the manifest file is intended as part of the user interface we try
    to handle hand editing of it gracefully.  Note that uid and gid, which
    may be different from machine to machine, must _not_ be recorded
    numerically; names are used instead.

    Fixes: the Python-2-only `raise KeyError, msg`, `.keys()` + `.sort()` and
    print-statement forms are replaced with version-portable equivalents, and
    file handles are closed via `with`.
    """

    def __init__(self, wd, init_branch):
        """Initialize a dictionary object with data from .figit/manifest (cwd-relative).

        Each non-comment line is "filename stat stat ..."; lines starting
        with ';', '#', '"' or "'" are comments; unparsable lines are skipped.
        """
        self.init_branch = init_branch
        self.wd = wd
        self.manifest = {}
        with open(join(".figit", "manifest"), 'r') as manifile:
            for line in manifile:
                if line[0] in (';', '#', '"', "'"):  # Ignore comments
                    continue
                parts = line.split()
                if parts:  # tolerate blank/hand-mangled lines
                    self.manifest[parts[0]] = parts[1:]

    def update(self, filename, stats):  # update is synonymous with add.
        """Add or replace filename's entry; stats may be a list or a space-separated string."""
        if isinstance(stats, str):
            self.manifest[filename] = stats.split()
        else:
            self.manifest[filename] = stats

    def pop(self, filename):
        """Remove filename from the manifest; KeyError if it is absent."""
        try:
            self.manifest.pop(filename)
        except KeyError:
            raise KeyError("%s not found in the manifest." % filename)

    def commit(self, current_branch):
        """Commit the in-memory manifest dictionary to disk (backing up the old file).

        Committing from a branch other than the one we initialized from would
        corrupt state, so a mismatch is reported -- but, as before, not fatal.
        """
        try:
            assert current_branch == self.init_branch
        except AssertionError:
            print("init_branch = %s \ncurrent_branch = %s" % (self.init_branch,
                                                              current_branch))
        # Backup old file first.
        rename(join(".figit", "manifest"), "%s" % join(".figit", "manifest") + ".bak")
        with open(join(".figit", "manifest"), 'w') as manifile:
            for key in sorted(self.manifest):
                # Write (filename, ownership, mode, digest)
                # NOTE(review): only the first three stat fields are written;
                # entries built from stats() 5-tuples would lose fields --
                # confirm the intended on-disk format.
                manifile.write("%s %s %s %s\n" % (key, self.manifest[key][0],
                                                  self.manifest[key][1],
                                                  self.manifest[key][2]))
| Python |
# Mercurial VCS wrapper/backend for FIgit.
import os
from commands import getoutput as go
from os.path import join, sep
class Hg:
    """Mercurial VCS wrapper/backend for Figit.

    NOTE(review): most methods below still shell out to *git* (branch()
    reads .git/HEAD; add/remove/diff/merge/commit/commitall run "git ...").
    Only initdb(), branch(name) and checkout() actually invoke hg, so this
    looks like an unfinished copy of the Git backend -- confirm before use.
    """
    def __init__(self, wd, INSTALLBRANCH):
        """Record paths/branch names and refuse to start on INSTALLBRANCH."""
        self.ignorefile = '.hgignore'
        # Figit-generated noise that must never be committed.
        self.ignore_patterns = ['*~', '*.pyc', 'manifest.bak']
        self.wd = wd
        self.installdir = wd
        self.INSTALLBRANCH = INSTALLBRANCH
        self._init_branch = self.branch()
        assert self._init_branch != INSTALLBRANCH
    def initdb(self):
        """Initialize a vcs working directory."""
        out = go('hg init') + '\n'
        vcsignorefile = open(self.ignorefile, 'w')
        for pattern in self.ignore_patterns:
            vcsignorefile.write("%s\n" % pattern)
        vcsignorefile.close()
        out += go('hg add .')
        out += go("hg commit -m 'figit: Initial commit.'")
        out += go('hg branch %s' % self.INSTALLBRANCH)
        return out
    def branch(self, branchname=None):
        """Create a new branch or report the current branch name."""
        if branchname is None: # Just report the current branch name.
            try:
                # NOTE(review): reads .git/HEAD -- not valid for Mercurial.
                return open(join(".git", "HEAD")).read().split(sep)[-1].strip()
            except IOError:
                return None
        else: # Create the named branch.
            return go('hg branch %s' % branchname)
    def checkout(self, branchname):
        """Switch working directory to branchname."""
        return go('hg checkout %s' % branchname)
    def add(self, files):
        """Add new files to the INSTALLBRANCH"""
        try:
            br = self.branch()
            assert br == self.INSTALLBRANCH
        except AssertionError:
            self.checkout(self.INSTALLBRANCH)
        # NOTE(review): git command in the Hg backend.
        return go("git add %s" % files)
    def remove(self, files):
        """Remove files from the repository."""
        self.checkout(self.INSTALLBRANCH)
        # NOTE(review): git commands in the Hg backend.
        out = go("git rm -f %s" % files)
        out += go("git commit -m 'figit: deleted %s'" % files)
        self.checkout(self._init_branch)
        out += self.merge("figit: merging deletion of %s" % files,
                          self._init_branch, self.INSTALLBRANCH)
        return out
    def diff(self):
        """Return one filename per line. Ignore Added files."""
        # NOTE(review): git command in the Hg backend.
        return go('git diff --name-only --diff-filter=MRC %s'
                  % self.INSTALLBRANCH).split()
    def merge(self, message, to_branch, from_branch):
        """Merge from_branch into to_branch, then restore the start branch."""
        self.checkout(to_branch)
        # NOTE(review): git command in the Hg backend.
        out = go('git merge "%s" %s %s'
                 % (message, to_branch, from_branch))
        if self.branch() != self._init_branch:
            self.checkout(self._init_branch)
        return out
    def commit(self, message, files):
        """Commit the named files with message (via git -- see class note)."""
        return go("git commit -m '%s' %s" % (message, files))
    def commitall(self, message):
        """Commit all pending changes (via git -- see class note)."""
        return go("git commit -a -m '%s'" % message)
| Python |
# Git VCS wrapper/backend for FIgit.
import os
from commands import getoutput as go
from os.path import join, sep
class Git:
    """Git VCS wrapper/backend for Figit.

    Shells out to the git command line and keeps the working tree on the
    branch it was constructed on, switching to the install branch only for
    operations that must run there.
    """
    def __init__(self, wd, INSTALLBRANCH):
        self.ignorefile = '.gitignore'
        self.ignore_patterns = ['*~', '*.pyc', 'manifest.bak']
        self.wd = wd
        self.installdir = wd
        self.INSTALLBRANCH = INSTALLBRANCH
        self._init_branch = self.branch()
        assert self._init_branch != INSTALLBRANCH
    def initdb(self):
        """Initialize a vcs working directory."""
        output = go('git init-db') + '\n'
        ignore = open(self.ignorefile, 'w')
        ignore.writelines("%s\n" % p for p in self.ignore_patterns)
        ignore.close()
        output += go('git add .')
        output += go("git commit -a -m 'figit: Initial commit.'")
        output += go('git branch %s' % self.INSTALLBRANCH)
        return output
    def branch(self, branchname=None):
        """Create a new branch, or with no argument report the current one."""
        if branchname is not None:
            return go('git branch %s' % branchname)
        try:
            head = open(join(".git", "HEAD")).read()
        except IOError:
            return None
        return head.split(sep)[-1].strip()
    def checkout(self, branchname):
        """Switch working directory to branchname."""
        return go('git checkout %s' % branchname)
    def add(self, files):
        """Add new files to the INSTALLBRANCH"""
        if self.branch() != self.INSTALLBRANCH:
            self.checkout(self.INSTALLBRANCH)
        return go("git add %s" % files)
    def remove(self, files):
        """Remove files from the repository and merge the deletion back."""
        self.checkout(self.INSTALLBRANCH)
        output = go("git rm -f %s" % files)
        output += go("git commit -m 'figit: deleted %s'" % files)
        self.checkout(self._init_branch)
        output += self.merge("figit: merging deletion of %s" % files,
                             self._init_branch, self.INSTALLBRANCH)
        return output
    def diff(self):
        """Return one filename per line. Ignore Added files."""
        return go('git diff --name-only --diff-filter=MRC %s'
                  % self.INSTALLBRANCH).split()
    def merge(self, message, to_branch, from_branch):
        """Merge from_branch into to_branch, then restore the start branch."""
        self.checkout(to_branch)
        output = go('git merge "%s" %s %s'
                    % (message, to_branch, from_branch))
        if self.branch() != self._init_branch:
            self.checkout(self._init_branch)
        return output
    def commit(self, message, files):
        """Commit the named files with message."""
        return go("git commit -m '%s' %s" % (message, files))
    def commitall(self, message):
        """Commit every pending change with message."""
        return go("git commit -a -m '%s'" % message)
| Python |
# Class for reading the RunBefore/Runafter files.
from os.path import join
def Run(runfile):
    """
    Run creates a dictionary which is a mapping of filenames and command lines.
    This dictionary is read when figit.install() is called in order to determine
    what, if any runbefore or runafter scripts need to be run on the
    distribution hosts. If a filename in the upload list matches a name in the
    dictionary returned by this class, then the script name is run.
    """
    # Since the RunBefore/After files are intended as part of the user interface
    # we try to handle hand editing of them gracefully.
    run = {}
    with open(join(".figit", runfile), 'r') as fd:
        for line in fd:
            if line[0] in (';', '#', '"', "'"):  # Ignore comments
                continue
            # Blank or separator-less lines used to raise an uncaught
            # ValueError from index(); skip them instead so a hand-edited
            # file cannot crash the run.
            i = line.replace('\t', ' ').find(' ')  # convert tabs to space
            if i < 0:
                continue
            filename = line[:i]
            cmmd = line[i:].strip()
            # Chop off any EOL user comments.
            comment = cmmd.find('#')
            if comment >= 0:
                cmmd = cmmd[:comment].strip()
            run[filename] = cmmd
    return run
| Python |
#!/usr/bin/env python
from distutils.core import setup
#from setuptools import setup, find_packages
# Distutils packaging metadata for the Figit distribution.
setup(
    name='Figit',
    version='0.1',
    packages=['Figit', 'Figit.channels', 'Figit.vcs'],
    scripts=['bin/figit.py'],
    #packages = find_packages(),
    # for SSH connections we use the paramiko library.
    #install_requires = ['paramiko>=1.6.4'],
    # Meta data for PyPI upload.
    author='Jake Davis',
    author_email='mrsalty0@gmail.com',
    description='Config file version control via ssh for the impatient',
    license = 'GPL',
    url='http://code.google.com/p/figit/',
)
| Python |
#!/usr/bin/env python
from distutils.core import setup
#from setuptools import setup, find_packages
# Distutils packaging metadata for the Figit distribution.
setup(
    name='Figit',
    version='0.1',
    packages=['Figit', 'Figit.channels', 'Figit.vcs'],
    scripts=['bin/figit.py'],
    #packages = find_packages(),
    # for SSH connections we use the paramiko library.
    #install_requires = ['paramiko>=1.6.4'],
    # Meta data for PyPI upload.
    author='Jake Davis',
    author_email='mrsalty0@gmail.com',
    description='Config file version control via ssh for the impatient',
    license = 'GPL',
    url='http://code.google.com/p/figit/',
)
| Python |
# Local filesystem access channel for Figit.
# import os
# import sha
# class LocalChannel:
# """
# A Class with functions for examining and manipulating a source
# directory that resides on the local file system.
# """
# # Until someone thinks of a better way, we'll brute force permissions
# # using sudo whenever the first write attempt fails.
# import pwd
# import grp
# from shutil import copyfile
# from stat import S_ISDIR
# from commands import getoutput as go
# def __init__(self):
# pass
# def ls(self, rpath):
# return os.listdir(rpath)
# def digest(rpath, sudopw='No'):
# """Returns a sha hash of the file content of remote path."""
# try:
# fobj = open(rpath)
# chmodded = False
# except IOError:
# # Force world readableness
# go("echo %s | sudo chmod o+r %s" % (sudopw, rpath))
# chmodded = True
# fobj = open(rpath)
# m = sha.new()
# while True:
# d = fobj.read(8096)
# if not d:
# break
# m.update(d)
# fobj.close()
# if chmodded == True:
# # Undo world readableness
# go("echo %s | sudo chmod o-r %s" % (sudopw, rpath))
# return m.hexdigest()
#
# def stats(self, rpath, sudopw='No'):
# """Returns (uid, gid, mode, filetype, sha1hash) for rpath."""
# info = os.stat(rpath)
# uid = pwd.getpwuid(info[4])[0]
# gid = grp.getgrgid(info[5])[0]
# mode = oct(info[0])[-4:]
# if S_ISDIR(info.st_mode):
# filetype = 'directory'
# else:
# filetype = 'file'
# newdigest = self.digest(rpath, sudopw)
# return (uid, gid, mode, filetype, newdigest)
# def chmod(self, rpath, ownership, mode, sudopw='No'):
# """Set file ownership and mode on the remote path."""
# out = go("echo %s | sudo chown %s %s" % (sudopw, ownership, rpath))
# out += go("echo %s | sudo chmod %s %s" % (sudopw, mode, rpath))
# return out
#
# def get(self, rpath, localpath, sudopw='No'):
# """download a file"""
# try:
# copyfile(rpath, localpath)
# except IOError:
# # Force world readableness
# go("echo %s | sudo chmod o+r %s" % (sudopw, rpath))
# copyfile(rpath, localpath)
# # Undo world readableness
# go("echo %s | sudo chmod o-r %s" % (sudopw, rpath))
#
# def put(self, localpath, rpath, sudopw='No'):
# """Copy a file to the remote directory."""
# try:
# copyfile(localpath, rpath)
# except IOError:
# # force world readableness
# go("echo %s | sudo chmod o+r %s" % (sudopw, rpath))
# copyfile(localpath, rpath)
# # undo world readableness
# go("echo %s | sudo chmod o-r %s" % (sudopw, rpath))
# def rename(self, rpath, sudopw='No'):
# """Rename a file with a '.bak' extension."""
# try:
# os.rename(rpath, rpath+'.bak')
# except IOError:
# # force world readableness
# go("echo %s | sudo chmod o+r %s" % (sudopw, rpath))
# os.rename(rpath, rpath+'.bak')
| Python |
# SSH remote filesystem access channel for Figit.
import os
import sha
import getpass
from paramiko import SSHClient, SSHException
from paramiko.util import log_to_file, load_host_keys
class SSHChannel:
    """
    A Class with functions for examining and manipulating a source directory
    that resides on a remote system.
    """
    # Until someone thinks of a better way, we'll brute force permissions
    # using sudo whenever the first write attempt fails.
    def __init__(self, user, host, port, sudopw):
        """Open an SSH + SFTP session to host, trying an ssh key, then the
        sudo password, then an interactively prompted password."""
        log_to_file('/var/tmp/figit-ssh.log') # TODO: make this os agnostic.
        self.port = int(port)
        self.sudopw = sudopw
        self.client = SSHClient()
        self.client.load_system_host_keys()
        try: # 1st try an ssh key
            self.client.connect(host, port, user)
            # Versions of paramiko prior to 1.7 fail here. If you have v1.6.4
            # or earlier, specifying key_filename should fix the problem.
            # key_filename=os.path.expanduser('~/.ssh/id_rsa')
        except SSHException:
            try: # next try the sudo password
                password = sudopw
                self.client.connect(host, port, user, password)
            except SSHException: # finally ask the user for a password.
                password = getpass.getpass("Enter ssh password for %s: " % user)
                self.client.connect(host, port, user, password)
        self.sftp = self.client.open_sftp()
    def run(self, command):
        """Run a command on the remote host.

        Returns stderr lines if the command produced any, otherwise stdout
        lines -- callers cannot distinguish success from failure by type.
        """
        stdin, stdout, stderr = self.client.exec_command(command)
        errors = stderr.readlines()
        out = stdout.readlines()
        stdin.close()
        stdout.close()
        stderr.close()
        if errors != []:
            return errors
        else:
            return out
    def ls(self, rpath):
        """List the contents of the remote directory rpath."""
        return self.sftp.listdir(rpath)
    def digest(self, rpath):
        """Returns a sha1 hash of the file content of remote path."""
        try:
            fobj = self.sftp.open(rpath)
            chmodded = False
        except IOError:
            # Force world readableness
            self.run("echo %s | sudo chmod o+r %s" % (self.sudopw, rpath))
            chmodded = True
            fobj = self.sftp.open(rpath)
        m = sha.new()
        while True:
            d = fobj.read(8096)
            if not d:
                break
            m.update(d)
        fobj.close()
        if chmodded:
            # Undo world readableness
            self.run("echo %s | sudo chmod o-r %s" % (self.sudopw, rpath))
        return m.hexdigest()
    def stats(self, rpath):
        """Returns (uid, gid, mode, filetype, sha1hash) for rpath."""
        newdigest = self.digest(rpath)
        stdin, stdout, stderr = self.client.exec_command(
            """stat -c %U:%G:%a:%F """ + rpath)
        uid, gid, mode, filetype = stdout.read().split(':')
        stdin.close()
        stdout.close()
        stderr.close()
        # The stat shell command often returns a three digit mode, e.g. "644"
        # so zfill is used to pad the result with leading zeros, e.g. "0644".
        return (uid, gid, mode.zfill(4), filetype.strip(), newdigest)
    def chmod(self, rpath, ownership, mode):
        """Set file ownership and permissions mode on remote path"""
        out = self.run("echo %s | sudo chown %s %s"
                       % (self.sudopw, ownership, rpath))
        out += self.run("echo %s | sudo chmod %s %s"
                        % (self.sudopw, mode, rpath))
        return out
    def get(self, rpath, localpath):
        """download a file"""
        try:
            self.sftp.get(rpath, localpath)
        except IOError:
            # Force world readableness
            self.run("echo %s | sudo chmod o+r %s" % (self.sudopw, rpath))
            self.sftp.get(rpath, localpath)
            # Undo world readableness
            self.run("echo %s | sudo chmod o-r %s" % (self.sudopw, rpath))
    def put(self, localpath, rpath, ownership, mode):
        """Upload a file to the remote host."""
        try: # make sure the file exists
            self.sftp.normalize(rpath)
        except IOError:
            try: # make sure the directory exists before creating file.
                self.sftp.normalize(os.path.dirname(rpath))
            except IOError:
                self.run("echo %s | sudo mkdir %s" % (self.sudopw,
                                                      os.path.dirname(rpath)))
            self.run("echo %s | sudo touch %s" % (self.sudopw, rpath))
        try:
            self.sftp.put(localpath, rpath)
        except IOError:
            # Remember permissions of parent directory, then upload file.
            dmode = self.run("stat -c %a " + os.path.dirname(rpath))[0].strip()
            # BUG FIX: dmode is a string, and the old test "dmode[-1] < 7"
            # compared str to int (always False on Python 2), so the parent
            # directory was never opened up.  Compare numerically instead.
            if int(dmode[-1]) < 7:
                dirmodded = True
                self.run("echo %s | sudo chmod %s %s" % (self.sudopw, 'o+rwx',
                                                         os.path.dirname(rpath)))
            else:
                dirmodded = False
            self.run("echo %s | sudo chmod %s %s" % (self.sudopw, '666', rpath))
            self.sftp.put(localpath, rpath)
            if dirmodded: # Change parent mode back to what it was.
                self.run("echo %s | sudo chmod %s %s" % (self.sudopw, dmode,
                                                         os.path.dirname(rpath)))
        return self.chmod(rpath, ownership, mode)
    def rename(self, rpath):
        """Rename a remote file to rpath.bak, escalating via sudo on IOError."""
        try:
            self.sftp.rename(rpath, rpath+'.bak')
        except IOError:
            return self.run("echo %s | sudo mv %s %s"
                            % (self.sudopw, rpath, rpath+'.bak'))
| Python |
# Some common funtions used by Figit.
import sys
import os
import sha
from os.path import join, sep
from fnmatch import fnmatch
def quit(msg="Done"):
    """Best-effort cleanup, print msg, and exit with status 0.

    The cleanup block references module globals (M, channel, V, DEVBRANCH)
    that are only bound once figit is fully initialized; any failure there
    (including NameError during early startup) is deliberately ignored.
    """
    try:
        M.commit()
        channel.client.close()
        V.checkout(DEVBRANCH)
    except Exception:
        # Narrowed from a bare 'except:' so SystemExit/KeyboardInterrupt
        # are not swallowed; everything else remains best-effort.
        pass
    print(msg)
    sys.exit(0)
def get_message(prompt):
    """Prompt the user and return their reply with all quote characters
    (single and double) stripped out."""
    reply = raw_input(prompt)
    for quote_char in ('"', "'"):
        reply = reply.replace(quote_char, '')
    return reply
def fixpath(filename, src, wd):
    """
    Returns a three key dictionary of the following strings for filename:
      wp = fully qualified working directory path.
      sp = fully qualified source path (minus "user@hostname:").
      rp = relative path from base of wd or src.

    A filename under neither tree is resolved with os.path.abspath and
    retried.  Raises ValueError when it still belongs to neither tree --
    the old code tried to 'raise' a bare tuple, which is not a valid
    exception and itself blew up with a TypeError.
    """
    if filename.startswith(src):
        return {'wp': filename.replace(src, wd),
                'sp': filename,
                'rp': filename.replace(src + sep, '')}
    if filename.startswith(wd):
        return {'wp': filename,
                'sp': filename.replace(wd, src),
                'rp': filename.replace(wd + sep, '')}
    filename = os.path.abspath(filename)
    if filename.startswith(wd):
        return {'wp': filename,
                'sp': filename.replace(wd, src),
                'rp': filename.replace(wd + sep, '')}
    raise ValueError("%s is not a valid working directory path." % filename)
def list2string(filelist):
    """
    Converts a list object (presumably containing filenames) to a single
    space-separated string for use as a shell command line argument.
    """
    # str.join is linear and allocation-friendly, unlike repeated += which
    # is quadratic, and it needs no trailing-space cleanup afterwards.
    return ' '.join(filelist)
def getconf():
    """Read in values from figit config files.

    Returns {'src', 'port', 'wd', 'user', 'hosts'} parsed from
    .figit/config in the current working directory.  Expected layout:
      wd:<working dir>
      src:<user@host:path>   (or a 3-field form carrying a port)
      host:<name>            (zero or more distribution hosts)
    """
    # TODO: Add provision for calling from a subdirectory of wd.
    # TODO: Add more error checking.
    confile = open( join(os.getcwd(), ".figit", "config"), 'r' )
    wd_config_line = confile.readline()
    assert wd_config_line.startswith('wd:')
    wd = wd_config_line[3:-1]
    src_config_line = confile.readline()
    assert src_config_line.startswith('src:')
    src = src_config_line[4:-1]
    if src.split(':').__len__() == 2:
        port = 22
    elif src.split(':').__len__() == 3:
        # NOTE(review): src already had its 'src:' prefix stripped above,
        # yet this slices another 4 leading chars (and the last char) off
        # before splitting.  Verify against a real user@host:port:path
        # config line -- this looks like a double-strip bug.
        l = src[4:-1].split(':')
        src = l[0] + ':' + l[2]
        port = int(l[1])
    hostlist = []
    hosts = confile.readlines()
    if hosts != []:
        for h in hosts:
            assert h.startswith('host:')
            hostlist.append(h.split(':')[1].rstrip())
    if src.count(':') == 1:
        usersrchost, src = src.split(':')
        user, srchost = usersrchost.split('@')
    else:
        user, srchost = None, None
    hostlist.insert(0, srchost) # I think Source Host should always be first.
    return {'src':src, 'port':port, 'wd':wd, 'user':user, 'hosts':hostlist}
def wddigest(filename):
    """Return the SHA-1 hex digest of a file in the working directory.

    Reads in binary mode so the digest reflects the exact bytes on disk
    regardless of platform newline translation.
    """
    # Local import: the module-level 'sha' import is the deprecated
    # Python 2 module; hashlib (available since 2.5) replaces it.
    import hashlib
    m = hashlib.sha1()
    with open(filename, 'rb') as fobj:
        while True:
            d = fobj.read(8096)
            if not d:
                break
            m.update(d)
    return m.hexdigest()
def wddiff(VCS, IGNORE):
    """
    Compares WD files and INSTALLBRANCH files.
    Returns a list of files, other than figit ignore files, that have changed
    in the WD.
    """
    assert VCS.INSTALLBRANCH != VCS.branch()
    print("Diffing %s and %s" % (VCS.branch(), VCS.INSTALLBRANCH))
    changedfiles = VCS.diff()
    print("CHANGEDFILES: %s" % changedfiles)
    cflist = []
    for f in changedfiles:
        # The old append-then-remove loop called cflist.remove(f) once per
        # matching pattern, raising ValueError when one file matched two
        # IGNORE patterns.  Filter on the way in instead.
        if any(fnmatch(f, pattern) for pattern in IGNORE):
            print("Removing %s from change list." % f)
        else:
            cflist.append(f)
    return cflist
def remotediff(manifest, src, wd, channel, filenames):
    """
    Compares new sha hashes of the files on the distribution host against the
    sha hashes in the current manifest. Returns a list of files that have
    changed on the distribution host.
    """
    changed = []
    for name in filenames:
        paths = fixpath(name, src, wd)
        recorded = manifest.manifest[paths['rp']][-1]
        try:
            # An IOError here probably means the file doesn't exist.
            channel.sftp.normalize(paths['sp'])
        except IOError:
            continue
        if channel.digest(paths['sp']) == recorded:
            print("Remote copy of %s appears consistent." % paths['rp'])
        else:
            changed.append(paths['sp'])
    return changed
def get_channel(user, host, port, sudopw):
    """Return the appropriate class depending on the value of host.

    Currently always returns an SSHChannel; the local-filesystem branch
    stays commented out until Figit/channels/local.py works.
    """
    # TODO: update the local.py so that we have a working local channel.
    # if host is None:
    # from Figit.channels import local
    # return local.LocalChannel()
    # else:
    # from Figit.channels import ssh
    # return ssh.SSHChannel(user, host, port, sudopw)
    from Figit.channels import ssh
    return ssh.SSHChannel(user, host, port, sudopw)
def get_vcs(vcsname, wd, INSTALLBRANCH):
    """Return the appropriate class depending on what VCSNAME is set to.

    NOTE(review): only 'git' is implemented; any other vcsname silently
    falls through and the function returns None, which callers must be
    prepared for (or this should raise).  Confirm intended behavior.
    """
    if vcsname.lower() == 'git':
        from Figit.vcs import git
        return git.Git(wd, INSTALLBRANCH)
    else:
        # CVS, SVN, BZR....
        pass
| Python |
# Class for managing the manifest repository file.
from os import rename
from os.path import join
class Manifest:
    """
    Class for managing the manifest repository file.
    Always pass relative paths to these functions. [see: utils.fixpath()]
    """
    # Since the manifest file is intended as part of the user interface we try
    # to handle hand editing of it gracefully.
    # Note that uid and gid, which may be different from machine to machine,
    # must _not_ be recorded numerically. Names are used instead.
    def __init__(self, wd, init_branch):
        """Initialize a dictionary object with data from the manifest file."""
        self.init_branch = init_branch
        self.wd = wd
        self.manifest = {}
        # 'with' closes the handle even if a line fails to parse.
        with open(join(".figit", "manifest"), 'r') as manifile:
            for line in manifile:
                if line[0] not in (';', '#', '"', "'"):  # Ignore comments
                    try:
                        fields = line.split()
                        self.manifest[fields[0]] = fields[1:]
                    except IndexError:
                        # Whitespace-only line: nothing to record.
                        pass

    def update(self, filename, stats):  # update is synonymous with add.
        """Add filename to manifest; stats may be a list or a string."""
        if isinstance(stats, str):
            self.manifest[filename] = stats.split()
        else:
            self.manifest[filename] = stats

    def pop(self, filename):
        """Remove filename from manifest."""
        try:
            self.manifest.pop(filename)
        except KeyError:
            # Instance-style raise works on Python 2 and 3 alike; the old
            # 'raise KeyError, msg' form is a SyntaxError on Python 3.
            raise KeyError("%s not found in the manifest." % filename)

    def commit(self, current_branch):
        """Commit the in-memory manifest dictionary to disk.

        The old file is renamed to manifest.bak first; entries are written
        sorted by filename as "filename ownership mode digest".
        """
        # If we are not on the branch we initialized from, committing could
        # corrupt the manifest.  Warn (but continue) as before -- using an
        # explicit check instead of an assert, which python -O would strip.
        if current_branch != self.init_branch:
            print("init_branch = %s \ncurrent_branch = %s" % (self.init_branch,
                                                              current_branch))
        # Backup old file first.
        rename(join(".figit", "manifest"), join(".figit", "manifest") + ".bak")
        with open(join(".figit", "manifest"), 'w') as manifile:
            for key in sorted(self.manifest):
                # Write (filename, ownership, mode, digest)
                manifile.write("%s %s %s %s\n" % (key, self.manifest[key][0],
                                                  self.manifest[key][1],
                                                  self.manifest[key][2]))
| Python |
# Git VCS wrapper/backend for FIgit.
import os
from commands import getoutput as go
from os.path import join, sep
class Git:
    """Git VCS wrapper/backend for Figit (on-demand branch variant).

    Shells out to the git command line.  Unlike the other backend, the
    starting branch is looked up when needed rather than cached at
    construction time.
    """
    def __init__(self, wd, INSTALLBRANCH):
        self.wd = wd
        self.ignorefile = '.gitignore'
        self.ignore_patterns = ['*~', '*.pyc', 'manifest.bak']
        self.INSTALLBRANCH = INSTALLBRANCH
    def initdb(self):
        """Initialize a vcs working directory."""
        output = go('git init-db')
        ignore = open(self.ignorefile, 'w')
        ignore.writelines("%s\n" % p for p in self.ignore_patterns)
        ignore.close()
        output += go('git add .')
        output += go("git commit -a -m 'figit: Initial commit.'")
        output += go('git branch %s' % self.INSTALLBRANCH)
        return output
    def branch(self, branchname=None):
        """Create a new branch, or with no argument report the current one."""
        if branchname is not None:
            return go('git branch %s' % branchname)
        return open(join(".git", "HEAD")).read().split(sep)[-1].strip()
    def checkout(self, branchname):
        """Switch working directory to branchname."""
        return go('git checkout %s' % branchname)
    def add(self, files):
        """Add new files to the INSTALLBRANCH"""
        if self.branch() != self.INSTALLBRANCH:
            self.checkout(self.INSTALLBRANCH)
        return go("git add %s" % files)
    def remove(self, files):
        """Remove files from the repository."""
        start_branch = self.branch()
        assert start_branch != self.INSTALLBRANCH
        self.checkout(self.INSTALLBRANCH)
        output = go("git rm -f %s" % files)
        output += go("git commit -m 'figit: deleted %s'" % files)
        self.checkout(start_branch)
        output += self.merge("figit: removed %s" % files,
                             start_branch, self.INSTALLBRANCH)
        return output
    def diff(self):
        """Return one filename per line. Ignore Added files."""
        return go('git diff --name-only --diff-filter=MRC %s'
                  % self.INSTALLBRANCH).split()
    def merge(self, message, to_branch, from_branch):
        """Merge from_branch into to_branch and return to the start branch."""
        start_branch = self.branch()
        self.checkout(to_branch)
        output = go('git merge "%s" %s %s'
                    % (message, to_branch, from_branch))
        if self.branch() != start_branch:
            self.checkout(start_branch)
        return output
    def commit(self, message, files):
        """Commit the named files with message."""
        return go("git commit -m '%s' %s" % (message, files))
    def commitall(self, message):
        """Commit every pending change with message."""
        return go("git commit -a -m '%s'" % message)
| Python |
# Class for reading the RunBefore/Runafter files.
from os.path import join
def Run(runfile):
    """
    Run creates a dictionary which is a mapping of filenames and command lines.
    This dictionary is read when figit.install() is called in order to determine
    what, if any runbefore or runafter scripts need to be run on the
    distribution hosts. If a filename in the upload list matches a name in the
    dictionary returned by this class, then the script name is run.
    """
    # Since the RunBefore/After files are intended as part of the user interface
    # we try to handle hand editing of them gracefully.
    mapping = {}
    fd = open(join(".figit", runfile), 'r')
    for line in fd.readlines():
        if line[0] in (';', '#', '"', "'"):  # Ignore comments
            continue
        split_at = line.replace('\t', ' ').index(' ')  # convert tabs to space
        name = line[:split_at]
        command = line[split_at:].strip()
        try:
            eol = command.index('#')  # Chop off any EOL user comments.
            mapping[name] = command[:eol].strip()
        except ValueError:
            mapping[name] = command
    fd.close()
    return mapping
| Python |
# Copyright 2012, Google Inc.
# All rights reserved.
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import exceptions
class Error(exceptions.StandardError):
    """Root of the vtdb exception hierarchy (DB-API 2.0 style)."""
    pass
class Warning(exceptions.StandardError):
    """Non-fatal warning, per the DB-API 2.0 exception layout."""
    pass
class InterfaceError(Error):
    """Error in the database interface itself rather than the database."""
    pass
class DatabaseError(Error):
    """Base for errors reported by the database."""
    pass
class InternalError(DatabaseError):
    """The database encountered an internal error."""
    pass
class OperationalError(DatabaseError):
    """Error related to the database's operation (e.g. lost connection)."""
    pass
class ProgrammingError(DatabaseError):
    """SQL syntax or API usage error by the programmer."""
    pass
class IntegrityError(DatabaseError):
    """Relational integrity violation (e.g. duplicate key)."""
    pass
class DataError(DatabaseError):
    """Problem with the processed data itself (range, type)."""
    pass
class NotSupportedError(DatabaseError):
    """Operation not supported by the database or API."""
    pass
| Python |
# Copyright 2012, Google Inc.
# All rights reserved.
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import errno
import logging
import re
import time
from net import gorpc
from vtdb import tablet2
from vtdb import dbexceptions
# NOTE(msolomon) this sketchy import allows upstream code to mostly interpret
# our exceptions as if they came from MySQLdb. Good for a cutover, probably
# bad long-term.
import MySQLdb as MySQLErrors
_errno_pattern = re.compile('\(errno (\d+)\)')
# NOTE(msolomon) This mapping helps us mimic the behavior of mysql errors
# even though the relationship between connections and failures is now quite
# different. In general, we map vtocc errors to DatabaseError, unless there
# is a pressing reason to be more precise. Otherwise, these errors can get
# misinterpreted futher up the call chain.
_mysql_error_map = {
    # 1062: MySQL duplicate-key error -- surfaced as IntegrityError (see
    # _convert_error, which exists largely to catch duplicate key inserts).
    1062: MySQLErrors.IntegrityError,
}
# Errors fall into three classes based on recovery strategy.
#
# APP_LEVEL is for routine programmer errors (bad input etc) -- nothing can be
# done here, so just propagate the error upstream.
#
# RETRY means a simple reconnect (and immediate) reconnect to the same
# host will likely fix things. This is usually due vtocc restarting. In general
# this can be handled transparently unless the error is within a transaction.
#
# FATAL indicates that retrying an action on the host is likely to fail.
ERROR_APP_LEVEL = 'app_level'
ERROR_RETRY = 'retry'
ERROR_FATAL = 'fatal'
# Seconds to sleep before redialing after a retryable failure.
RECONNECT_DELAY = 0.002
# simple class to trap and re-export only variables referenced from the sql
# statement. bind dictionaries can be *very* noisy.
# this is by-product of converting the mysql %(name)s syntax to vtocc :name
class BindVarsProxy(object):
    """Mapping proxy that records which bind variables a SQL template
    actually references and rewrites them to vtocc ':name' placeholders."""

    def __init__(self, bind_vars):
        self.bind_vars = bind_vars
        self.accessed_keys = set()

    def __getitem__(self, name):
        # Look the key up purely so a missing bind var raises KeyError here,
        # then remember that it was referenced.
        self.bind_vars[name]
        self.accessed_keys.add(name)
        return ':%s' % name

    def export_bind_vars(self):
        """Return only the bind vars that were referenced via __getitem__."""
        return dict((k, self.bind_vars[k]) for k in self.accessed_keys)
# Provide compatibility with the MySQLdb query param style and prune bind_vars
class VtOCCConnection(tablet2.TabletConnection):
max_attempts = 2
def dial(self):
tablet2.TabletConnection.dial(self)
try:
response = self.client.call('OccManager.GetSessionId', self.dbname)
self.set_session_id(response.reply)
except gorpc.GoRpcError, e:
raise dbexceptions.OperationalError(*e.args)
def _convert_error(self, exception, *error_hints):
message = str(exception[0]).lower()
# NOTE(msolomon) extract a mysql error code so we can push this up the code
# stack. At this point, this is almost exclusively for handling integrity
# errors from duplicate key inserts.
match = _errno_pattern.search(message)
if match:
err = int(match.group(1))
elif isinstance(exception[0], IOError):
err = exception[0].errno
else:
err = -1
if message.startswith('fatal'):
# Force this error code upstream so MySQL code understands this as a
# permanent failure on this host. Feels a little dirty, but probably the
# most consistent way since this correctly communicates the recovery
# strategy upstream.
raise MySQLErrors.OperationalError(2003, str(exception), self.addr,
*error_hints)
elif message.startswith('retry'):
# Retry means that a trivial redial of this host will fix things. This
# is frequently due to vtocc being restarted independently of the mysql
# instance behind it.
error_type = ERROR_RETRY
elif 'curl error 7' in message:
# Client side error - sometimes the listener is unavailable for a few
# milliseconds during a restart.
error_type = ERROR_RETRY
elif err in (errno.ECONNREFUSED, errno.EPIPE):
error_type = ERROR_RETRY
else:
# Everything else is app level - just process the failure and continue
# to use the existing connection.
error_type = ERROR_APP_LEVEL
if error_type == ERROR_RETRY and self.transaction_id:
# With a transaction, you cannot retry, so just redial. The next action
# will be successful. Masquerade as commands-out-of-sync - an operational
# error that can be reattempted at the app level.
error_type = ERROR_APP_LEVEL
error_hints += ('cannot retry action within a transaction',)
try:
time.sleep(RECONNECT_DELAY)
self.dial()
except Exception, e:
# If this fails now, the code will retry later as the session_id
# won't be valid until the handshake finishes.
logging.warning('error dialing vtocc %s (%s)', self.addr, e)
exc_class = _mysql_error_map.get(err, MySQLErrors.DatabaseError)
return error_type, exc_class(err, str(exception), self.addr,
*error_hints)
def begin(self):
attempt = 0
while True:
try:
return tablet2.TabletConnection.begin(self)
except dbexceptions.OperationalError, e:
error_type, e = self._convert_error(e, 'begin')
if error_type == ERROR_RETRY:
attempt += 1
if attempt < self.max_attempts:
try:
time.sleep(RECONNECT_DELAY)
self.dial()
except dbexceptions.OperationalError, dial_error:
logging.warning('error dialing vtocc on begin %s (%s)',
self.addr, dial_error)
continue
logging.warning('Failing with 2003 on begin')
raise MySQLErrors.OperationalError(2003, str(e), self.addr, 'begin')
raise e
def commit(self):
try:
return tablet2.TabletConnection.commit(self)
except dbexceptions.OperationalError, e:
error_type, e = self._convert_error(e, 'commit')
raise e
  def _execute(self, sql, bind_variables):
    """Execute one statement, retrying retryable connection errors.

    The %(name)s-style placeholders in sql are rewritten to the :name
    style via BindVarsProxy; export_bind_vars() then supplies the bind
    vars to send (presumably only those the query referenced - see
    BindVarsProxy for the exact contract).
    """
    bind_vars_proxy = BindVarsProxy(bind_variables)
    try:
      # convert bind style from %(name)s to :name
      sql = sql % bind_vars_proxy
    except KeyError, e:
      # A placeholder referenced a name missing from bind_variables.
      raise dbexceptions.InterfaceError(e[0], sql, bind_variables)
    sane_bind_vars = bind_vars_proxy.export_bind_vars()
    attempt = 0
    while True:
      try:
        return tablet2.TabletConnection._execute(self, sql, sane_bind_vars)
      except dbexceptions.OperationalError, e:
        # Classify: retryable connection errors redial and loop; anything
        # else propagates immediately.
        error_type, e = self._convert_error(e, sql, sane_bind_vars)
        if error_type == ERROR_RETRY:
          attempt += 1
          if attempt < self.max_attempts:
            try:
              time.sleep(RECONNECT_DELAY)
              self.dial()
            except dbexceptions.OperationalError, dial_error:
              # Dial failure is non-fatal; the retry will reclassify.
              logging.warning('error dialing vtocc on execute %s (%s)',
                self.addr, dial_error)
            continue
          # Retries exhausted - surface as MySQL 2003 (can't connect).
          logging.warning('Failing with 2003 on %s: %s, %s', str(e), sql, sane_bind_vars)
          raise MySQLErrors.OperationalError(2003, str(e), self.addr, sql, sane_bind_vars)
        raise e
def connect(addr, timeout, dbname=None):
  """Create a VtOCCConnection to addr and dial it before returning."""
  connection = VtOCCConnection(addr, dbname, timeout)
  connection.dial()
  return connection
| Python |
# Copyright 2012, Google Inc.
# All rights reserved.
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""times module
This module provides some Date and Time interface for vtdb
Use Python datetime module to handle date and time columns."""
from datetime import date, datetime, time, timedelta
from math import modf
from time import localtime
# FIXME(msolomon) what are these aliases for?
# Date/Time/TimeDelta/Timestamp are the DB-API 2.0 (PEP 249) type
# constructors, aliased to the stdlib datetime classes.
Date = date
Time = time
TimeDelta = timedelta
Timestamp = datetime
# Aliases matching the mx.DateTime type names - presumably kept for
# callers that still reference them (TODO confirm; no users visible here).
DateTimeDeltaType = timedelta
DateTimeType = datetime
def DateFromTicks(ticks):
  """Convert UNIX ticks into a date instance (local time)."""
  year, month, day = localtime(ticks)[:3]
  return date(year, month, day)
def TimeFromTicks(ticks):
  """Convert UNIX ticks into a time instance (local time)."""
  hour, minute, second = localtime(ticks)[3:6]
  return time(hour, minute, second)
def TimestampFromTicks(ticks):
  """Convert UNIX ticks into a datetime instance (local time)."""
  fields = localtime(ticks)[:6]
  return datetime(*fields)
def DateTimeOrNone(s):
  """Parse a 'YYYY-MM-DD[ T]HH:MM:SS' string into a datetime.

  Falls back to DateOrNone when there is no date/time separator or the
  value does not parse as a datetime.
  The bare 'except:' was narrowed: only parse failures (ValueError from
  int()/datetime(), TypeError from a wrong field count) take the
  fallback; unrelated exceptions like KeyboardInterrupt now propagate.
  """
  if ' ' in s:
    sep = ' '
  elif 'T' in s:
    sep = 'T'
  else:
    return DateOrNone(s)
  try:
    d, t = s.split(sep, 1)
    return datetime(*[ int(x) for x in d.split('-')+t.split(':') ])
  except (ValueError, TypeError):
    return DateOrNone(s)
def TimeDeltaOrNone(s):
  """Parse a MySQL TIME string 'HH:MM:SS[.ffffff]' into a timedelta.

  Per MySQL TIME semantics a leading '-' negates the entire value,
  e.g. '-01:30:00' -> -(1h30m).  Returns None when the value does not
  parse.

  BUG FIX: the original compared the *string* hour field with an int
  ('if h < 0'), which is never true in Python 2, so negative TIME
  values were returned with the wrong sign.  Fixed with the standard
  MySQLdb idiom: build the delta from abs(hours) and negate when the
  string starts with '-'.
  """
  try:
    h, m, sec = s.split(':')
    fractional = float(sec)
    td = timedelta(hours=abs(int(h)), minutes=int(m),
                   seconds=int(fractional),
                   microseconds=int(modf(fractional)[0]*1000000))
    if h.startswith('-'):
      return -td
    return td
  except (ValueError, TypeError, AttributeError):
    # Narrowed from a bare 'except:'; AttributeError kept so non-string
    # input still yields None as before.
    return None
def TimeOrNone(s):
  """Parse an 'HH:MM:SS[.ffffff]' string into a datetime.time, or None.

  The bare 'except:' was narrowed so that only parse failures return
  None; unrelated exceptions (KeyboardInterrupt, etc.) now propagate.
  """
  try:
    h, m, sec = s.split(':')
    fractional = float(sec)
    return time(hour=int(h), minute=int(m), second=int(fractional),
                microsecond=int(modf(fractional)[0]*1000000))
  except (ValueError, TypeError, AttributeError):
    return None
def DateOrNone(s):
  """Parse a 'YYYY-MM-DD' string into a datetime.date, or None.

  The bare 'except:' was narrowed so that only parse failures return
  None; unrelated exceptions now propagate.
  """
  try:
    return date(*[ int(x) for x in s.split('-', 2) ])
  except (ValueError, TypeError, AttributeError):
    return None
def DateToString(d):
  """Render a date as 'YYYY-MM-DD' (MySQL DATE literal format)."""
  formatted = d.strftime("%Y-%m-%d")
  return formatted
def DateTimeToString(dt):
  """Render a datetime as 'YYYY-MM-DD HH:MM:SS' (MySQL DATETIME format)."""
  formatted = dt.strftime("%Y-%m-%d %H:%M:%S")
  return formatted
| Python |
# Copyright 2012, Google Inc.
# All rights reserved.
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from array import array
import datetime
from decimal import Decimal
from vtdb import times
# These numbers should exactly match values defined in dist/vt-mysql-5.1.52/include/mysql/mysql_com.h
# (the enum_field_types column-type codes of the MySQL wire protocol).
VT_DECIMAL = 0
VT_TINY = 1
VT_SHORT = 2
VT_LONG = 3
VT_FLOAT = 4
VT_DOUBLE = 5
VT_NULL = 6
VT_TIMESTAMP = 7
VT_LONGLONG = 8
VT_INT24 = 9
VT_DATE = 10
VT_TIME = 11
VT_DATETIME = 12
VT_YEAR = 13
VT_NEWDATE = 14
VT_VARCHAR = 15
VT_BIT = 16
# The enum jumps to 246+ for the newer and string-like types,
# mirroring mysql_com.h.
VT_NEWDECIMAL = 246
VT_ENUM = 247
VT_SET = 248
VT_TINY_BLOB = 249
VT_MEDIUM_BLOB = 250
VT_LONG_BLOB = 251
VT_BLOB = 252
VT_VAR_STRING = 253
VT_STRING = 254
VT_GEOMETRY = 255
# FIXME(msolomon) intended for MySQL emulation, but seems more dangerous
# to keep this around. This doesn't seem to even be used right now.
def Binary(x):
  """Wrap x in a char array (DB-API 2.0 Binary constructor).

  NOTE(review): the 'c' array typecode is Python 2 only; under Python 3
  this raises ValueError - confirm before any porting effort.
  """
  return array('c', x)
class DBAPITypeObject:
  """DB-API 2.0 type object: compares equal to any of its member codes."""

  def __init__(self, *values):
    self.values = values

  def __cmp__(self, other):
    # 0 (equal) when other is one of our type codes, 1 otherwise.
    return 0 if other in self.values else 1
# FIXME(msolomon) why do we have these values if they aren't referenced?
# DB-API 2.0 module-level type objects; each compares equal to any of its
# member type codes (see DBAPITypeObject.__cmp__).
STRING = DBAPITypeObject(VT_ENUM, VT_VAR_STRING, VT_STRING)
BINARY = DBAPITypeObject(VT_TINY_BLOB, VT_MEDIUM_BLOB, VT_LONG_BLOB, VT_BLOB)
NUMBER = DBAPITypeObject(VT_DECIMAL, VT_TINY, VT_SHORT, VT_LONG, VT_FLOAT, VT_DOUBLE, VT_LONGLONG, VT_INT24, VT_YEAR, VT_NEWDECIMAL)
DATETIME = DBAPITypeObject(VT_TIMESTAMP, VT_DATE, VT_TIME, VT_DATETIME, VT_NEWDATE)
ROWID = DBAPITypeObject()
# Decoder callable for each wire type code.  Types absent from this map
# (strings, blobs, geometry, ...) are passed through unconverted by
# _make_row.  `long` is Python 2 only.
conversions = {
  VT_DECIMAL : Decimal,
  VT_TINY : int,
  VT_SHORT : int,
  VT_LONG : long,
  VT_FLOAT : float,
  VT_DOUBLE : float,
  VT_TIMESTAMP : times.DateTimeOrNone,
  VT_LONGLONG : long,
  VT_INT24 : int,
  VT_DATE : times.DateOrNone,
  VT_TIME : times.TimeDeltaOrNone,
  VT_DATETIME : times.DateTimeOrNone,
  VT_YEAR : int,
  VT_NEWDATE : times.DateOrNone,
  VT_BIT : int,
  VT_NEWDECIMAL : Decimal,
  }
NoneType = type(None)  # cached for the isinstance pass-through check in convert_bind_vars
# FIXME(msolomon) we could make a SqlLiteral ABC and just type check.
# That doesn't seem dramatically better than __sql_literal__ but it might
# be more self-documenting.
def convert_bind_vars(bind_variables):
  """Return a copy of bind_variables with values coerced to wire-safe types.

  Values exposing __sql_literal__ are converted through that hook,
  datetime/date values are rendered as strings, primitive types pass
  through untouched, and everything else is coerced with str().
  """
  converted = {}
  for name, value in bind_variables.iteritems():
    if hasattr(value, '__sql_literal__'):
      converted[name] = value.__sql_literal__()
    elif isinstance(value, datetime.datetime):
      # datetime must be checked before date: datetime is a date subclass.
      converted[name] = times.DateTimeToString(value)
    elif isinstance(value, datetime.date):
      converted[name] = times.DateToString(value)
    elif isinstance(value, (int, long, float, str, NoneType)):
      converted[name] = value
    else:
      # NOTE(msolomon) begrudgingly I allow this - we just have too much code
      # that relies on this.
      # This accidentally solves our hideous dependency on mx.DateTime.
      converted[name] = str(value)
  return converted
| Python |
# Copyright 2012, Google Inc.
# All rights reserved.
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
| Python |
# Copyright 2012, Google Inc.
# All rights reserved.
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from net import mc_bson_request
from vtdb import dbexceptions
class BaseCursor(object):
  """DB-API-style cursor backed by a vtdb connection.

  execute materializes the full result set; the fetch* methods just
  walk self.results using self.index.
  """
  arraysize = 1
  lastrowid = None
  rowcount = 0
  results = None
  connection = None
  description = None
  index = None

  def __init__(self, connection):
    self.connection = connection

  def close(self):
    # Drop references only; the underlying connection is not closed here.
    self.connection = None
    self.results = None

  # pass kargs here in case higher level APIs need to push more data through
  # for instance, a key value for shard mapping
  def _execute(self, sql, bind_variables, **kargs):
    self.rowcount = 0
    self.results = None
    self.description = None
    self.lastrowid = None
    # Transaction-control statements are routed to the connection
    # instead of being sent as queries.
    sql_check = sql.strip().lower()
    if sql_check == 'begin':
      self.connection.begin()
      return
    elif sql_check == 'commit':
      self.connection.commit()
      return
    elif sql_check == 'rollback':
      self.connection.rollback()
      return
    (self.results, self.rowcount,
     self.lastrowid, self.description) = self.connection._execute(
         sql, bind_variables, **kargs)
    self.index = 0
    return self.rowcount

  def fetchone(self):
    if self.results is None:
      raise dbexceptions.ProgrammingError('fetch called before execute')
    if self.index >= len(self.results):
      return None
    row = self.results[self.index]
    self.index += 1
    return row

  def fetchmany(self, size=None):
    if self.results is None:
      raise dbexceptions.ProgrammingError('fetch called before execute')
    if self.index >= len(self.results):
      return []
    if size is None:
      size = self.arraysize
    chunk = self.results[self.index:self.index + size]
    self.index += size
    return chunk

  def fetchall(self):
    if self.results is None:
      raise dbexceptions.ProgrammingError('fetch called before execute')
    return self.fetchmany(len(self.results) - self.index)

  def callproc(self):
    raise dbexceptions.NotSupportedError

  def executemany(self, *pargs):
    raise dbexceptions.NotSupportedError

  def nextset(self):
    raise dbexceptions.NotSupportedError

  def setinputsizes(self, sizes):
    pass

  def setoutputsize(self, size, column=None):
    pass

  @property
  def rownumber(self):
    return self.index

  def __iter__(self):
    return self

  def next(self):
    # Python 2 iterator protocol: exhaustion raises StopIteration.
    row = self.fetchone()
    if row is None:
      raise StopIteration
    return row
# A simple cursor intended for attaching to a single tablet server.
class TabletCursor(BaseCursor):

  def execute(self, sql, bind_variables=None):
    """Run sql with optional bind variables against the attached tablet."""
    return self._execute(sql, bind_variables)
# Standard cursor when connecting to a sharded backend.
class Cursor(BaseCursor):
  def execute(self, sql, bind_variables=None, key=None, keys=None):
    try:
      return self._execute(sql, bind_variables, key=key, keys=keys)
    except mc_bson_request.MCBSonException, e:
      # An 'unavailable' error suggests the tablet topology changed;
      # refresh it before re-raising so a retry can find a live tablet.
      if str(e) == 'unavailable':
        self.connection._load_tablets()
      raise
class KeyedCursor(BaseCursor):
  """Cursor bound to a fixed key/keys pair applied to every execute."""

  def __init__(self, connection, key=None, keys=None):
    BaseCursor.__init__(self, connection)
    self.key = key
    self.keys = keys

  def execute(self, sql, bind_variables):
    return self._execute(sql, bind_variables, key=self.key, keys=self.keys)
class BatchCursor(BaseCursor):
  """Cursor that queues statements and submits them in a single batch."""

  def __init__(self, connection):
    BaseCursor.__init__(self, connection)
    self.exec_list = []

  def execute(self, sql, bind_variables=None, key=None, keys=None):
    # Queue only; nothing is sent to the backend until flush().
    self.exec_list.append(BatchQueryItem(sql, bind_variables, key, keys))

  def flush(self):
    self.rowcount = self.connection._exec_batch(self.exec_list)
    self.exec_list = []
# just used for batch items
class BatchQueryItem(object):
  """Value object holding one queued batch statement."""

  def __init__(self, sql, bind_variables, key, keys):
    self.sql, self.bind_variables = sql, bind_variables
    self.key, self.keys = key, keys
| Python |
# Copyright 2012, Google Inc.
# All rights reserved.
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from itertools import izip
import logging
from net import bsonrpc
from net import gorpc
from vtdb import cursor
from vtdb import dbexceptions
from vtdb import field_types
# A simple, direct connection to the voltron query server.
# This is shard-unaware and only handles the most basic communication.
class TabletConnection(object):
  # 0 means "no transaction open" / "no session established" respectively.
  transaction_id = 0
  session_id = 0
  default_cursorclass = cursor.TabletCursor

  def __init__(self, addr, dbname, timeout):
    """addr is the tablet server's host:port; dbname/timeout stored as-is."""
    self.addr = addr
    self.dbname = dbname
    self.timeout = timeout
    self.client = bsonrpc.BsonRpcClient(self.uri, self.timeout)
    self.cursorclass = self.default_cursorclass

  def dial(self):
    # Closes the client and clears the ids; presumably BsonRpcClient
    # reconnects lazily on the next call - confirm against net/bsonrpc.
    if self.client:
      self.client.close()
    self.transaction_id = 0
    self.session_id = 0

  # You need to obtain and set the session_id for things to work.
  def set_session_id(self, session_id):
    self.session_id = session_id

  @property
  def uri(self):
    # BSON RPC endpoint exposed by the tablet server.
    return 'http://%s/_bson_rpc_' % self.addr

  def close(self):
    # Abort any open transaction before tearing down the client.
    self.rollback()
    self.client.close()
  # NOTE(review): aliasing close as __del__ means a GC'd connection
  # rolls back and closes itself; this can raise during interpreter
  # shutdown if module globals are gone - confirm this is acceptable.
  __del__ = close

  def _make_req(self):
    # Common request envelope for every SqlQuery.* RPC.
    return {'TransactionId': self.transaction_id,
            'ConnectionId': 0,
            'SessionId': self.session_id}

  def begin(self):
    """Open a transaction; stores the server-assigned transaction id."""
    if self.transaction_id:
      raise dbexceptions.NotSupportedError('Nested transactions not supported')
    req = self._make_req()
    try:
      response = self.client.call('SqlQuery.Begin', req)
      self.transaction_id = response.reply
    except gorpc.GoRpcError, e:
      raise dbexceptions.OperationalError(*e.args)

  def commit(self):
    if not self.transaction_id:
      # No transaction open - nothing to commit.
      return
    req = self._make_req()
    # NOTE(msolomon) Unset the transaction_id irrespective of the RPC's
    # response. The intent of commit is that no more statements can be made on
    # this transaction, so we guarantee that. Transient errors between the
    # db and the client shouldn't affect this part of the bookkeeping.
    # Do this after fill_session, since this is a critical part.
    self.transaction_id = 0
    try:
      response = self.client.call('SqlQuery.Commit', req)
      return response.reply
    except gorpc.GoRpcError, e:
      raise dbexceptions.OperationalError(*e.args)

  def rollback(self):
    if not self.transaction_id:
      # No transaction open - nothing to roll back.
      return
    req = self._make_req()
    # NOTE(msolomon) Unset the transaction_id irrespective of the RPC. If the
    # RPC fails, the client will still choose a new transaction_id next time
    # and the tablet server will eventually kill the abandoned transaction on
    # the server side.
    self.transaction_id = 0
    try:
      response = self.client.call('SqlQuery.Rollback', req)
      return response.reply
    except gorpc.GoRpcError, e:
      raise dbexceptions.OperationalError(*e.args)

  def cursor(self, cursorclass=None, **kargs):
    # DB-API entry point; kargs are forwarded to the cursor constructor.
    return (cursorclass or self.cursorclass)(self, **kargs)

  def _execute(self, sql, bind_variables):
    """Run one statement; returns (results, rowcount, lastrowid, fields)."""
    new_binds = field_types.convert_bind_vars(bind_variables)
    req = self._make_req()
    req['Sql'] = sql
    req['BindVariables'] = new_binds
    fields = []
    conversions = []
    results = []
    try:
      response = self.client.call('SqlQuery.Execute', req)
      reply = response.reply
      for field in reply['Fields']:
        fields.append((field['Name'], field['Type']))
        # The column type selects a per-value decoder (may be None,
        # in which case the raw value is kept).
        conversions.append(field_types.conversions.get(field['Type']))
      for row in reply['Rows']:
        results.append(tuple(_make_row(row, conversions)))
      rowcount = reply['RowsAffected']
      lastrowid = reply['InsertId']
    except gorpc.GoRpcError, e:
      raise dbexceptions.OperationalError(*e.args)
    except:
      logging.exception('gorpc low-level error')
      raise
    return results, rowcount, lastrowid, fields
def _make_row(row, conversions):
converted_row = []
for conversion_func, field_data in izip(conversions, row):
if field_data is None:
v = None
elif conversion_func:
v = conversion_func(field_data)
else:
v = field_data
converted_row.append(v)
return converted_row
| Python |
from vtdb import vt_occ2 as db
from vttest import framework
from vttest import cache_cases
class TestCache(framework.TestCase):
  """Integration tests for the vtocc row cache: cacheability rules,
  cache hits, and invalidation on DDL (comment, rename)."""

  def setUp(self):
    pass

  def tearDown(self):
    pass

  def set_env(self, env):
    # The test runner injects the shared environment here.
    self.env = env

  def test_num_str(self):
    # Comparing a numeric literal against a string-typed column must
    # produce a type error.
    try:
      self.env.execute("select bid, eid from vtocc_cached where eid = 1 and bid = 1")
    except (db.MySQLErrors.DatabaseError, db.dbexceptions.OperationalError), e:
      self.assertContains(e[1], "error: Type")
    else:
      self.assertFail("Did not receive exception")

  def test_nocache(self):
    # A table created with the 'vtocc_nocache' comment must be excluded
    # from the row cache.
    try:
      self.env.log.reset()
      self.env.execute("create table vtocc_nocache(eid int, primary key (eid)) comment 'vtocc_nocache'")
      self.assertContains(self.env.log.read(), "Will not be cached")
    finally:
      self.env.execute("drop table vtocc_nocache")

  def test_nopk(self):
    # A table without a primary key must not be cached.
    try:
      self.env.log.reset()
      self.env.execute("create table vtocc_nocache(eid int)")
      self.assertContains(self.env.log.read(), "Will not be cached")
    finally:
      self.env.execute("drop table vtocc_nocache")

  def test_charcol(self):
    # A table keyed on a character column must not be cached.
    try:
      self.env.log.reset()
      self.env.execute("create table vtocc_nocache(eid varchar(10), primary key (eid))")
      self.assertContains(self.env.log.read(), "Will not be cached")
    finally:
      self.env.execute("drop table vtocc_nocache")

  def test_uncache(self):
    # Adding the 'vtocc_nocache' comment via ALTER must evict the table
    # from the cache; clearing the comment re-enables caching.
    try:
      # Verify row cache is working
      self.env.execute("select * from vtocc_cached where eid = 2 and bid = 'foo'")
      tstart = self.env.table_stats()["vtocc_cached"]
      self.env.execute("select * from vtocc_cached where eid = 2 and bid = 'foo'")
      tend = self.env.table_stats()["vtocc_cached"]
      self.assertEqual(tstart["Hits"]+1, tend["Hits"])
      # disable
      self.env.execute("alter table vtocc_cached comment 'vtocc_nocache'")
      self.env.execute("select * from vtocc_cached where eid = 2 and bid = 'foo'")
      try:
        tstart = self.env.table_stats()["vtocc_cached"]
      except KeyError:
        pass
      else:
        self.assertFail("Did not receive exception")
    finally:
      self.env.execute("alter table vtocc_cached comment ''")
      # Verify row cache is working again
      self.env.execute("select * from vtocc_cached where eid = 2 and bid = 'foo'")
      tstart = self.env.table_stats()["vtocc_cached"]
      self.env.execute("select * from vtocc_cached where eid = 2 and bid = 'foo'")
      tend = self.env.table_stats()["vtocc_cached"]
      self.assertEqual(tstart["Hits"]+1, tend["Hits"])

  def test_rename(self):
    # Renaming a cached table must drop its old cache entry and start a
    # fresh one under the new name.
    try:
      # Verify row cache is working
      self.env.execute("select * from vtocc_cached where eid = 2 and bid = 'foo'")
      tstart = self.env.table_stats()["vtocc_cached"]
      self.env.execute("select * from vtocc_cached where eid = 2 and bid = 'foo'")
      tend = self.env.table_stats()["vtocc_cached"]
      self.assertEqual(tstart["Hits"]+1, tend["Hits"])
      # rename
      self.env.execute("alter table vtocc_cached rename to vtocc_cached2")
      try:
        tstart = self.env.table_stats()["vtocc_cached"]
      except KeyError:
        pass
      else:
        self.assertFail("Did not receive exception")
      # Verify row cache is working
      self.env.execute("select * from vtocc_cached2 where eid = 2 and bid = 'foo'")
      tstart = self.env.table_stats()["vtocc_cached2"]
      self.env.execute("select * from vtocc_cached2 where eid = 2 and bid = 'foo'")
      tend = self.env.table_stats()["vtocc_cached2"]
      self.assertEqual(tstart["Hits"]+1, tend["Hits"])
    finally:
      # alter table so there's no hash collision when renamed
      self.env.execute("alter table vtocc_cached2 comment 'renmaed'")
      self.env.execute("rename table vtocc_cached2 to vtocc_cached")
      # Verify row cache is working again
      self.env.execute("select * from vtocc_cached where eid = 2 and bid = 'foo'")
      tstart = self.env.table_stats()["vtocc_cached"]
      self.env.execute("select * from vtocc_cached where eid = 2 and bid = 'foo'")
      tend = self.env.table_stats()["vtocc_cached"]
      self.assertEqual(tstart["Hits"]+1, tend["Hits"])

  def test_nopass(self):
    # A DML with a non-deterministic value (unix_time()) on a cached
    # table must be rejected as too complex.
    try:
      self.env.execute("begin")
      self.env.execute("insert into vtocc_cached(eid, bid, name, foo) values(unix_time(), 'foo', 'bar', 'bar')")
    except (db.MySQLErrors.DatabaseError, db.dbexceptions.OperationalError), e:
      self.assertContains(e[1], "error: DML too complex")
    else:
      self.assertFail("Did not receive exception")
    finally:
      self.env.execute("rollback")

  def test_cache_sqls(self):
    # Run the data-driven cache test cases; any failure count is fatal.
    error_count = self.env.run_cases(cache_cases.cache_cases)
    if error_count != 0:
      self.assertFail("test_execution errors: %d"%(error_count))
| Python |
import time
from vtdb import vt_occ2 as db
from vttest import framework
from vttest import nocache_cases
class TestNocache(framework.TestCase):
  def setUp(self):
    pass

  def tearDown(self):
    pass

  def set_env(self, env):
    # The test runner injects the shared environment here.
    self.env = env

  def test_data(self):
    # Field metadata, rowcount, and NULL/charset round-trips.
    cu = self.env.execute("select * from vtocc_test where intval=1")
    self.assertEqual(cu.description, [('intval', 3), ('floatval', 4), ('charval', 253), ('binval', 253)])
    self.assertEqual(cu.rowcount, 1)
    self.assertEqual(cu.fetchone(), (1, 1.12345, "\xc2\xa2", "\x00\xff"))
    cu = self.env.execute("select * from vtocc_test where intval=2")
    self.assertEqual(cu.fetchone(), (2, None, '', None))

  def test_binary(self):
    # Binary data must survive both literal escaping and bind variables.
    self.env.execute("begin")
    binary_data = '\x00\'\"\b\n\r\t\x1a\\\x00\x0f\xf0\xff'
    self.env.execute("insert into vtocc_test values(4, null, null, '\\0\\'\\\"\\b\\n\\r\\t\\Z\\\\\x00\x0f\xf0\xff')")
    bvar = {}
    bvar['bindata'] = binary_data
    self.env.execute("insert into vtocc_test values(5, null, null, %(bindata)s)", bvar)
    self.env.execute("commit")
    cu = self.env.execute("select * from vtocc_test where intval=4")
    self.assertEqual(cu.fetchone()[3], binary_data)
    cu = self.env.execute("select * from vtocc_test where intval=5")
    self.assertEqual(cu.fetchone()[3], binary_data)
    self.env.execute("begin")
    self.env.execute("delete from vtocc_test where intval in (4,5)")
    self.env.execute("commit")

  def test_simple_read(self):
    # A plain select should bump the query counters by exactly one.
    vstart = self.env.debug_vars()
    cu = self.env.execute("select * from vtocc_test limit 2")
    vend = self.env.debug_vars()
    self.assertEqual(cu.rowcount, 2)
    self.assertEqual(vstart.mget("Queries.TotalCount", 0)+1, vend.Queries.TotalCount)
    self.assertEqual(vstart.mget("Queries.Histograms.PASS_SELECT.Count", 0)+1, vend.Queries.Histograms.PASS_SELECT.Count)
    self.assertNotEqual(vend.Voltron.ConnPool.Size, 0)
  def test_commit(self):
    # Insert+delete in two committed transactions; verify rowcounts and
    # that transaction/query counters advance as expected.
    vstart = self.env.debug_vars()
    self.env.execute("begin")
    self.assertNotEqual(self.env.conn.transaction_id, 0)
    self.env.execute("insert into vtocc_test (intval, floatval, charval, binval) values(4, null, null, null)")
    self.env.execute("commit")
    cu = self.env.execute("select * from vtocc_test")
    self.assertEqual(cu.rowcount, 4)
    self.env.execute("begin")
    self.env.execute("delete from vtocc_test where intval=4")
    self.env.execute("commit")
    cu = self.env.execute("select * from vtocc_test")
    self.assertEqual(cu.rowcount, 3)
    vend = self.env.debug_vars()
    # We should have at least one connection
    self.assertNotEqual(vend.Voltron.TxPool.Size, 0)
    self.assertEqual(vstart.mget("Transactions.TotalCount", 0)+2, vend.Transactions.TotalCount)
    self.assertEqual(vstart.mget("Transactions.Histograms.Completed.Count", 0)+2, vend.Transactions.Histograms.Completed.Count)
    self.assertEqual(vstart.mget("Queries.TotalCount", 0)+4, vend.Queries.TotalCount)
    self.assertEqual(vstart.mget("Queries.Histograms.PLAN_INSERT_PK.Count", 0)+1, vend.Queries.Histograms.PLAN_INSERT_PK.Count)
    self.assertEqual(vstart.mget("Queries.Histograms.DML_PK.Count", 0)+1, vend.Queries.Histograms.DML_PK.Count)
    self.assertEqual(vstart.mget("Queries.Histograms.PASS_SELECT.Count", 0)+2, vend.Queries.Histograms.PASS_SELECT.Count)

  def test_integrity_error(self):
    # A duplicate-key insert must surface MySQL error 1062 and bump the
    # DupKey error counter.
    vstart = self.env.debug_vars()
    self.env.execute("begin")
    try:
      self.env.execute("insert into vtocc_test values(1, null, null, null)")
    except (db.MySQLErrors.DatabaseError, db.dbexceptions.OperationalError), e:
      self.assertEqual(e[0], 1062)
      self.assertContains(e[1], "error: Duplicate")
    else:
      self.assertFail("Did not receive exception")
    finally:
      self.env.execute("rollback")
    vend = self.env.debug_vars()
    self.assertEqual(vstart.mget("Errors.DupKey", 0)+1, vend.Errors.DupKey)

  def test_rollback(self):
    # A rolled-back insert must not change the table and must count as
    # an aborted transaction.
    vstart = self.env.debug_vars()
    self.env.execute("begin")
    self.assertNotEqual(self.env.conn.transaction_id, 0)
    self.env.execute("insert into vtocc_test values(4, null, null, null)")
    self.env.execute("rollback")
    cu = self.env.execute("select * from vtocc_test")
    self.assertEqual(cu.rowcount, 3)
    vend = self.env.debug_vars()
    self.assertNotEqual(vend.Voltron.TxPool.Size, 0)
    self.assertEqual(vstart.mget("Transactions.TotalCount", 0)+1, vend.Transactions.TotalCount)
    self.assertEqual(vstart.mget("Transactions.Histograms.Aborted.Count", 0)+1, vend.Transactions.Histograms.Aborted.Count)

  def test_nontx_dml(self):
    # DML outside a transaction must be rejected and counted as a failure.
    vstart = self.env.debug_vars()
    try:
      self.env.execute("insert into vtocc_test values(4, null, null, null)")
    except (db.MySQLErrors.DatabaseError, db.dbexceptions.OperationalError), e:
      self.assertContains(e[1], "error: DMLs")
    else:
      self.assertFail("Did not receive exception")
    vend = self.env.debug_vars()
    self.assertEqual(vstart.mget("Errors.Fail", 0)+1, vend.Errors.Fail)

  def test_trailing_comment(self):
    # Queries differing only by a trailing comment must share one query
    # cache entry.
    vstart = self.env.debug_vars()
    bv={}
    bv["ival"] = 1
    self.env.execute("select * from vtocc_test where intval=%(ival)s", bv)
    vend = self.env.debug_vars()
    self.assertEqual(vstart.mget("Voltron.QueryCache.Length", 0)+1, vend.Voltron.QueryCache.Length)
    # This should not increase the query cache size
    self.env.execute("select * from vtocc_test where intval=%(ival)s /* trailing comment */", bv)
    vend = self.env.debug_vars()
    self.assertEqual(vstart.mget("Voltron.QueryCache.Length", 0)+1, vend.Voltron.QueryCache.Length)
  def test_for_update(self):
    # SELECT ... FOR UPDATE is only allowed inside a transaction.
    try:
      self.env.execute("select * from vtocc_test where intval=2 for update")
    except (db.MySQLErrors.DatabaseError, db.dbexceptions.OperationalError), e:
      self.assertContains(e[1], "error: Disallowed")
    else:
      self.assertFail("Did not receive exception")
    # If these throw no exceptions, we're good
    self.env.execute("begin")
    self.env.execute("select * from vtocc_test where intval=2 for update")
    self.env.execute("commit")
    # Make sure the row is not locked for read
    self.env.execute("select * from vtocc_test where intval=2")

  def test_pool_size(self):
    # With the pool capped at 1, a long-running query must force the
    # next query to wait (and eventually fail), bumping WaitCount.
    vstart = self.env.debug_vars()
    self.env.execute("set vt_pool_size=1")
    try:
      self.env.execute("select sleep(3) from dual")
    except (db.MySQLErrors.DatabaseError, db.dbexceptions.OperationalError):
      pass
    else:
      self.assertFail("Did not receive exception")
    self.env.execute("select 1 from dual")
    vend = self.env.debug_vars()
    self.assertEqual(vend.Voltron.ConnPool.Capacity, 1)
    self.assertEqual(vstart.Voltron.ConnPool.WaitCount+1, vend.Voltron.ConnPool.WaitCount)
    self.env.execute("set vt_pool_size=16")
    vend = self.env.debug_vars()
    self.assertEqual(vend.Voltron.ConnPool.Capacity, 16)

  def test_transaction_cap(self):
    # With the transaction cap at 1, a second concurrent begin must fail.
    vstart = self.env.debug_vars()
    self.env.execute("set vt_transaction_cap=1")
    co2 = self.env.connect()
    self.env.execute("begin")
    try:
      cu2 = co2.cursor()
      cu2.execute("begin", {})
    except (db.MySQLErrors.DatabaseError, db.dbexceptions.OperationalError), e:
      self.assertContains(e[1], "error: Transaction")
    else:
      self.assertFail("Did not receive exception")
    finally:
      cu2.close()
      co2.close()
    self.env.execute("commit")
    vend = self.env.debug_vars()
    self.assertEqual(vend.Voltron.TxPool.Capacity, 1)
    self.env.execute("set vt_transaction_cap=20")
    vend = self.env.debug_vars()
    self.assertEqual(vend.Voltron.TxPool.Capacity, 20)

  def test_transaction_timeout(self):
    # A transaction idle past vt_transaction_timeout must be killed, so
    # its commit fails and the kill counter advances.
    vstart = self.env.debug_vars()
    self.env.execute("set vt_transaction_timeout=1")
    self.env.execute("begin")
    time.sleep(2)
    try:
      self.env.execute("commit")
    except (db.MySQLErrors.DatabaseError, db.dbexceptions.OperationalError), e:
      self.assertContains(e[1], "error: Transaction")
    else:
      self.assertFail("Did not receive exception")
    vend = self.env.debug_vars()
    self.assertEqual(vend.Voltron.ActiveTxPool.Timeout, 1)
    self.assertEqual(vstart.mget("Kills.Transactions", 0)+1, vend.Kills.Transactions)
    self.env.execute("set vt_transaction_timeout=30")
    vend = self.env.debug_vars()
    self.assertEqual(vend.Voltron.ActiveTxPool.Timeout, 30)

  def test_query_cache(self):
    # The query cache must respect vt_query_cache_size: with capacity 1
    # a second distinct query evicts the first; growing the capacity
    # lets both live.
    vstart = self.env.debug_vars()
    self.env.execute("set vt_query_cache_size=1")
    bv={}
    bv["ival1"] = 1
    self.env.execute("select * from vtocc_test where intval=%(ival1)s", bv)
    bv["ival2"] = 1
    self.env.execute("select * from vtocc_test where intval=%(ival2)s", bv)
    vend = self.env.debug_vars()
    self.assertEqual(vend.Voltron.QueryCache.Length, 1)
    self.assertEqual(vend.Voltron.QueryCache.Size, 1)
    self.assertEqual(vend.Voltron.QueryCache.Capacity, 1)
    self.env.execute("set vt_query_cache_size=5000")
    self.env.execute("select * from vtocc_test where intval=%(ival1)s", bv)
    vend = self.env.debug_vars()
    self.assertEqual(vend.Voltron.QueryCache.Length, 2)
    self.assertEqual(vend.Voltron.QueryCache.Size, 2)
    self.assertEqual(vend.Voltron.QueryCache.Capacity, 5000)
def test_schema_reload_time(self):
    """A table created behind vtocc's back becomes visible after a schema reload."""
    mcu = self.env.mysql_conn.cursor()
    # Create the table directly on MySQL, bypassing vtocc.
    mcu.execute("create table vtocc_temp(intval int)")
    # This should cause a reload
    self.env.execute("set vt_schema_reload_time=600")
    try:
        # Poll for up to ~10s: until the reload lands, vtocc still rejects
        # the table as unknown.
        for i in range(10):
            try:
                self.env.execute("select * from vtocc_temp")
            except db.MySQLErrors.DatabaseError, e:
                self.assertContains(e[1], "not found in schema")
                time.sleep(1)
            else:
                break
        # Should not throw an exception
        self.env.execute("select * from vtocc_temp")
    finally:
        # Always drop the table (again directly on MySQL), even on failure.
        mcu.execute("drop table vtocc_temp")
        mcu.close()
def test_max_result_size(self):
    """Queries returning more rows than vt_max_result_size must error out."""
    self.env.execute("set vt_max_result_size=2")
    vend = self.env.debug_vars()
    self.assertEqual(vend.Voltron.MaxResultSize, 2)
    try:
        # vtocc_test is assumed to hold more than 2 rows here -- the
        # unrestricted select must trip the row limit.
        self.env.execute("select * from vtocc_test")
    except (db.MySQLErrors.DatabaseError, db.dbexceptions.OperationalError), e:
        self.assertContains(e[1], "error: Row")
    else:
        self.assertFail("Did not receive exception")
    # Restore a large limit and confirm it took effect.
    self.env.execute("set vt_max_result_size=10000")
    vend = self.env.debug_vars()
    self.assertEqual(vend.Voltron.MaxResultSize, 10000)
def test_query_timeout(self):
    """A query running past vt_query_timeout is killed and its transaction aborted.

    Uses a dedicated connection so the kill does not disturb self.env.conn,
    then checks the transaction is unusable, close() reports the abort, and
    the kill counter advanced.
    """
    vstart = self.env.debug_vars()
    # Second argument presumably a timeout (TestEnv.connect passes 2 in the
    # same position) -- TODO confirm against the vt_occ2 API.
    conn = db.connect("localhost:9461", 5, dbname=self.env.cfg['dbname'])
    cu = conn.cursor()
    self.env.execute("set vt_query_timeout=1")
    try:
        cu.execute("begin", {})
        cu.execute("select sleep(2) from vtocc_test", {})
    except (db.MySQLErrors.DatabaseError, db.dbexceptions.OperationalError), e:
        # Either the query is killed server-side or the connection drops;
        # both count as the expected kill.
        if "error: Query" not in e[1] and "error: Lost connection" not in e[1]:
            print e[1]
            self.assertFail("Query not killed as expected")
    else:
        self.assertFail("Did not receive exception")
    # The enclosing transaction must now be unusable.
    try:
        cu.execute("select 1 from dual", {})
    except (db.MySQLErrors.DatabaseError, db.dbexceptions.OperationalError), e:
        self.assertContains(e[1], "error: Transaction")
    else:
        self.assertFail("Did not receive exception")
    # Even closing the cursor/connection reports the aborted transaction.
    try:
        cu.close()
        conn.close()
    except (db.MySQLErrors.DatabaseError, db.dbexceptions.OperationalError), e:
        self.assertContains(str(e), "error: Transaction")
    else:
        self.assertFail("Did not receive exception")
    vend = self.env.debug_vars()
    self.assertEqual(vend.Voltron.ActivePool.Timeout, 1)
    # Exactly one extra query kill relative to the starting counters.
    self.assertEqual(vstart.mget("Kills.Queries", 0)+1, vend.Kills.Queries)
    # Restore a long timeout and confirm it took effect.
    self.env.execute("set vt_query_timeout=30")
    vend = self.env.debug_vars()
    self.assertEqual(vend.Voltron.ActivePool.Timeout, 30)
def test_idle_timeout(self):
    """Verify vt_idle_timeout is applied to both the conn and tx pools.

    Sleeps past the 1s timeout so idle connections get recycled, then
    checks a fresh query still works and the setting round-trips.
    """
    # NOTE: the original fetched debug_vars() into an unused `vstart`;
    # that dead RPC has been removed.
    self.env.execute("set vt_idle_timeout=1")
    time.sleep(2)
    # A query after the idle period must still succeed (pool refreshes).
    self.env.execute("select 1 from dual")
    vend = self.env.debug_vars()
    self.assertEqual(vend.Voltron.ConnPool.IdleTimeout, 1)
    self.assertEqual(vend.Voltron.TxPool.IdleTimeout, 1)
    # Restore the default-ish timeout and confirm it took effect.
    self.env.execute("set vt_idle_timeout=1800")
    vend = self.env.debug_vars()
    self.assertEqual(vend.Voltron.ConnPool.IdleTimeout, 1800)
    self.assertEqual(vend.Voltron.TxPool.IdleTimeout, 1800)
def test_consolidation(self):
    """Two identical slow queries should consolidate into one wait."""
    before = self.env.debug_vars()
    # Issue the same slow query twice; errors are irrelevant here, only
    # the wait counters matter.
    for _ in range(2):
        try:
            self.env.execute("select sleep(3) from dual")
        except (db.MySQLErrors.DatabaseError, db.dbexceptions.OperationalError):
            pass
    after = self.env.debug_vars()
    # Exactly one consolidation wait should have been recorded.
    self.assertEqual(before.mget("Waits.TotalCount", 0)+1, after.Waits.TotalCount)
    self.assertEqual(before.mget("Waits.Histograms.Consolidations.Count", 0)+1, after.Waits.Histograms.Consolidations.Count)
def test_sqls(self):
    """Run the full nocache case list; fail if any case misbehaved."""
    error_count = self.env.run_cases(nocache_cases.nocache_cases)
    if error_count:
        self.assertFail("test_execution errors: %d"%(error_count))
| Python |
#!/usr/bin/env python
# Copyright 2012, Google Inc.
# All rights reserved.
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import json
import optparse
import os
import subprocess
import sys
import time
import urllib2
import MySQLdb as mysql
from vtdb import vt_occ2 as db
from vtdb import dbexceptions
from vttest import framework
from vttest import cache_tests
from vttest import nocache_tests
# Command-line interface for the vtocc test driver.
parser = optparse.OptionParser(usage="usage: %prog [options]")
parser.add_option("-v", "--verbose", action="store_true", dest="verbose", default=False)
parser.add_option("-t", "--testcase", action="store", dest="testcase", default=None,
                  help="Run a single named test")
parser.add_option("-c", "--dbconfig", action="store", dest="dbconfig", default="dbtest.json",
                  help="json db config file")
(options, args) = parser.parse_args()
# Log files written by the vtocc server under test; TestEnv tails them
# to verify query rewrites.
LOGFILE = "/tmp/vtocc.log"
QUERYLOGFILE = "/tmp/vtocc_queries.log"
class TestEnv(object):
def setUp(self):
vttop = os.getenv("VTTOP")
if vttop is None:
raise Exception("VTTOP not defined")
occpath = vttop+"/go/cmd/vtocc/"
with open(options.dbconfig) as f:
self.cfg = json.load(f)
self.mysql_conn = self.mysql_connect(self.cfg)
mcu = self.mysql_conn.cursor()
self.clean_sqls = []
self.init_sqls = []
clean_mode = False
with open("test_schema.sql") as f:
for line in f:
line = line.rstrip()
if line == "# clean":
clean_mode = True
if line=='' or line.startswith("#"):
continue
if clean_mode:
self.clean_sqls.append(line)
else:
self.init_sqls.append(line)
try:
for line in self.init_sqls:
mcu.execute(line, {})
finally:
mcu.close()
if self.cfg.get("memcache"):
self.memcached = subprocess.Popen(["memcached", "-s", self.cfg["memcache"]])
occ_args = [
vttop+"/go/cmd/vtocc/vtocc",
"-config", "occ.json",
"-dbconfig", options.dbconfig,
"-logfile", LOGFILE,
"-querylog", QUERYLOGFILE,
]
self.vtstderr = open("/tmp/vtocc_stderr.log", "a+")
self.vtocc = subprocess.Popen(occ_args, stderr=self.vtstderr)
for i in range(30):
try:
self.conn = self.connect()
self.querylog = framework.Tailer(open(QUERYLOGFILE, "r"))
self.log = framework.Tailer(open(LOGFILE, "r"))
return
except dbexceptions.OperationalError:
if i == 29:
raise
time.sleep(1)
def tearDown(self):
try:
mcu = self.mysql_conn.cursor()
for line in self.clean_sqls:
try:
mcu.execute(line, {})
except:
pass
mcu.close()
except:
pass
if getattr(self, "vtocc", None):
self.vtocc.terminate()
if getattr(self, "vtstderr", None):
self.vtstderr.close()
if getattr(self, "memcached", None):
self.memcached.terminate()
def mysql_connect(self, cfg):
return mysql.connect(
host=cfg.get('host', ''),
user=cfg.get('uname', ''),
passwd=cfg.get('pass', ''),
port=cfg.get('port', 0),
db=cfg.get('dbname'),
unix_socket=cfg.get('unix_socket', ''),
charset=cfg.get('charset', ''))
def connect(self):
return db.connect("localhost:9461", 2, dbname=self.cfg.get('dbname', None))
def execute(self, query, binds=None):
if binds is None:
binds = {}
curs = self.conn.cursor()
curs.execute(query, binds)
return curs
def debug_vars(self):
return framework.MultiDict(json.load(urllib2.urlopen("http://localhost:9461/debug/vars")))
def table_stats(self):
return framework.MultiDict(json.load(urllib2.urlopen("http://localhost:9461/debug/schema/tables")))
def run_cases(self, cases):
curs = self.conn.cursor()
error_count = 0
count = 0
for case in cases:
if options.verbose:
print case[0]
count += 1
if len(case) == 5:
tstart = self.table_stats()[case[4][0]]
if len(case) == 1:
curs.execute(case[0])
continue
self.querylog.reset()
curs.execute(case[0], case[1])
if len(case) == 2:
continue
if case[2] is not None:
results = []
for row in curs:
results.append(row)
if results != case[2]:
print "Function: run_cases(%d): FAIL: %s:\n%s\n%s"%(count, case[0], case[2], results)
error_count += 1
if len(case) == 3:
continue
if case[3] is not None:
querylog = normalizelog(self.querylog.read())
if querylog != case[3]:
print "Function: run_cases(%d): FAIL: %s:\n%s\n%s"%(count, case[0], case[3], querylog)
error_count += 1
if len(case) == 4:
continue
tend = self.table_stats()[case[4][0]]
if tstart["Hits"]+case[4][1] != tend["Hits"]:
print "Function: run_cases(%d): FAIL: %s:\nHits: %s!=%s"%(count, case[0], tstart["Hits"]+case[4][1], tend["Hits"])
error_count += 1
if tstart["Absent"]+case[4][2] != tend["Absent"]:
print "Function: run_cases(%d): FAIL: %s:\nAbsent: %s!=%s"%(count, case[0], tstart["Absent"]+case[4][2], tend["Absent"])
error_count += 1
if tstart["Misses"]+case[4][3] != tend["Misses"]:
print "Function: run_cases(%d): FAIL: %s:\nMisses: %s!=%s"%(count, case[0], tstart["Misses"]+case[4][3], tend["Misses"])
error_count += 1
if tstart["Invalidations"]+case[4][4] != tend["Invalidations"]:
print "Function: run_cases(%d): FAIL: %s:\nInvalidations: %s!=%s"%(count, case[0], tstart["Invalidations"]+case[4][4], tend["Invalidations"])
error_count += 1
return error_count
def normalizelog(data):
    """Strip glog-style prefixes from a log dump.

    Returns, for each line of `data` that contains "INFO: ", the text
    after that marker; lines without the marker are dropped.
    """
    marker = "INFO: "
    return [line[line.find(marker) + len(marker):]
            for line in data.split("\n")
            if marker in line]
# Top-level driver: bring up the environment, run the nocache suite, and,
# if a memcached was configured, the row-cache suite too. tearDown always
# runs so child processes are reaped even on failure.
env = TestEnv()
try:
    env.setUp()
    try:
        t = nocache_tests.TestNocache(options.testcase, options.verbose)
        t.set_env(env)
        t.run()
    except KeyError:
        # NOTE(review): presumably raised when --testcase names a test that
        # does not exist in this suite -- confirm against the framework.
        pass
    # memcached is only set when the config enables the row cache.
    if getattr(env, "memcached", None):
        print "Testing row cache"
        t = cache_tests.TestCache(options.testcase, options.verbose)
        t.set_env(env)
        t.run()
finally:
    env.tearDown()
| Python |
# Copyright 2012, Google Inc.
# All rights reserved.
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# Row-cache test cases, consumed by TestEnv.run_cases. Each case is a
# list of 1 to 5 elements:
#   [query, bind_vars, expected_rows, expected_rewritten_queries,
#    [table, hits_delta, absent_delta, misses_delta, invalidations_delta]]
# A one-element case is executed without any verification. The trailing
# "# (x.y)" comments track which (eid.bid) rows are expected to be in the
# row cache after each case runs.
cache_cases = [
  ["alter table vtocc_cached comment 'new'"],
  # SELECT_PK (empty cache)
  [
    "select * from vtocc_cached where eid = 2 and bid = 'foo'", {},
    [(2, 'foo', 'abcd2', 'efgh')],
    ["select eid, bid, name, foo from vtocc_cached where eid = 2 and bid = 'foo'"],
    ['vtocc_cached', 0, 0, 1, 0],
  ], # (2.foo) is in cache
  # SELECT_PK, use cache
  [
    "select bid, eid, name, foo from vtocc_cached where eid = 2 and bid = 'foo'", {},
    [('foo', 2, 'abcd2', 'efgh')],
    [],
    ['vtocc_cached', 1, 0, 0, 0],
  ], # (2.foo)
  # SELECT_PK, absent
  [
    "select bid, eid, name, foo from vtocc_cached where eid = 3 and bid = 'foo'", {},
    [],
    ["select eid, bid, name, foo from vtocc_cached where eid = 3 and bid = 'foo'"],
    ['vtocc_cached', 0, 1, 0, 0],
  ], # (2.foo)
  # SELECT_PK, number as string
  [
    "select bid, eid, name, foo from vtocc_cached where eid = '0x2' and bid = 'foo'", {},
    [('foo', 2, 'abcd2', 'efgh')],
    [],
    ['vtocc_cached', 1, 0, 0, 0],
  ], # (2.foo)
  # SELECT_SUBQUERY (2.foo)
  [
    "select * from vtocc_cached where eid = 2 and name = 'abcd2'", {},
    [(2L, 'bar', 'abcd2', 'efgh'), (2L, 'foo', 'abcd2', 'efgh')],
    [
      "select eid, bid from vtocc_cached use index (aname) where eid = 2 and name = 'abcd2' limit 10001",
      "select eid, bid, name, foo from vtocc_cached where eid = 2 and bid = 'bar'"
    ],
    ['vtocc_cached', 1, 0, 1, 0],
  ], # (2.bar, 2.foo)
  # SELECT_SUBQUERY (2.foo, 2.bar)
  [
    "select * from vtocc_cached where eid = 2 and name = 'abcd2'", {},
    [(2L, 'bar', 'abcd2', 'efgh'), (2L, 'foo', 'abcd2', 'efgh')],
    ["select eid, bid from vtocc_cached use index (aname) where eid = 2 and name = 'abcd2' limit 10001"],
    ['vtocc_cached', 2, 0, 0, 0],
  ], # (2.bar, 2.foo)
  # out of order columns list
  [
    "select bid, eid from vtocc_cached where eid = 1 and bid = 'foo'", {},
    [('foo', 1)],
    ["select eid, bid, name, foo from vtocc_cached where eid = 1 and bid = 'foo'"],
    ['vtocc_cached', 0, 0, 1, 0],
  ], # (1.foo, 2.bar, 2.foo)
  # out of order columns list, use cache
  [
    "select bid, eid from vtocc_cached where eid = 1 and bid = 'foo'", {},
    [('foo', 1)],
    [],
    ['vtocc_cached', 1, 0, 0, 0],
  ], # (1.foo, 2.bar, 2.foo)
  # SELECT_CACHE_RESULT (it currently doesn't cache)
  ['select * from vtocc_cached'],
  [
    "select eid, bid, name, foo from vtocc_cached", {},
    None,
    ["select eid, bid, name, foo from vtocc_cached limit 10001"],
    ['vtocc_cached', 0, 0, 0, 0],
  ], # (1.foo, 2.bar, 2.foo)
  # verify 1.bar is not cached
  [
    "select bid, eid from vtocc_cached where eid = 1 and bid = 'bar'", {},
    [('bar', 1)],
    ["select eid, bid, name, foo from vtocc_cached where eid = 1 and bid = 'bar'"],
    ['vtocc_cached', 0, 0, 1, 0],
  ], # (1.foo, 1.bar, 2.foo, 2.bar)
  # update
  ['begin'],
  ["update vtocc_cached set foo='fghi' where bid = 'bar'"],
  [
    "commit", {}, None, None,
    ['vtocc_cached', 0, 0, 0, 2],
  ],
  [
    "select * from vtocc_cached where eid = 2 and name = 'abcd2'", {},
    [(2L, 'bar', 'abcd2', 'fghi'), (2L, 'foo', 'abcd2', 'efgh')],
    [
      "select eid, bid from vtocc_cached use index (aname) where eid = 2 and name = 'abcd2' limit 10001",
      "select eid, bid, name, foo from vtocc_cached where eid = 2 and bid = 'bar'"
    ],
    ['vtocc_cached', 1, 0, 1, 0],
  ], # (1.foo, 2.foo, 2.bar)
  # Verify cache
  ["select sleep(0.2) from dual"],
  [
    "select * from vtocc_cached where eid = 2 and name = 'abcd2'", {},
    [(2L, 'bar', 'abcd2', 'fghi'), (2L, 'foo', 'abcd2', 'efgh')],
    ["select eid, bid from vtocc_cached use index (aname) where eid = 2 and name = 'abcd2' limit 10001"],
    ['vtocc_cached', 2, 0, 0, 0],
  ], # (1.foo, 2.bar, 2.foo)
  # this will use the cache
  [
    "select * from vtocc_cached where eid = 2 and name = 'abcd2'", {},
    [(2L, 'bar', 'abcd2', 'fghi'), (2L, 'foo', 'abcd2', 'efgh')],
    ["select eid, bid from vtocc_cached use index (aname) where eid = 2 and name = 'abcd2' limit 10001"],
    ['vtocc_cached', 2, 0, 0, 0],
  ], # (1.foo, 2.bar, 2.foo)
  # this will not invalidate the cache
  ['begin'],
  ["update vtocc_cached set foo='fghi' where bid = 'bar'"],
  ["rollback"],
  [
    "select * from vtocc_cached where eid = 2 and name = 'abcd2'", {},
    [(2L, 'bar', 'abcd2', 'fghi'), (2L, 'foo', 'abcd2', 'efgh')],
    ["select eid, bid from vtocc_cached use index (aname) where eid = 2 and name = 'abcd2' limit 10001"],
    ['vtocc_cached', 2, 0, 0, 0],
  ], # (1.foo, 2.bar, 2.foo)
  # delete
  ['begin'],
  ["delete from vtocc_cached where eid = 2 and bid = 'bar'"],
  [
    "commit", {}, None, None,
    ['vtocc_cached', 0, 0, 0, 1],
  ],
  [
    "select * from vtocc_cached where eid = 2 and name = 'abcd2'", {},
    [(2L, 'foo', 'abcd2', 'efgh')],
    ["select eid, bid from vtocc_cached use index (aname) where eid = 2 and name = 'abcd2' limit 10001"],
    ['vtocc_cached', 1, 0, 0, 0],
  ],
  ["begin"],
  ["insert into vtocc_cached(eid, bid, name, foo) values (2, 'bar', 'abcd2', 'efgh')"],
  [
    "commit", {}, None, None,
    ['vtocc_cached', 0, 0, 0, 1],
  ], # (1.foo, 2.foo)
  # insert on dup key
  ['begin'],
  ["insert into vtocc_cached(eid, bid, name, foo) values (2, 'foo', 'abcd2', 'efgh') on duplicate key update foo='fghi'"],
  [
    "commit", {}, None, None,
    ['vtocc_cached', 0, 0, 0, 1],
  ],
  [
    "select * from vtocc_cached where eid = 2 and name = 'abcd2'", {},
    [(2L, 'bar', 'abcd2', 'efgh'), (2L, 'foo', 'abcd2', 'fghi')],
    [
      "select eid, bid from vtocc_cached use index (aname) where eid = 2 and name = 'abcd2' limit 10001",
      "select eid, bid, name, foo from vtocc_cached where eid = 2 and bid = 'bar'",
      "select eid, bid, name, foo from vtocc_cached where eid = 2 and bid = 'foo'"
    ],
    ['vtocc_cached', 0, 0, 2, 0],
  ], # (1.foo)
  # Verify 1.foo is in cache
  [
    "select * from vtocc_cached where eid = 1 and bid = 'foo'", {},
    [(1, 'foo', 'abcd1', 'efgh')],
    [],
    ['vtocc_cached', 1, 0, 0, 0],
  ], # (1.foo) is in cache
  # DDL
  ["alter table vtocc_cached comment 'test'"],
  # Verify cache is empty
  [
    "select * from vtocc_cached where eid = 1 and bid = 'foo'", {},
    [(1, 'foo', 'abcd1', 'efgh')],
    ["select eid, bid, name, foo from vtocc_cached where eid = 1 and bid = 'foo'"],
    ['vtocc_cached', 0, 0, 1, 0],
  ], # (1.foo)
  # Verify row is cached
  [
    "select * from vtocc_cached where eid = 1 and bid = 'foo'", {},
    [(1, 'foo', 'abcd1', 'efgh')],
    [],
    ['vtocc_cached', 1, 0, 0, 0],
  ], # (1.foo)
]
| Python |
# Copyright 2012, Google Inc.
# All rights reserved.
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
| Python |
# Copyright 2012, Google Inc.
# All rights reserved.
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
nocache_cases = [
# union
[
'select /* union */ eid, id from vtocc_a union select eid, id from vtocc_b', {},
[(1L, 1L), (1L, 2L)],
['select /* union */ eid, id from vtocc_a union select eid, id from vtocc_b'],
],
# distinct
[
'select /* distinct */ distinct * from vtocc_a', {},
[(1L, 1L, 'abcd', 'efgh'), (1L, 2L, 'bcde', 'fghi')],
['select /* distinct */ distinct * from vtocc_a limit 10001'],
],
# group by
[
'select /* group by */ eid, sum(id) from vtocc_a group by eid', {},
[(1L, 3L)],
['select /* group by */ eid, sum(id) from vtocc_a group by eid limit 10001'],
],
# having
[
'select /* having */ sum(id) from vtocc_a having sum(id) = 3', {},
[(3L,)],
['select /* having */ sum(id) from vtocc_a having sum(id) = 3 limit 10001'],
],
# limit
[
'select /* limit */ eid, id from vtocc_a limit %(a)s', {"a": 1},
[(1L, 1L)],
['select /* limit */ eid, id from vtocc_a limit 1'],
],
# multi-table
[
'select /* multi-table */ a.eid, a.id, b.eid, b.id from vtocc_a as a, vtocc_b as b', {},
[(1L, 1L, 1L, 1L), (1L, 2L, 1L, 1L), (1L, 1L, 1L, 2L), (1L, 2L, 1L, 2L)],
['select /* multi-table */ a.eid, a.id, b.eid, b.id from vtocc_a as a, vtocc_b as b limit 10001'],
],
# multi-table join
[
'select /* multi-table join */ a.eid, a.id, b.eid, b.id from vtocc_a as a join vtocc_b as b on a.eid = b.eid and a.id = b.id', {},
[(1L, 1L, 1L, 1L), (1L, 2L, 1L, 2L)],
['select /* multi-table join */ a.eid, a.id, b.eid, b.id from vtocc_a as a join vtocc_b as b on a.eid = b.eid and a.id = b.id limit 10001'],
],
# complex select list
[
'select /* complex select list */ eid+1, id from vtocc_a', {},
[(2L, 1L), (2L, 2L)],
['select /* complex select list */ eid+1, id from vtocc_a limit 10001'],
],
# *
[
'select /* * */ * from vtocc_a', {},
[(1L, 1L, 'abcd', 'efgh'), (1L, 2L, 'bcde', 'fghi')],
['select /* * */ * from vtocc_a limit 10001'],
],
# table alias
[
'select /* table alias */ a.eid from vtocc_a as a where a.eid=1', {},
[(1L,), (1L,)],
['select /* table alias */ a.eid from vtocc_a as a where a.eid = 1 limit 10001'],
],
# parenthesised col
[
'select /* parenthesised col */ (eid) from vtocc_a where eid = 1 and id = 1', {},
[(1L,)],
['select /* parenthesised col */ eid from vtocc_a where eid = 1 and id = 1 limit 10001'],
],
# for update
['begin'],
[
'select /* for update */ eid from vtocc_a where eid = 1 and id = 1 for update', {},
[(1L,)],
['select /* for update */ eid from vtocc_a where eid = 1 and id = 1 limit 10001 for update'],
],
['commit'],
# complex where
[
'select /* complex where */ id from vtocc_a where id+1 = 2', {},
[(1L,)],
['select /* complex where */ id from vtocc_a where id+1 = 2 limit 10001'],
],
# complex where (non-value operand)
[
'select /* complex where (non-value operand) */ eid, id from vtocc_a where eid = id', {},
[(1L, 1L)],
['select /* complex where (non-value operand) */ eid, id from vtocc_a where eid = id limit 10001'],
],
# (condition)
[
'select /* (condition) */ * from vtocc_a where (eid = 1)', {},
[(1L, 1L, 'abcd', 'efgh'), (1L, 2L, 'bcde', 'fghi')],
['select /* (condition) */ * from vtocc_a where (eid = 1) limit 10001'],
],
# inequality
[
'select /* inequality */ * from vtocc_a where id > 1', {},
[(1L, 2L, 'bcde', 'fghi')],
['select /* inequality */ * from vtocc_a where id > 1 limit 10001'],
],
# in
[
'select /* in */ * from vtocc_a where id in (1, 2)', {},
[(1L, 1L, 'abcd', 'efgh'), (1L, 2L, 'bcde', 'fghi')],
['select /* in */ * from vtocc_a where id in (1, 2) limit 10001'],
],
# between
[
'select /* between */ * from vtocc_a where id between 1 and 2', {},
[(1L, 1L, 'abcd', 'efgh'), (1L, 2L, 'bcde', 'fghi')],
['select /* between */ * from vtocc_a where id between 1 and 2 limit 10001'],
],
# order
[
'select /* order */ * from vtocc_a order by id desc', {},
[(1L, 2L, 'bcde', 'fghi'), (1L, 1L, 'abcd', 'efgh')],
['select /* order */ * from vtocc_a order by id desc limit 10001'],
],
# simple insert
['begin'],
[
"insert /* simple */ into vtocc_a values (2, 1, 'aaaa', 'bbbb')", {},
[],
["insert /* simple */ into vtocc_a values (2, 1, 'aaaa', 'bbbb') /* _stream vtocc_a (eid id ) (2 1 ); */"],
],
['commit'],
['select * from vtocc_a where eid = 2 and id = 1', {}, [(2L, 1L, 'aaaa', 'bbbb')]],
['begin'], ['delete from vtocc_a where eid>1'], ['commit'],
# qualified insert
['begin'],
[
"insert /* qualified */ into vtocc_a(eid, id, name, foo) values (3, 1, 'aaaa', 'cccc')", {},
[],
["insert /* qualified */ into vtocc_a(eid, id, name, foo) values (3, 1, 'aaaa', 'cccc') /* _stream vtocc_a (eid id ) (3 1 ); */"],
],
['commit'],
['select * from vtocc_a where eid = 3 and id = 1', {}, [(3L, 1L, 'aaaa', 'cccc')]],
['begin'], ['delete from vtocc_a where eid>1'], ['commit'],
# bind values
['begin'],
[
"insert /* bind values */ into vtocc_a(eid, id, name, foo) values (%(eid)s, %(id)s, %(name)s, %(foo)s)",
{"eid": 4, "id": 1, "name": "aaaa", "foo": "cccc"},
[],
["insert /* bind values */ into vtocc_a(eid, id, name, foo) values (4, 1, 'aaaa', 'cccc') /* _stream vtocc_a (eid id ) (4 1 ); */"],
],
['commit'],
['select * from vtocc_a where eid = 4 and id = 1', {}, [(4L, 1L, 'aaaa', 'cccc')]],
['begin'], ['delete from vtocc_a where eid>1'], ['commit'],
# out of sequence columns
['begin'],
[
"insert into vtocc_a(id, eid, foo, name) values (-1, 5, 'aaa', 'bbb')", {},
[],
["insert into vtocc_a(id, eid, foo, name) values (-1, 5, 'aaa', 'bbb') /* _stream vtocc_a (eid id ) (5 -1 ); */"],
],
['commit'],
['select * from vtocc_a where eid = 5 and id = -1', {}, [(5L, -1L, 'bbb', 'aaa')]],
['begin'], ['delete from vtocc_a where eid>1'], ['commit'],
# numbers as strings
['begin'],
[
"insert into vtocc_a(id, eid, foo, name) values (%(id)s, '6', 111, 222)", { "id": "1"},
[],
["insert into vtocc_a(id, eid, foo, name) values ('1', '6', 111, 222) /* _stream vtocc_a (eid id ) (6 1 ); */"],
],
['commit'],
['select * from vtocc_a where eid = 6 and id = 1', {}, [(6L, 1L, '222', '111')]],
['begin'], ['delete from vtocc_a where eid>1'], ['commit'],
# strings as numbers
['begin'],
[
"insert into vtocc_c(name, eid, foo) values (%(name)s, '9', 'aaa')", { "name": "bbb"},
[],
["insert into vtocc_c(name, eid, foo) values ('bbb', '9', 'aaa') /* _stream vtocc_c (eid name ) (9 'YmJi' ); */"],
],
['commit'],
['select * from vtocc_c where eid = 9', {}, [(9, 'bbb', 'aaa')]],
['begin'], ['delete from vtocc_c where eid<10'], ['commit'],
# expressions
['begin'],
[
"insert into vtocc_a(eid, id, name, foo) values (7, 1+1, '', '')", {},
[],
["insert into vtocc_a(eid, id, name, foo) values (7, 1+1, '', '')"],
],
['commit'],
['select * from vtocc_a where eid = 7', {}, [(7L, 2L, '', '')]],
['begin'], ['delete from vtocc_a where eid>1'], ['commit'],
# no index
['begin'],
[
"insert into vtocc_d(eid, id) values (1, 1)", {},
[],
["insert into vtocc_d(eid, id) values (1, 1)"],
],
['commit'],
['select * from vtocc_d', {}, [(1L, 1L)]],
['begin'], ['delete from vtocc_d'], ['commit'],
# on duplicate key
['begin'],
[
"insert into vtocc_a(eid, id, name, foo) values (8, 1, '', '') on duplicate key update name = 'foo'", {},
[],
["insert into vtocc_a(eid, id, name, foo) values (8, 1, '', '') on duplicate key update name = 'foo' /* _stream vtocc_a (eid id ) (8 1 ); */"],
],
['commit'],
['select * from vtocc_a where eid = 8', {}, [(8L, 1L, '', '')]],
['begin'],
[
"insert into vtocc_a(eid, id, name, foo) values (8, 1, '', '') on duplicate key update name = 'foo'", {},
[],
["insert into vtocc_a(eid, id, name, foo) values (8, 1, '', '') on duplicate key update name = 'foo' /* _stream vtocc_a (eid id ) (8 1 ); */"],
],
['commit'],
['select * from vtocc_a where eid = 8', {}, [(8L, 1L, 'foo', '')]],
['begin'],
[
"insert into vtocc_a(eid, id, name, foo) values (8, 1, '', '') on duplicate key update id = 2", {},
[],
["insert into vtocc_a(eid, id, name, foo) values (8, 1, '', '') on duplicate key update id = 2 /* _stream vtocc_a (eid id ) (8 1 ) (8 2 ); */"],
],
['commit'],
['select * from vtocc_a where eid = 8', {}, [(8L, 2L, 'foo', '')]],
['begin'],
[
"insert into vtocc_a(eid, id, name, foo) values (8, 2, '', '') on duplicate key update id = 2+1", {},
[],
["insert into vtocc_a(eid, id, name, foo) values (8, 2, '', '') on duplicate key update id = 2+1"],
],
['commit'],
['select * from vtocc_a where eid = 8', {}, [(8L, 3L, 'foo', '')]],
['begin'], ['delete from vtocc_a where eid>1'], ['commit'],
# subquery
['begin'],
[
"insert /* subquery */ into vtocc_a(eid, id, name, foo) select eid, foo, name, foo from vtocc_c", {},
[],
[
'select eid, foo, name, foo from vtocc_c limit 10001',
"insert /* subquery */ into vtocc_a(eid, id, name, foo) values (10, 20, 'abcd', '20'), (11, 30, 'bcde', '30') /* _stream vtocc_a (eid id ) (10 20 ) (11 30 ); */",
],
],
['commit'],
['select * from vtocc_a where eid in (10, 11)', {}, [(10L, 20L, 'abcd', '20'), (11L, 30L, 'bcde', '30')]],
['begin'],
[
"insert into vtocc_a(eid, id, name, foo) select eid, foo, name, foo from vtocc_c on duplicate key update foo='bar'", {},
[],
[
'select eid, foo, name, foo from vtocc_c limit 10001',
"insert into vtocc_a(eid, id, name, foo) values (10, 20, 'abcd', '20'), (11, 30, 'bcde', '30') on duplicate key update foo = 'bar' /* _stream vtocc_a (eid id ) (10 20 ) (11 30 ); */",
],
],
['commit'],
['select * from vtocc_a where eid in (10, 11)', {}, [(10L, 20L, 'abcd', 'bar'), (11L, 30L, 'bcde', 'bar')]],
['begin'],
[
"insert into vtocc_a(eid, id, name, foo) select eid, foo, name, foo from vtocc_c limit 1 on duplicate key update id = 21", {},
[],
[
'select eid, foo, name, foo from vtocc_c limit 1',
"insert into vtocc_a(eid, id, name, foo) values (10, 20, 'abcd', '20') on duplicate key update id = 21 /* _stream vtocc_a (eid id ) (10 20 ) (10 21 ); */",
],
],
['commit'],
['select * from vtocc_a where eid in (10, 11)', {}, [(10L, 21L, 'abcd', 'bar'), (11L, 30, 'bcde', 'bar')]],
['begin'], ['delete from vtocc_a where eid>1'], ['delete from vtocc_c where eid<10'], ['commit'],
# multi-value
['begin'],
[
"insert into vtocc_a(eid, id, name, foo) values (5, 1, '', ''), (7, 1, '', '')", {},
[],
["insert into vtocc_a(eid, id, name, foo) values (5, 1, '', ''), (7, 1, '', '') /* _stream vtocc_a (eid id ) (5 1 ) (7 1 ); */"],
],
['commit'],
['select * from vtocc_a where eid>1', {}, [(5L, 1L, '', ''), (7L, 1L, '', '')]],
['begin'], ['delete from vtocc_a where eid>1'], ['commit'],
# update
['begin'],
[
"update /* pk */ vtocc_a set foo='bar' where eid = 1 and id = 1", {},
[],
["update /* pk */ vtocc_a set foo = 'bar' where eid = 1 and id = 1 /* _stream vtocc_a (eid id ) (1 1 ); */"]
],
['commit'],
['select foo from vtocc_a where id = 1', {}, [('bar',)]],
['begin'], ["update vtocc_a set foo='efgh' where id=1"], ['commit'],
# single in
['begin'],
[
"update /* pk */ vtocc_a set foo='bar' where eid = 1 and id in (1, 2)", {},
[],
["update /* pk */ vtocc_a set foo = 'bar' where eid = 1 and id in (1, 2) /* _stream vtocc_a (eid id ) (1 1 ) (1 2 ); */"],
],
['commit'],
['select foo from vtocc_a where id = 1', {}, [('bar',)]],
['begin'], ["update vtocc_a set foo='efgh' where id=1"], ["update vtocc_a set foo='fghi' where id=2"], ['commit'],
# double in
['begin'],
[
"update /* pk */ vtocc_a set foo='bar' where eid in (1) and id in (1, 2)", {},
[],
[
'select eid, id from vtocc_a where eid in (1) and id in (1, 2) limit 10001 for update',
"update /* pk */ vtocc_a set foo = 'bar' where eid = 1 and id = 1 /* _stream vtocc_a (eid id ) (1 1 ); */",
"update /* pk */ vtocc_a set foo = 'bar' where eid = 1 and id = 2 /* _stream vtocc_a (eid id ) (1 2 ); */",
],
],
['commit'],
['select foo from vtocc_a where id = 1', {}, [('bar',)]],
['begin'], ["update vtocc_a set foo='efgh' where id=1"], ["update vtocc_a set foo='fghi' where id=2"], ['commit'],
# double in 2
['begin'],
[
"update /* pk */ vtocc_a set foo='bar' where eid in (1, 2) and id in (1, 2)", {},
[],
[
'select eid, id from vtocc_a where eid in (1, 2) and id in (1, 2) limit 10001 for update',
"update /* pk */ vtocc_a set foo = 'bar' where eid = 1 and id = 1 /* _stream vtocc_a (eid id ) (1 1 ); */",
"update /* pk */ vtocc_a set foo = 'bar' where eid = 1 and id = 2 /* _stream vtocc_a (eid id ) (1 2 ); */",
],
],
['commit'],
['select foo from vtocc_a where id = 1', {}, [('bar',)]],
['begin'], ["update vtocc_a set foo='efgh' where id=1"], ["update vtocc_a set foo='fghi' where id=2"], ['commit'],
# tuple in
['begin'],
[
"update /* pk */ vtocc_a set foo='bar' where (eid, id) in ((1, 1), (1, 2))", {},
[],
["update /* pk */ vtocc_a set foo = 'bar' where (eid, id) in ((1, 1), (1, 2)) /* _stream vtocc_a (eid id ) (1 1 ) (1 2 ); */"],
],
['commit'],
['select foo from vtocc_a where id = 1', {}, [('bar',)]],
['begin'], ["update vtocc_a set foo='efgh' where id=1"], ["update vtocc_a set foo='fghi' where id=2"], ['commit'],
# pk change
['begin'],
[
"update vtocc_a set eid = 2 where eid = 1 and id = 1", {},
[],
["update vtocc_a set eid = 2 where eid = 1 and id = 1 /* _stream vtocc_a (eid id ) (1 1 ) (2 1 ); */"],
],
['commit'],
['select eid from vtocc_a where id = 1', {}, [(2L,)]],
['begin'], ["update vtocc_a set eid=1 where id=1"], ['commit'],
# complex pk change
['begin'],
[
"update vtocc_a set eid = 1+1 where eid = 1 and id = 1", {},
[],
["update vtocc_a set eid = 1+1 where eid = 1 and id = 1"],
],
['commit'],
['select eid from vtocc_a where id = 1', {}, [(2L,)]],
['begin'], ["update vtocc_a set eid=1 where id=1"], ['commit'],
# complex where
['begin'],
[
"update vtocc_a set eid = 1+1 where eid = 1 and id = 1+0", {},
[],
["update vtocc_a set eid = 1+1 where eid = 1 and id = 1+0"],
],
['commit'],
['select eid from vtocc_a where id = 1', {}, [(2L,)]],
['begin'], ["update vtocc_a set eid=1 where id=1"], ['commit'],
# partial pk
['begin'],
[
"update /* pk */ vtocc_a set foo='bar' where id = 1", {},
[],
[
"select eid, id from vtocc_a where id = 1 limit 10001 for update",
"update /* pk */ vtocc_a set foo = 'bar' where eid = 1 and id = 1 /* _stream vtocc_a (eid id ) (1 1 ); */",
],
],
['commit'],
['select foo from vtocc_a where id = 1', {}, [('bar',)]],
['begin'], ["update vtocc_a set foo='efgh' where id=1"], ['commit'],
# limit
['begin'],
[
"update /* pk */ vtocc_a set foo='bar' where eid = 1 limit 1", {},
[],
[
"select eid, id from vtocc_a where eid = 1 limit 1 for update",
"update /* pk */ vtocc_a set foo = 'bar' where eid = 1 and id = 1 /* _stream vtocc_a (eid id ) (1 1 ); */",
],
],
['commit'],
['select foo from vtocc_a where id = 1', {}, [('bar',)]],
['begin'], ["update vtocc_a set foo='efgh' where id=1"], ['commit'],
# order by
['begin'],
[
"update /* pk */ vtocc_a set foo='bar' where eid = 1 order by id desc limit 1", {},
[],
[
"select eid, id from vtocc_a where eid = 1 order by id desc limit 1 for update",
"update /* pk */ vtocc_a set foo = 'bar' where eid = 1 and id = 2 /* _stream vtocc_a (eid id ) (1 2 ); */",
],
],
['commit'],
['select foo from vtocc_a where id = 2', {}, [('bar',)]],
['begin'], ["update vtocc_a set foo='fghi' where id=2"], ['commit'],
# missing where
['begin'],
[
"update vtocc_a set foo='bar'", {},
[],
[
"select eid, id from vtocc_a limit 10001 for update",
"update vtocc_a set foo = 'bar' where eid = 1 and id = 1 /* _stream vtocc_a (eid id ) (1 1 ); */",
"update vtocc_a set foo = 'bar' where eid = 1 and id = 2 /* _stream vtocc_a (eid id ) (1 2 ); */",
],
],
['commit'],
['select * from vtocc_a', {}, [(1L, 1L, 'abcd', 'bar'), (1L, 2L, 'bcde', 'bar')]],
['begin'], ["update vtocc_a set foo='efgh' where id=1"], ["update vtocc_a set foo='fghi' where id=2"], ['commit'],
# no index
['begin'],
["insert into vtocc_d(eid, id) values (1, 1)"],
[
"update vtocc_d set id = 2 where eid = 1", {},
[],
["update vtocc_d set id = 2 where eid = 1"],
],
['commit'],
['select * from vtocc_d', {}, [(1L, 2L)]],
['begin'], ['delete from vtocc_d'], ['commit'],
# delete
['begin'],
["insert into vtocc_a(eid, id, name, foo) values (2, 1, '', '')"],
[
"delete /* pk */ from vtocc_a where eid = 2 and id = 1", {},
[],
["delete /* pk */ from vtocc_a where eid = 2 and id = 1 /* _stream vtocc_a (eid id ) (2 1 ); */"],
],
['commit'],
['select * from vtocc_a where eid=2', {}, []],
# single in
['begin'],
["insert into vtocc_a(eid, id, name, foo) values (2, 1, '', '')"],
[
"delete /* pk */ from vtocc_a where eid = 2 and id in (1, 2)", {},
[],
["delete /* pk */ from vtocc_a where eid = 2 and id in (1, 2) /* _stream vtocc_a (eid id ) (2 1 ) (2 2 ); */"],
],
['commit'],
['select * from vtocc_a where eid=2', {}, []],
# double in
['begin'],
["insert into vtocc_a(eid, id, name, foo) values (2, 1, '', '')"],
[
"delete /* pk */ from vtocc_a where eid in (2) and id in (1, 2)", {},
[],
[
'select eid, id from vtocc_a where eid in (2) and id in (1, 2) limit 10001 for update',
'delete /* pk */ from vtocc_a where eid = 2 and id = 1 /* _stream vtocc_a (eid id ) (2 1 ); */',
],
],
['commit'],
['select * from vtocc_a where eid=2', {}, []],
# double in 2
['begin'],
["insert into vtocc_a(eid, id, name, foo) values (2, 1, '', '')"],
[
"delete /* pk */ from vtocc_a where eid in (2, 3) and id in (1, 2)", {},
[],
[
'select eid, id from vtocc_a where eid in (2, 3) and id in (1, 2) limit 10001 for update',
'delete /* pk */ from vtocc_a where eid = 2 and id = 1 /* _stream vtocc_a (eid id ) (2 1 ); */'
],
],
['commit'],
['select * from vtocc_a where eid=2', {}, []],
# tuple in
['begin'],
["insert into vtocc_a(eid, id, name, foo) values (2, 1, '', '')"],
[
"delete /* pk */ from vtocc_a where (eid, id) in ((2, 1), (3, 2))", {},
[],
["delete /* pk */ from vtocc_a where (eid, id) in ((2, 1), (3, 2)) /* _stream vtocc_a (eid id ) (2 1 ) (3 2 ); */"],
],
['commit'],
['select * from vtocc_a where eid=2', {}, []],
# complex where
['begin'],
["insert into vtocc_a(eid, id, name, foo) values (2, 1, '', '')"],
[
"delete from vtocc_a where eid = 1+1 and id = 1", {},
[],
[
'select eid, id from vtocc_a where eid = 1+1 and id = 1 limit 10001 for update',
"delete from vtocc_a where eid = 2 and id = 1 /* _stream vtocc_a (eid id ) (2 1 ); */",
],
],
['commit'],
['select * from vtocc_a where eid=2', {}, []],
# partial pk
['begin'],
["insert into vtocc_a(eid, id, name, foo) values (2, 1, '', '')"],
[
"delete from vtocc_a where eid = 2", {},
[],
[
'select eid, id from vtocc_a where eid = 2 limit 10001 for update',
"delete from vtocc_a where eid = 2 and id = 1 /* _stream vtocc_a (eid id ) (2 1 ); */",
],
],
['commit'],
['select * from vtocc_a where eid=2', {}, []],
# limit
['begin'],
["insert into vtocc_a(eid, id, name, foo) values (2, 1, '', '')"],
[
"delete from vtocc_a where eid = 2 limit 1", {},
[],
[
'select eid, id from vtocc_a where eid = 2 limit 1 for update',
"delete from vtocc_a where eid = 2 and id = 1 /* _stream vtocc_a (eid id ) (2 1 ); */",
],
],
['commit'],
['select * from vtocc_a where eid=2', {}, []],
# order by
['begin'],
["insert into vtocc_a(eid, id, name, foo) values (2, 1, '', '')"],
[
"delete from vtocc_a order by eid desc limit 1", {},
[],
[
'select eid, id from vtocc_a order by eid desc limit 1 for update',
"delete from vtocc_a where eid = 2 and id = 1 /* _stream vtocc_a (eid id ) (2 1 ); */",
],
],
['commit'],
['select * from vtocc_a where eid=2', {}, []],
# missing where
['begin'],
[
"delete from vtocc_a", {},
[],
[
'select eid, id from vtocc_a limit 10001 for update',
"delete from vtocc_a where eid = 1 and id = 1 /* _stream vtocc_a (eid id ) (1 1 ); */",
"delete from vtocc_a where eid = 1 and id = 2 /* _stream vtocc_a (eid id ) (1 2 ); */",
],
],
['rollback'],
['select * from vtocc_a', {}, [(1L, 1L, 'abcd', 'efgh'), (1L, 2L, 'bcde', 'fghi')]],
# no index
['begin'],
['insert into vtocc_d values (1, 1)'],
[
'delete from vtocc_d where eid =1 and id =1', {},
[],
['delete from vtocc_d where eid = 1 and id = 1'],
],
['commit'],
['select * from vtocc_d', {}, []],
# missing values
['begin'],
[
"insert into vtocc_e(foo) values ('foo')", {},
[],
["insert into vtocc_e(foo) values ('foo') /* _stream vtocc_e (eid id name ) (null 1 'bmFtZQ==' ); */"],
],
[
"insert into vtocc_e(foo) select foo from vtocc_a", {},
[],
["select foo from vtocc_a limit 10001", "insert into vtocc_e(foo) values ('efgh'), ('fghi') /* _stream vtocc_e (eid id name ) (null 1 'bmFtZQ==' ) (null 1 'bmFtZQ==' ); */"],
],
['delete from vtocc_e'],
['commit'],
]
| Python |
# Copyright 2012, Google Inc.
# All rights reserved.
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import os
import traceback
class MultiDict(dict):
    """Dict whose entries are reachable as attributes and via dotted paths.

    Plain-dict values are wrapped as MultiDict on access, so lookups can
    be chained: d.a.b or d.mget("a.b").
    """
    def __getattr__(self, name):
        # Attribute access falls back to item lookup; a missing key
        # raises KeyError, mirroring the item protocol.
        value = self[name]
        return MultiDict(value) if type(value) == dict else value
    def mget(self, mkey, default=None):
        """Look up a dotted path like "a.b.c", returning default when any
        path component is missing."""
        node = self
        try:
            for part in mkey.split("."):
                node = node[part]
        except KeyError:
            node = default
        return MultiDict(node) if type(node) == dict else node
class TestException(Exception):
    """Raised by TestCase assertion helpers to signal a test failure."""
class TestCase(object):
    """Minimal test runner: discovers test_* methods on the subclass, runs
    them between setUp/tearDown, and counts TestException failures."""
    def __init__(self, testcase=None, verbose=False):
        # testcase: name of a single test method to run, or None to run all.
        # verbose: also print a PASS line for each successful assertion.
        self.testcase = testcase
        self.verbose = verbose
    def run(self):
        """Run the selected test(s), printing a pass/fail summary."""
        error_count = 0
        try:
            self.setUp()
            if self.testcase is None:
                # NOTE(review): dict iteration order makes the execution
                # order of tests arbitrary here.
                testlist = [v for k, v in self.__class__.__dict__.iteritems() if k.startswith("test_")]
            else:
                testlist = [self.__class__.__dict__[self.testcase]]
            for testfunc in testlist:
                try:
                    # Methods are fetched from __dict__ (unbound), so the
                    # instance is passed explicitly.
                    testfunc(self)
                except TestException, e:
                    print e
                    error_count += 1
        finally:
            # tearDown always runs, even if setUp or a test blew up.
            self.tearDown()
        if error_count == 0:
            print "GREAT SUCCESS"
        else:
            print "Errors:", error_count
    def assertNotEqual(self, val1, val2):
        if val1 == val2:
            raise TestException(self._format("FAIL: %s == %s"%(str(val1), str(val2))))
        elif self.verbose:
            print self._format("PASS")
    def assertEqual(self, val1, val2):
        if val1 != val2:
            raise TestException(self._format("FAIL: %s != %s"%(str(val1), str(val2))))
        elif self.verbose:
            print self._format("PASS")
    def assertFail(self, msg):
        # Unconditional failure helper.
        raise TestException(self._format("FAIL: %s"%msg))
    def assertStartsWith(self, val, prefix):
        if not val.startswith(prefix):
            raise TestException(self._format("FAIL: %s does not start with %s"%(str(val), str(prefix))))
    def assertContains(self, val, substr):
        if substr not in val:
            raise TestException(self._format("FAIL: %s does not contain %s"%(str(val), str(substr))))
    def _format(self, msg):
        # Annotate msg with the calling test's function name and line.
        # [-3] skips this frame and the assert* helper to reach the caller;
        # changing the call depth of the helpers would break this.
        frame = traceback.extract_stack()[-3]
        if self.verbose:
            return "Function: %s, Line %d: %s: %s"%(frame[2], frame[1], frame[3], msg)
        else:
            return "Function: %s, Line %d: %s"%(frame[2], frame[1], msg)
    def setUp(self):
        # Hook for subclasses; runs once before the tests.
        pass
    def tearDown(self):
        # Hook for subclasses; always runs after the tests.
        pass
class Tailer(object):
    """Follow a growing file, returning only data appended since the last
    read (like ``tail -f``)."""
    def __init__(self, f):
        # f: any seekable file-like object opened for reading.
        self.f = f
        self.reset()
    def reset(self):
        """Skip to the current end of file; later reads return only data
        appended after this point."""
        self.f.seek(0, os.SEEK_END)
        self.pos = self.f.tell()
    def read(self):
        """Return everything appended since the previous read ('' if none)."""
        self.f.seek(0, os.SEEK_END)
        newpos = self.f.tell()
        if newpos < self.pos:
            # BUG FIX: the file shrank (truncated/rotated). Previously the
            # stale offset was kept, silently swallowing all output until
            # the file grew past the old offset again. Resync to the new
            # end so the next append is picked up.
            self.pos = newpos
            return ""
        self.f.seek(self.pos, os.SEEK_SET)
        size = newpos - self.pos
        self.pos = newpos
        return self.f.read(size)
| Python |
# Copyright 2012, Google Inc.
# All rights reserved.
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# Go-style RPC client using BSON as the codec.
import bson
try:
# use optimized cbson which has slightly different API
import cbson
decode_document = cbson.decode_next
except ImportError:
from bson import codec
decode_document = codec.decode_document
from net import gorpc
# Field name used for wrapping simple values as bson documents
# (bson can only encode documents, so bare values get boxed under this key
# by encode_request and unboxed by decode_response).
# FIXME(msolomon) abandon this - too nasty when protocol requires upgrade
WRAPPED_FIELD = '_Val_'
class BsonRpcClient(gorpc.GoRpcClient):
    """GoRpcClient whose wire codec is BSON: each message is the header
    document immediately followed by the body document."""
    def encode_request(self, req):
        """Serialize req as two concatenated bson documents (header, body)."""
        try:
            if not isinstance(req.body, dict):
                # hack to handle simple values - bson can only encode
                # documents, so wrap bare values under WRAPPED_FIELD.
                body = {WRAPPED_FIELD: req.body}
            else:
                body = req.body
            return bson.dumps(req.header) + bson.dumps(body)
        except Exception, e:
            raise gorpc.GoRpcError('encode error', e)
    # fill response with decoded data
    def decode_response(self, response, data):
        """Decode the header and reply documents from data into response."""
        try:
            offset, response.header = decode_document(data, 0)
            offset, response.reply = decode_document(data, offset)
            # unpack primitive values
            # FIXME(msolomon) remove this hack
            response.reply = response.reply.get(WRAPPED_FIELD, response.reply)
        except Exception, e:
            raise gorpc.GoRpcError('decode error', e)
| Python |
# Copyright 2012, Google Inc.
# All rights reserved.
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
class MCBSonException(Exception):
    """Base exception type for this bson codec module."""
| Python |
# Copyright 2012, Google Inc.
# All rights reserved.
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# Handle transport and serialization callbacks for Go-style RPC servers.
#
# This is pretty simple. The client initiates an HTTP CONNECT and then
# hijacks the socket. The client is synchronous, but implements deadlines.
import errno
import socket
import struct
import time
import urlparse
# Bind hot-path builtins to module-level aliases; used in the per-chunk
# read loop (_read_more) below.
_len = len
_join = ''.join
class GoRpcError(Exception):
    """Base error for the Go-style RPC client."""
class TimeoutError(GoRpcError):
    """Raised when an RPC's deadline is exceeded."""
# Error field from response raised as an exception
class AppError(GoRpcError):
    """Server-reported application error (the response's Error field)."""
def make_header(method, sequence_id):
    """Build the routing header sent with every Go-style RPC request."""
    return dict(ServiceMethod=method, Seq=sequence_id)
class GoRpcRequest(object):
    """One outbound RPC call: routing header plus the request payload."""
    header = None  # standard fields that route the request on the server side
    body = None    # the actual request object - usually a dictionary
    def __init__(self, header, args):
        self.header = header
        self.body = args
    @property
    def sequence_id(self):
        # The client-assigned sequence number; echoed back by the server.
        return self.header['Seq']
class GoRpcResponse(object):
    """Decoded header and reply for one RPC exchange.

    The request header is echoed back to detect error and out-of-sequence
    bugs; it has the shape
    {'ServiceMethod': method, 'Seq': sequence_id, 'Error': error_string}.
    """
    header = None
    reply = None  # the decoded object - usually a dictionary
    @property
    def error(self):
        # Error string as reported by the server.
        return self.header['Error']
    @property
    def sequence_id(self):
        return self.header['Seq']
# FIXME(msolomon) This is a bson-ism, fix for future protocols.
# Each bson document begins with a little-endian int32 of its total byte
# length; these helpers parse that length prefix in place.
len_struct = struct.Struct('<i')
unpack_length = len_struct.unpack_from
len_struct_size = len_struct.size
default_read_buffer_size = 8192
# A single socket wrapper to handle request/response conversation for this
# protocol. Internal, use GoRpcClient instead.
class _GoRpcConn(object):
    def __init__(self, timeout):
        # timeout: overall per-request deadline in seconds (approximate).
        self.conn = None
        self.timeout = timeout
        self.start_time = None  # set while a request is in flight
    def dial(self, uri):
        """Connect to uri's host:port and hijack the socket via HTTP CONNECT."""
        parts = urlparse.urlparse(uri)
        netloc = parts.netloc.split(':')
        # NOTE(msolomon) since the deadlines are approximate in the code, set
        # timeout to oversample to minimize waiting in the extreme failure mode.
        socket_timeout = self.timeout / 10.0
        self.conn = socket.create_connection((netloc[0], int(netloc[1])),
                                             socket_timeout)
        self.conn.sendall('CONNECT %s HTTP/1.0\n\n' % parts.path)
        # Drain the server's handshake reply, which ends with a blank line.
        # NOTE(review): assumes '\n\n' arrives within one recv chunk -
        # a split across chunks would loop here; confirm acceptable.
        while True:
            data = self.conn.recv(1024)
            if not data:
                raise GoRpcError('Unexpected EOF in handshake')
            if '\n\n' in data:
                return
    def close(self):
        if self.conn:
            self.conn.close()
            self.conn = None
    def _check_deadline_exceeded(self):
        # Raises socket.timeout past the deadline; otherwise returns False
        # so it can be used directly in the read loops' while conditions.
        if (time.time() - self.start_time) > self.timeout:
            raise socket.timeout('deadline exceeded')
        return False
    def write_request(self, request_data):
        # Starts the deadline clock for the whole request/response exchange.
        self.start_time = time.time()
        self.conn.sendall(request_data)
    # FIXME(msolomon) This makes a couple of assumptions from bson encoding.
    def read_response(self):
        """Read one header+body response and return the raw bytes.

        Relies on each document starting with a little-endian int32 of its
        total length (see len_struct above).
        """
        if self.start_time is None:
            raise GoRpcError('no request pending')
        try:
            buf = []
            buf_write = buf.append
            data, data_len = _read_more(self.conn, buf, buf_write)
            # must read at least enough to get the length
            while data_len < len_struct_size and not self._check_deadline_exceeded():
                data, data_len = _read_more(self.conn, buf, buf_write)
            # header_len is the size of the entire header including the length
            # add on an extra len_struct_size to get enough of the body to read size
            header_len = unpack_length(data)[0]
            while (data_len < (header_len + len_struct_size) and
                   not self._check_deadline_exceeded()):
                data, data_len = _read_more(self.conn, buf, buf_write)
            # body_len is the size of the entire body - same as above
            body_len = unpack_length(data, header_len)[0]
            total_len = header_len + body_len
            while data_len < total_len and not self._check_deadline_exceeded():
                data, data_len = _read_more(self.conn, buf, buf_write)
            return data
        finally:
            # The deadline clock is always cleared, success or failure.
            self.start_time = None
def _read_more(conn, buf, buf_write):
    """Pull one chunk off the socket, append it to buf, and return the
    accumulated data together with its length.

    A socket timeout yields an empty chunk so the caller can re-check its
    deadline; an empty recv (peer hung up) raises EPIPE as socket.error.
    """
    try:
        chunk = conn.recv(default_read_buffer_size)
        if not chunk:
            # We only read when we expect data - nothing back means the
            # server hung up, so fail loudly to tear the client down.
            raise socket.error(errno.EPIPE, 'unexpected EOF in read')
    except socket.timeout:
        # Report no progress; the caller's deadline check converts a real
        # overrun into an error with reasonable precision.
        chunk = ''
    buf_write(chunk)
    data = _join(buf) if len(buf) > 1 else buf[0]
    return data, _len(data)
class GoRpcClient(object):
    """Synchronous Go-style RPC client with lazy connect and deadlines.

    Subclasses supply the wire codec by implementing encode_request and
    decode_response.
    """
    def __init__(self, uri, timeout):
        # uri: http-style address whose path is used for the CONNECT hijack.
        # timeout: per-call deadline in seconds.
        self.uri = uri
        self.timeout = timeout
        # FIXME(msolomon) make this random initialized?
        self.seq = 0
        self._conn = None
    @property
    def conn(self):
        # Lazily dial on first use; close() drops the cached connection so
        # the next access reconnects.
        if not self._conn:
            self._conn = _GoRpcConn(self.timeout)
            self._conn.dial(self.uri)
        return self._conn
    def close(self):
        if self._conn:
            self._conn.close()
            self._conn = None
    def next_sequence_id(self):
        # Monotonically increasing per-client sequence number.
        self.seq += 1
        return self.seq
    # return encoded request data, including header
    def encode_request(self, req):
        raise NotImplementedError
    # fill response with decoded data
    def decode_response(self, response, data):
        raise NotImplementedError
    # Perform an rpc, raising a GoRpcError, on errant situations.
    # Pass in a response object if you don't want a generic one created.
    def call(self, method, request, response=None):
        try:
            h = make_header(method, self.next_sequence_id())
            req = GoRpcRequest(h, request)
            self.conn.write_request(self.encode_request(req))
            data = self.conn.read_response()
            if response is None:
                response = GoRpcResponse()
            self.decode_response(response, data)
        except socket.timeout, e:
            # tear down - can't guarantee a clean conversation
            self.close()
            raise TimeoutError(e, self.timeout, method)
        except socket.error, e:
            # tear down - better chance of recovery by reconnecting
            self.close()
            raise GoRpcError(e, method)
        if response.error:
            # Server-side application error: surface it without tearing
            # down the connection.
            raise AppError(response.error, method)
        if response.sequence_id != req.sequence_id:
            # tear down - off-by-one error in the connection somewhere
            self.close()
            raise GoRpcError('request sequence mismatch', response.sequence_id,
                             req.sequence_id, method)
        return response
| Python |
# Copyright 2012, Google Inc.
# All rights reserved.
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
| Python |
import pygame
class Gun():
    """Weapon stats container: speed and cooldown for a given weapon kind.

    Unknown kinds only get a coolDown attribute, matching the original
    fall-through behavior.
    """

    # (gunSpeed, coolDownMax) per supported weapon kind.
    _PROFILES = {"pistol": (1000, 1000), "melee": (1, 1000)}

    def __init__(self, kind):
        self.coolDown = 0
        if kind in self._PROFILES:
            self.kind = kind
            self.gunSpeed, self.coolDownMax = self._PROFILES[kind]
            self.ammo = None
| Python |
import pygame, math
class Ball():
    """Image-backed sprite with velocity, wall clipping and circle collision.

    NOTE(review): collideBall/collidePlayer read self.radius and call
    self.distance(), neither of which is defined here - a subclass or
    caller must supply them; confirm before relying on those methods.
    """
    def __init__(self, image, speed=(0, 0), pos=(0, 0)):
        # image: path to the sprite image; speed: (dx, dy) pixels per frame.
        # (Defaults changed from mutable lists to tuples; they are only
        # read, so callers are unaffected.)
        self.image = pygame.image.load(image)
        self.rect = self.image.get_rect()
        self.speedx = speed[0]
        self.speedy = speed[1]
        self.speed = [self.speedx, self.speedy]
        self.place(pos)
        self.maxSpeed = 10
        # Per-frame flags so each axis reacts at most once per update.
        self.didBounceX = False
        self.didBounceY = False
        self.living = True
    def place(self, pos):
        """Center the sprite at pos."""
        self.rect.center = pos
    def update(self, width, height):
        """Advance one frame: reset bounce flags, move, clip to the walls."""
        self.didBounceX = False
        self.didBounceY = False
        self.speed = [self.speedx, self.speedy]
        self.move()
        self.collideWall(width, height)
    def move(self):
        self.rect = self.rect.move(self.speed)
    def collideWall(self, width, height):
        """Stop at the screen edges (speed is zeroed, not reflected)."""
        if not self.didBounceX:
            if self.rect.left < 0 or self.rect.right > width:
                self.speedx = 0
                self.didBounceX = True
        if not self.didBounceY:
            # NOTE(review): the floor is hard-coded at y=650 rather than
            # using the height parameter - presumably the ground line of
            # the 700px-tall map; confirm.
            if self.rect.bottom > 650 or self.rect.top < 0:
                self.speedy = 0
                self.didBounceY = True
    def collideBall(self, other):
        """Reverse velocity on circle overlap with another ball."""
        if self != other:
            if self.rect.right > other.rect.left and self.rect.left < other.rect.right:
                if self.rect.bottom > other.rect.top and self.rect.top < other.rect.bottom:
                    if (self.radius + other.radius) > self.distance(other.rect.center):
                        if not self.didBounceX:
                            self.speedx = -self.speedx
                            # BUG FIX: was "self.didBouncex = True" (typo),
                            # which created a junk attribute and never set
                            # the real flag, allowing repeated X bounces in
                            # a single frame.
                            self.didBounceX = True
                        if not self.didBounceY:
                            self.speedy = -self.speedy
                            self.didBounceY = True
    def collidePlayer(self, other):
        """Mark this ball dead when it overlaps a player's circle."""
        if self != other:
            if self.rect.right > other.rect.left and self.rect.left < other.rect.right:
                if self.rect.bottom > other.rect.top and self.rect.top < other.rect.bottom:
                    if (self.radius + other.radius) > self.distance(other.rect.center):
                        self.living = False
| Python |
import pygame
from Ball import Ball
class Bullet(Ball):
def __init__(self, pos, bspeed, direction, owner):
Ball.__init__(self, "images/LAZER.png", [0,0], pos)
self.maxSpeed = 20
if direction == "right":
self.speedx = self.maxSpeed
if direction == "left" :
self.speedx = -self.maxSpeed
self.owner = owner
def collidePlayer(self, other):
if other != self.owner:
if self.rect.right > other.rect.left and self.rect.left < other.rect.right:
if self.rect.bottom > other.rect.top and self.rect.top < other.rect.bottom:
self.living = False
print "hit"
def collideWall (self, width, height):
if not self.didBounceX:
if self.rect.left < 0 or self.rect.right > width:
self.living = False
if not self.didBounceX:
if self.rect.top < 0 or self.rect.bottom > height:
self.living = False
| Python |
import pygame, sys, random
from Player import Player
from Bullet import Bullet
from melee import Melee
from hud import Score
from hud import Text
# --- global setup: window, clock, persistent scoreboard ------------------
pygame.init()
clock = pygame.time.Clock()
width = 896
height = 700
size = width, height
screen = pygame.display.set_mode(size)
bgColor = r,g,b = 0, 0, 0
run = False
# Score labels survive across rounds, keyed by the side they belong to.
scores = {"right":Score([776, 25], "Score: ", 36),"left":Score([100, 25], "Score: ", 36)}
# Outer loop: one iteration per match (menu screen, then one round).
while True:
    # --- menu: wait for Enter to start a round ---------------------------
    bgImage = pygame.image.load("Menu.png").convert()
    bgRect = bgImage.get_rect()
    while not run:
        for event in pygame.event.get():
            if event.type == pygame.QUIT: sys.exit()
            if event.type == pygame.KEYDOWN:
                if event.key == pygame.K_RETURN:
                    run = True
        bgColor = r,g,b
        screen.fill(bgColor)
        screen.blit(bgImage, bgRect)
        pygame.display.flip()
        clock.tick(60)
    # --- round setup: fresh projectile lists and two players -------------
    bgImage = pygame.image.load("Map.png").convert()
    bgRect = bgImage.get_rect()
    melees = []
    bullets = []
    players = [Player([800,593], "left"), Player([100,593], "right")]
    # Round runs until a player dies (players drops below 2).
    while run and len(players) == 2:
        # --- input: players[0] on arrows/keypad, players[1] on WASD/G/H --
        for event in pygame.event.get():
            if event.type == pygame.QUIT: sys.exit()
            if event.type == pygame.KEYDOWN:
                if event.key == pygame.K_UP:
                    players[0].go("up")
                if event.key == pygame.K_RIGHT:
                    players[0].go("right")
                if event.key == pygame.K_LEFT:
                    players[0].go("left")
                if event.key == pygame.K_KP0:
                    bullets += players[0].shoot("fire")
                if event.key == pygame.K_KP_PERIOD:
                    melees += players[0].shoot("melee")
                if event.key == pygame.K_w:
                    players[1].go("up")
                if event.key == pygame.K_d:
                    players[1].go("right")
                if event.key == pygame.K_a:
                    players[1].go("left")
                if event.key == pygame.K_g:
                    bullets += players[1].shoot("fire")
                if event.key == pygame.K_h:
                    # NOTE(review): players[0]'s melee goes into melees,
                    # but this one is appended to bullets - looks like a
                    # copy-paste slip. Both lists are processed identically
                    # below, so behavior matches; confirm intent.
                    bullets += players[1].shoot("melee")
                # Number keys 1-5 play background drum loops.
                if event.key == pygame.K_1:
                    pygame.mixer.music.load("Drum.mp3")
                    pygame.mixer.music.play()
                if event.key == pygame.K_2:
                    pygame.mixer.music.load("Drum2.mp3")
                    pygame.mixer.music.play()
                if event.key == pygame.K_3:
                    pygame.mixer.music.load("Drum3.mp3")
                    pygame.mixer.music.play()
                if event.key == pygame.K_4:
                    pygame.mixer.music.load("Drum4.mp3")
                    pygame.mixer.music.play()
                if event.key == pygame.K_5:
                    pygame.mixer.music.load("Drum5.mp3")
                    pygame.mixer.music.play()
            if event.type == pygame.KEYUP:
                if event.key == pygame.K_UP:
                    players[0].go("down")
                if event.key == pygame.K_RIGHT:
                    players[0].go("stop right")
                if event.key == pygame.K_LEFT:
                    players[0].go("stop left")
                if event.key == pygame.K_KP0:
                    players[0].shoot("stop")
                if event.key == pygame.K_KP_PERIOD:
                    players[0].shoot("stop")
                if event.key == pygame.K_w:
                    players[1].go("down")
                if event.key == pygame.K_d:
                    players[1].go("stop right")
                if event.key == pygame.K_a:
                    players[1].go("stop left")
                if event.key == pygame.K_g:
                    players[1].shoot("stop")
                if event.key == pygame.K_h:
                    players[1].shoot("stop")
        # --- simulation: move everything, then resolve collisions --------
        for player in players:
            player.update(width, height)
        for bullet in bullets:
            bullet.update(width, height)
        for bullet in bullets:
            for player in players:
                bullet.collidePlayer(player)
                player.collideBullet(bullet)
        # NOTE(review): removing from a list while iterating it can skip
        # the element following each removal; only safe if at most one
        # projectile dies per frame - confirm.
        for bullet in bullets:
            if not bullet.living:
                bullets.remove(bullet)
        for melee in melees:
            melee.update(width, height)
        for melee in melees:
            for player in players:
                melee.collidePlayer(player)
                player.collideBullet(melee)
        for melee in melees:
            if not melee.living:
                melees.remove(melee)
        # Award a point to the opposite side and drop the dead player,
        # which ends the round via the while condition above.
        for player in players:
            if not player.living:
                if player.side == "right":
                    scores["left"].increaseScore(1)
                else:
                    scores["right"].increaseScore(1)
                players.remove(player)
        for score in scores.values():
            score.update()
        # --- draw: background, projectiles, players, scores --------------
        bgColor = r,g,b
        screen.fill(bgColor)
        screen.blit(bgImage, bgRect)
        for bullet in bullets:
            screen.blit(bullet.image, bullet.rect)
        for melee in melees:
            screen.blit(melee.image, melee.rect)
        for player in players:
            screen.blit(player.image, player.rect)
        for score in scores.values():
            screen.blit(score.image, score.rect)
        pygame.display.flip()
        clock.tick(60)
    # Round over: return to the menu for the next match.
    run = False
| Python |
import pygame
from Ball import Ball
class Melee(Ball):
    # A short-range melee strike sprite. Subclass of Ball (not visible here);
    # relies on Ball providing rect, speedx, didBounceX and living handling.
    def __init__(self, pos, bspeed, direction, owner):
        """Create a melee hit at *pos* moving toward *direction*.

        pos       -- (x, y) centre for the sprite
        bspeed    -- unused here; kept for a Bullet-compatible signature
        direction -- "right" or "left"
        owner     -- the Player that spawned the attack (immune to it)
        """
        Ball.__init__(self, "images/LAZER.png", [0,0], pos)
        self.maxSpeed = 50
        if direction == "right":
            self.speedx = self.maxSpeed
            # NOTE(review): living starts False, so the main loop removes the
            # melee after a single frame -- presumably an intentional
            # instant-hit effect, but confirm (Bullet likely starts living=True).
            self.living = False
        if direction == "left" :
            self.speedx = -self.maxSpeed
            self.living = False
        self.owner = owner
    def collidePlayer(self, other):
        # Axis-aligned rectangle overlap test; the owner cannot hit itself.
        if other != self.owner:
            if self.rect.right > other.rect.left and self.rect.left < other.rect.right:
                if self.rect.bottom > other.rect.top and self.rect.top < other.rect.bottom:
                    self.living = False
                    print "hit"
    def collideWall (self, width, height):
        # Dies on contact with any screen edge.
        if not self.didBounceX:
            if self.rect.left < 0 or self.rect.right > width:
                self.living = False
        # NOTE(review): this second guard also reads didBounceX but protects
        # the vertical (top/bottom) test -- didBounceY looks intended; verify
        # against the Ball base class before changing.
        if not self.didBounceX:
            if self.rect.top < 0 or self.rect.bottom > height:
                self.living = False
| Python |
import pygame
class Text():
    """A positioned, rendered line of text.

    Attributes:
        text      -- the string currently displayed
        textColor -- RGB tuple used for rendering
        font      -- pygame Font used for rendering
        image     -- rendered Surface
        rect      -- bounding rect, centred on the given position
    """
    def __init__(self, pos, text = "", textSize = 12, textColor=(0,0,0), font = None):
        self.text = text
        self.textColor = textColor
        # font=None selects pygame's default font.
        self.font = pygame.font.Font(font, textSize)
        self.image = self.font.render(self.text, 1, textColor)
        self.rect = self.image.get_rect()
        self.place(pos)
    def place(self, pos):
        """Centre the text on *pos*."""
        self.rect.center = pos
    def setText(self, text):
        """Replace the displayed string and re-render in place."""
        self.text = text
        # BUGFIX: this used the bare name `textColor`, which is undefined at
        # call time and raised NameError; the stored instance colour was meant.
        self.image = self.font.render(text, 1, self.textColor)
        self.rect = self.image.get_rect(center = self.rect.center)
    def update(self, width, height):
        # No per-frame behaviour; present so Text matches the sprite API.
        pass
class Score(Text):
    """A Text subclass showing a prefix plus a numeric score.

    Rendering is deferred: mutators only set a dirty flag, and update()
    re-renders at most once per frame.
    """
    def __init__(self, pos, baseText = "0", textSize = 12, textColor=(0,0,0), font = None):
        self.score = 0
        self.baseText = baseText
        self.text = self.baseText + str(self.score)
        Text.__init__(self, pos, self.text, textSize, textColor, font)
        self.change = False
    def setText(self, text):
        """Change the prefix rendered before the number."""
        self.baseText = text
        self.change = True
    def update(self):
        """Re-render the label, but only if something changed."""
        if not self.change:
            return
        self.text = self.baseText + str(self.score)
        self.image = self.font.render(self.text, 1, self.textColor)
        self.rect = self.image.get_rect(center = self.rect.center)
        self.change = False
    def setScore(self, score):
        """Set the score to an absolute value."""
        self.score = score
        self.change = True
    def increaseScore(self, amount = 1):
        """Add *amount* (default 1) to the score."""
        self.score += amount
        self.change = True
    def resetScore(self):
        """Put the score back to zero."""
        self.score = 0
        self.change = True
| Python |
import pygame
from Ball import Ball
from Bullet import Bullet
from melee import Melee
from gun import Gun
class Player(Ball):
def __init__(self, pos, facing):
if facing == "right":
self.side = "left"
else:
self.side = "right"
Ball.__init__(self, "images/p1_walk01.png", [0,0], pos)
self.upImages = [pygame.image.load("images/p1_jump.png"),
pygame.image.load("images/p1_jump.png"),
pygame.image.load("images/p1_jump.png"),
pygame.image.load("images/p1_jump.png"),
pygame.image.load("images/p1_jump.png"),
pygame.image.load("images/p1_jump.png"),
pygame.image.load("images/p1_jump.png"),
pygame.image.load("images/p1_jump.png"),
pygame.image.load("images/p1_jump.png"),
pygame.image.load("images/p1_jump.png"),
pygame.image.load("images/p1_jump.png"),]
self.downImages = [pygame.image.load("images/p1_walk03.png"),
pygame.image.load("images/p1_walk03.png"),
pygame.image.load("images/p1_walk03.png"),
pygame.image.load("images/p1_walk03.png"),
pygame.image.load("images/p1_walk03.png"),
pygame.image.load("images/p1_walk03.png"),
pygame.image.load("images/p1_walk03.png"),
pygame.image.load("images/p1_walk03.png"),
pygame.image.load("images/p1_walk03.png"),
pygame.image.load("images/p1_walk03.png"),
pygame.image.load("images/p1_walk03.png")]
self.leftImages = [pygame.image.load("images/p1_walk01L.png"),
pygame.image.load("images/p1_walk02L.png"),
pygame.image.load("images/p1_walk03L.png"),
pygame.image.load("images/p1_walk04L.png"),
pygame.image.load("images/p1_walk05L.png"),
pygame.image.load("images/p1_walk06L.png"),
pygame.image.load("images/p1_walk07L.png"),
pygame.image.load("images/p1_walk08L.png"),
pygame.image.load("images/p1_walk09L.png"),
pygame.image.load("images/p1_walk10L.png"),
pygame.image.load("images/p1_walk11L.png")]
self.rightImages = [pygame.image.load("images/p1_walk01.png"),
pygame.image.load("images/p1_walk02.png"),
pygame.image.load("images/p1_walk03.png"),
pygame.image.load("images/p1_walk04.png"),
pygame.image.load("images/p1_walk05.png"),
pygame.image.load("images/p1_walk06.png"),
pygame.image.load("images/p1_walk07.png"),
pygame.image.load("images/p1_walk08.png"),
pygame.image.load("images/p1_walk09.png"),
pygame.image.load("images/p1_walk10.png"),
pygame.image.load("images/p1_walk11.png")]
self.stopImages = [pygame.image.load("images/p1_walk08.png"),
pygame.image.load("images/p1_walk08.png"),
pygame.image.load("images/p1_walk08.png"),
pygame.image.load("images/p1_walk08.png"),
pygame.image.load("images/p1_walk08.png"),
pygame.image.load("images/p1_walk08.png"),
pygame.image.load("images/p1_walk08.png"),
pygame.image.load("images/p1_walk08.png"),
pygame.image.load("images/p1_walk08.png"),
pygame.image.load("images/p1_walk08.png"),
pygame.image.load("images/p1_walk08.png")]
self.facing = facing
self.changed = False
self.images = self.rightImages
self.frame = 0
self.maxFrame = len(self.images) - 1
self.waitCount = 0
self.maxWait = 60*.25
self.image = self.images[self.frame]
self.rect = self.image.get_rect(center = self.rect.center)
self.pistol = Gun("pistol")
self.fist = Gun("melee")
self.gun = self.pistol
self.shooting = False
self.living = True
def collideBullet(self, other):
if other.owner != self:
if self.rect.right > other.rect.left and self.rect.left < other.rect.right:
if self.rect.bottom > other.rect.top and self.rect.top < other.rect.bottom:
self.living = False
print "dead"
def update(self, width, height):
Ball.update(self, width, height)
self.animate()
self.changed = False
def animate(self):
if self.waitCount < self.maxWait:
self.waitCount += 5
else:
self.waitCount = 0
self.changed = True
if self.frame < self.maxFrame:
self.frame += 1
else:
self.frame = 0
if self.changed:
if self.facing == "up":
self.images = self.upImages
elif self.facing == "down":
self.images = self.downImages
elif self.facing == "right":
self.images = self.rightImages
elif self.facing == "left":
self.images = self.leftImages
elif self.facing == "stop":
self.images = self.stopImages
self.image = self.images[self.frame]
def go(self, direction):
if direction == "up":
self.facing = "up"
self.changed = True
self.speedy = -self.maxSpeed
elif direction == "down":
self.facing = "down"
self.changed = True
self.speedy = 5
if direction == "right":
self.facing = "right"
self.changed = True
self.speedx = self.maxSpeed
elif direction == "stop right":
self.speedx = 0
elif direction == "left":
self.facing = "left"
self.changed = True
self.speedx = -self.maxSpeed
elif direction == "stop left":
self.speedx = 0
def shoot(self, command):
if command == "stop":
self.shooting = False
if self.facing != "up":
if self.facing != "down":
if command == "fire":
return [Bullet(self.rect.center, self.gun.gunSpeed, self.facing,self)]
self.shooting = True
if command == "melee":
return[Melee(self.rect.center, self.gun.gunSpeed, self.facing,self)]
self.shooting = True
return []
| Python |
#!/usr/bin/python
import re
import string
import sys
import time
from xml.sax import saxutils
class Question:
    """One parsed exam question: text, answers, correct index, difficulty."""
    def __init__(self):
        # All state is per-instance.  (FIX: the old class-level `answers = []`
        # was a shared mutable default -- any access before __init__ ran, or
        # through the class, would leak answers between questions.)
        self.text = None          # full question text (LaTeX)
        self.answers = []         # answer strings, in document order
        self.rightAnswer = None   # index into answers of the correct one
        self.difficulty = None    # integer difficulty taken from \bq{N}
# One Question per \bq...\eq block, accumulated across every input file,
# then dumped as a simple XML document on stdout.
results = []
for fname in sys.argv[1:]:
    fh = open(fname)
    tex = fh.readlines()
    q = None        # Question currently being built (None = between questions)
    qEnded = 0      # 1 once \eq seen: question text done, answers expected
    aStarted = 0    # 1 while inside \begin{answers} ... \end{answers}
    for line in tex:
        # create Question and set text to first line (rest of lines will be added
        # later); lines commented out with a leading % are ignored
        if q == None and re.search('\\\\bq{[0-9]*?}', line) != None and \
           re.search('^\s*%', line) == None:
            dif = re.sub('.*\\\\bq{([0-9]*?)}.*\n', '\\1', line)
            newline = re.sub('\\\\bq{[0-9]*?}', '', line)
            newline = re.sub('\\\\label{.*?} ', '', newline)
            q = Question()
            q.text = newline
            q.difficulty = int(dif)
            continue
        # add rest of question text
        if q != None and re.search('\\\\eq', line) == None and not qEnded:
            q.text = q.text + line
            continue
        # question text ends when we find \eq
        if q != None and re.search('\\\\eq', line):
            qEnded = 1
            continue
        # answers start when we find \begin{answers}
        if q != None and qEnded and re.search('\\\\begin{answers}', line) != None:
            aStarted = 1
            continue
        # \end{answers} closes the question: post-process collected answers,
        # locate the correct one by its \label marker, store the Question
        if q != None and aStarted and re.search('\\\\end{answers}', line) != None:
            aStarted = 0
            qEnded = 0
            cnt = 0
            tmparr = []
            for ans in q.answers:
                # the correct answer carries \label{q\theqnum:a}
                if ans.find('\\label{q\\theqnum:a}') != -1:
                    q.rightAnswer = cnt
                    ans = ans.replace('\\label{q\\theqnum:a}', '')
                ans = ans.lstrip()
                ans = ans.rstrip()
                tmparr.append(ans)
                cnt = cnt + 1
            q.text = q.text.lstrip()
            q.text = q.text.rstrip()
            q.answers = tmparr
            results.append(q)
            q = None
            continue
        # continuation line of a multi-line answer
        if q != None and aStarted and re.search('\\\\item ', line) == None:
            q.answers[len(q.answers)-1] = q.answers[len(q.answers)-1] + '\n' + line
            continue
        # a new \item opens the next answer
        if q != None and aStarted and re.search('\\\\item ', line) != None:
            ans = re.sub('\\\\item ', '', line)
            ans = re.sub('\n','', ans)
            q.answers.append(ans)
            continue
# Emit everything as XML on stdout.
print '<?xml version="1.0" encoding="UTF-8"?>'
print '<questions>'
for q in results:
    print '<question difficulty="%d">' % (q.difficulty)
    print '<text>%s</text>' % (saxutils.escape(q.text))
    print '<answers>'
    num = 0
    for a in q.answers:
        # correct="1" only on the right answer's index
        print '<answer correct="%d">%s</answer>' % (num == q.rightAnswer,
            saxutils.escape(a))
        num = num + 1
    print '</answers>'
    print '</question>\n'
print '</questions>'
| Python |
#!/usr/bin/python
import re
import string
import sys
import time
from xml.sax import saxutils
class Question:
    """One parsed exam question: text, answers, correct index, difficulty."""
    def __init__(self):
        # All state is per-instance.  (FIX: the old class-level `answers = []`
        # was a shared mutable default -- any access before __init__ ran, or
        # through the class, would leak answers between questions.)
        self.text = None          # full question text (LaTeX)
        self.answers = []         # answer strings, in document order
        self.rightAnswer = None   # index into answers of the correct one
        self.difficulty = None    # integer difficulty taken from \bq{N}
# One Question per \bq...\eq block, accumulated across every input file,
# then dumped as a simple XML document on stdout.
results = []
for fname in sys.argv[1:]:
    fh = open(fname)
    tex = fh.readlines()
    q = None        # Question currently being built (None = between questions)
    qEnded = 0      # 1 once \eq seen: question text done, answers expected
    aStarted = 0    # 1 while inside \begin{answers} ... \end{answers}
    for line in tex:
        # create Question and set text to first line (rest of lines will be added
        # later); lines commented out with a leading % are ignored
        if q == None and re.search('\\\\bq{[0-9]*?}', line) != None and \
           re.search('^\s*%', line) == None:
            dif = re.sub('.*\\\\bq{([0-9]*?)}.*\n', '\\1', line)
            newline = re.sub('\\\\bq{[0-9]*?}', '', line)
            newline = re.sub('\\\\label{.*?} ', '', newline)
            q = Question()
            q.text = newline
            q.difficulty = int(dif)
            continue
        # add rest of question text
        if q != None and re.search('\\\\eq', line) == None and not qEnded:
            q.text = q.text + line
            continue
        # question text ends when we find \eq
        if q != None and re.search('\\\\eq', line):
            qEnded = 1
            continue
        # answers start when we find \begin{answers}
        if q != None and qEnded and re.search('\\\\begin{answers}', line) != None:
            aStarted = 1
            continue
        # \end{answers} closes the question: post-process collected answers,
        # locate the correct one by its \label marker, store the Question
        if q != None and aStarted and re.search('\\\\end{answers}', line) != None:
            aStarted = 0
            qEnded = 0
            cnt = 0
            tmparr = []
            for ans in q.answers:
                # the correct answer carries \label{q\theqnum:a}
                if ans.find('\\label{q\\theqnum:a}') != -1:
                    q.rightAnswer = cnt
                    ans = ans.replace('\\label{q\\theqnum:a}', '')
                ans = ans.lstrip()
                ans = ans.rstrip()
                tmparr.append(ans)
                cnt = cnt + 1
            q.text = q.text.lstrip()
            q.text = q.text.rstrip()
            q.answers = tmparr
            results.append(q)
            q = None
            continue
        # continuation line of a multi-line answer
        if q != None and aStarted and re.search('\\\\item ', line) == None:
            q.answers[len(q.answers)-1] = q.answers[len(q.answers)-1] + '\n' + line
            continue
        # a new \item opens the next answer
        if q != None and aStarted and re.search('\\\\item ', line) != None:
            ans = re.sub('\\\\item ', '', line)
            ans = re.sub('\n','', ans)
            q.answers.append(ans)
            continue
# Emit everything as XML on stdout.
print '<?xml version="1.0" encoding="UTF-8"?>'
print '<questions>'
for q in results:
    print '<question difficulty="%d">' % (q.difficulty)
    print '<text>%s</text>' % (saxutils.escape(q.text))
    print '<answers>'
    num = 0
    for a in q.answers:
        # correct="1" only on the right answer's index
        print '<answer correct="%d">%s</answer>' % (num == q.rightAnswer,
            saxutils.escape(a))
        num = num + 1
    print '</answers>'
    print '</question>\n'
print '</questions>'
| Python |
"""
------------------------------------------
-- Fiiction --
------------------------------------------
-- Interactive fiction by Fiona Burrows --
------------------------------------------
Game class
Holds the main game module, the main game should
extend from this.
"""
from fiiction.parse import parse_command
class Game:
game_name = "Fiiction Game"
author = "Your Name Here"
rooms = {}
things = {}
starting_room = None
starting_items = []
description = "Description of your game is displayed at the start"
starting_message = "Message for the start of the game is displayed at the start"
# set to true to quickly quit
end_game = False
current_input = ""
current_room = None
suppress_room_description = False
# ------------------------------------------------
def __init__(self):
pass
# ------------------------------------------------
def start_game(self):
"The function that starts off a game."
# create exits, objects and whatever else we want to do
for room_name in self.rooms:
self.rooms[room_name].create_room()
# first message
Game.output(
["Fiiction by Fiona Burrows",
"-------------------------",
"%s by %s" % (self.game_name, self.author),
"-------------------------",
self.description,
"",
"",
self.starting_message,
"-------------------------"]
)
# go to the first room ... or not
if self.starting_room == None:
Game.output("You have no starting room set!")
else:
self.current_room = self.starting_room
self.starting_room.enter()
self.game_loop()
# ------------------------------------------------
@classmethod
def output(self, text_to_output):
"Outputs text to the screen, accepts string or list."
# Single strings
if type(text_to_output) == type(""):
print text_to_output
# list of strings
elif type(text_to_output) == type([]):
for single_text in text_to_output:
print single_text
# ------------------------------------------------
def game_loop(self):
"Keeps running, keeps checking input and doing things with it."
while self.end_game == False:
self.current_input = raw_input("\n>>>").lower().strip()
if self.current_input == "quit":
self.end_game = True
else:
# parse input
if self.current_input != "":
command_return = parse_command(self.current_input, self)
if command_return == False:
Game.output("I don't understand.")
# room describe
if self.suppress_room_description == False:
Game.output("\nYou are in %s" % self.current_room.room_name)
self.current_room.describe_exits()
self.current_room.list_items()
self.suppress_room_description = False
Game.output("Bye!")
# ------------------------------------------------
def move_to_room(self, room_to):
room_to.enter()
self.current_room = room_to
self.suppress_room_description = True
# ------------------------------------------------
def __str__(self):
return self.game_name
| Python |
"""
------------------------------------------
-- Fiiction --
------------------------------------------
-- Interactive fiction by Fiona Burrows --
------------------------------------------
Helper room classes
Here are some classes to extend from to create
basic "default" rooms without much effort
"""
from fiiction.room import Room
from fiiction.dir import north, south, west, east
class hCavern(Room):
    """Helper room: a cavern whose four exits are all dead-end cave walls.

    Subclasses typically delegate to this create_room() and then replace
    individual directions with real exits.
    """
    room_name = "a cavern"
    def create_room(self):
        wall = "There's just a rocky cave wall that way."
        # A string argument makes each Direction a dead end; list=False
        # keeps the fake exits out of the room's exit listing.
        self.exits = {
            "west" : west.West(wall, list=False),
            "east" : east.East(wall, list=False),
            "south" : south.South(wall, list=False),
            "north" : north.North(wall, list=False)
        }
"""
------------------------------------------
-- Fiiction --
------------------------------------------
-- Interactive fiction by Fiona Burrows --
------------------------------------------
Room class
Deals with room stuff...
"""
from fiiction.game import Game
class Room:
    """A single location in the game world.

    Subclasses override the descriptive attributes and create_room(),
    which should populate self.exits (and optionally self.things).
    """
    room_name = "My Room"
    room_init_description = "Initial description of the room when first entered."
    room_description = "Description of the room."
    things = {}
    exits = {}
    times_entered = 0
    game = None
    def __init__(self, game_class):
        self.game = game_class
        # BUGFIX: give every room its own containers.  These previously
        # existed only as class-level dicts, so mutating self.things["x"]
        # in one room (as the sample games do) leaked the item into every
        # other room.
        self.things = {}
        self.exits = {}
        self.times_entered = 0
    def create_room(self):
        """Hook for subclasses to build exits/things; default does nothing."""
        pass
    def enter(self):
        """Print the (first-visit) description, then exits and items."""
        if self.times_entered == 0 and self.room_init_description != "":
            Game.output(self.room_init_description + "\n")
        self.describe()
        Game.output("")
        self.describe_exits()
        self.list_items()
        self.times_entered += 1
    def describe(self):
        Game.output(self.room_description)
    def describe_exits(self):
        """List the exits whose Direction has list == True."""
        if len(self.exits) > 0:
            exit_build = []
            for direction in self.exits:
                if self.exits[direction].list == True:
                    exit_build.append(self.exits[direction].list_name)
            if len(exit_build) > 0:
                Game.output("There are exits %s" % ", ".join(exit_build))
    def list_items(self):
        """List the visible (list == True) things in the room."""
        if len(self.things) > 0:
            item_build = []
            for item in self.things:
                if self.things[item].list == True:
                    item_build.append(self.things[item].list_name)
            if len(item_build) > 0:
                Game.output("You can also see %s" % ", ".join(item_build))
    def __str__(self):
        return self.room_name
| Python |
"""
------------------------------------------
-- Fiiction --
------------------------------------------
-- Interactive fiction by Fiona Burrows --
------------------------------------------
Thing class
Basic item...
"""
from fiiction.game import Game
class Thing:
    """A basic item that can sit in a room and be examined."""
    thing_name = "My Thing"       # name the parser matches against
    list_name = "a thing"         # how it appears in room listings
    thing_description = "Description of the room."
    list = True                   # set False to hide from room listings
    game = None
    def __init__(self, game_class):
        # Keep a back-reference to the owning game.
        self.game = game_class
    def describe(self):
        """Print this thing's description."""
        Game.output(self.thing_description)
    def __str__(self):
        return self.thing_name
| Python |
"""
------------------------------------------
-- Fiiction --
------------------------------------------
-- Interactive fiction by Fiona Burrows --
------------------------------------------
Direction class
South
"""
from fiiction.dir import Direction
class South(Direction):
    # Data-only subclass: fills in the Direction class attributes for "south".
    list_name = "south"                 # shown in exit listings
    commands = ["south", "s"]           # player input that selects this exit
    travelling = "You travel south..."
    examination = "There is an exit leading south."
| Python |
"""
------------------------------------------
-- Fiiction --
------------------------------------------
-- Interactive fiction by Fiona Burrows --
------------------------------------------
Direction class
West
"""
from fiiction.dir import Direction
class West(Direction):
    # Data-only subclass: fills in the Direction class attributes for "west".
    list_name = "west"                  # shown in exit listings
    commands = ["west", "w"]            # player input that selects this exit
    travelling = "You travel west..."
    examination = "There is an exit leading west."
| Python |
"""
------------------------------------------
-- Fiiction --
------------------------------------------
-- Interactive fiction by Fiona Burrows --
------------------------------------------
Direction class
North
"""
from fiiction.dir import Direction
class North(Direction):
    # Data-only subclass: fills in the Direction class attributes for "north".
    list_name = "north"                 # shown in exit listings
    commands = ["north", "n"]           # player input that selects this exit
    travelling = "You travel north..."
    examination = "There is an exit leading north."
| Python |
"""
------------------------------------------
-- Fiiction --
------------------------------------------
-- Interactive fiction by Fiona Burrows --
------------------------------------------
Direction class
East
"""
from fiiction.dir import Direction
class East(Direction):
    # Data-only subclass: fills in the Direction class attributes for "east".
    list_name = "east"                  # shown in exit listings
    commands = ["east", "e"]            # player input that selects this exit
    travelling = "You travel east..."
    examination = "There is an exit leading east."
| Python |
"""
------------------------------------------
-- Fiiction --
------------------------------------------
-- Interactive fiction by Fiona Burrows --
------------------------------------------
Direction class
"""
class Direction:
    """One exit from a room, or a dead end.

    Construct with either a Room (a real exit) or a string (a dead-end
    message shown when the player tries to travel that way).
    """
    # "There are directions north, south ..."
    list_name = ""
    # Commands to accept
    commands = []
    # Travelling message
    travelling = ""
    # Examination message
    examination = ""
    connecting_room = None
    cant_travel = ""
    # NOTE: `list` shadows the builtin, but it is part of the public API
    # (rooms read exit.list), so the name is kept.
    list = True
    # Constructor
    def __init__(self, room, list = True):
        # A string means it's a dead end.  (FIX: replaced the old
        # type(room) == type("") comparison with idiomatic isinstance,
        # and dropped a stray trailing semicolon on cant_travel.)
        if isinstance(room, str):
            self.cant_travel = room
        else:
            self.connecting_room = room
        self.list = list
"""
------------------------------------------
-- Fiiction --
------------------------------------------
-- Interactive fiction by Fiona Burrows --
------------------------------------------
Parser deals with commands
"""
import re
def parse_command(command, game):
    """Interpret one line of player input against *game*'s current room.

    Returns True if the command was understood (something was done or
    printed), False otherwise.  The generic room description is suppressed
    by default and re-enabled only for a bare "look"/"examine".

    FIX: removed stray statement-terminating semicolons, replaced
    `!= None` with `is not None`, replaced type(x) == type("") checks
    with isinstance, and hoisted repeated exits[...] lookups.
    """
    commands = command.strip().split()
    game.suppress_room_description = True
    # ------------
    # EXAMINATION
    # ------------
    regular_exp = re.compile(r"^(?:look|examine)(?: at| upon)?(?P<object> .*)?", re.IGNORECASE).match(command)
    if regular_exp is not None:
        captured_object = regular_exp.group("object")
        # no object captured: the player probably wants the room itself
        if captured_object is None:
            game.suppress_room_description = False
            game.output(game.current_room.room_description)
            return True
        # Want to look at something else
        else:
            captured_object = captured_object.strip()
            # is it an object?
            thing_key = check_command_is_thing(captured_object, game)
            if isinstance(thing_key, str):
                game.output(game.current_room.things[thing_key].thing_description)
                return True
            # is it a direction?
            dir_command = check_command_is_direction(captured_object, game)
            if isinstance(dir_command, str):
                game.output(game.current_room.exits[dir_command].examination)
                return True
            game.output("There's nothing to look at.")
            return True
    # ------------
    # TRAVELLING
    # ------------
    else:
        dir_command = check_command_is_direction(commands[0], game)
        if isinstance(dir_command, str):
            chosen_exit = game.current_room.exits[dir_command]
            # dead end?
            if chosen_exit.connecting_room is None:
                game.output(chosen_exit.cant_travel)
            else:
                game.output(chosen_exit.travelling + "\n")
                game.move_to_room(chosen_exit.connecting_room)
            return True
    return False
# ------------------------------------------------
# Return the exits-dict key whose Direction accepts *command* as one of
# its movement words, or False when no exit in the current room matches.
def check_command_is_direction(command, game):
    exits = game.current_room.exits
    for exit_key in exits:
        if command in exits[exit_key].commands:
            return exit_key
    return False
# ------------------------------------------------
# Return the things-dict key whose thing_name equals *command*, or False
# when nothing in the current room goes by that name.
def check_command_is_thing(command, game):
    things = game.current_room.things
    for thing_key in things:
        if things[thing_key].thing_name == command:
            return thing_key
    return False
| Python |
from fiiction.game import Game
from rooms import start, second_room
from things import rock
class awesome_zone(Game):
    # Concrete game: only overrides the descriptive class attributes;
    # all behaviour comes from the Game base class.
    game_name = "THIS IS THE AWESOME ZONE"
    author = "Fiona"
    description = "awesomeest game in the world"
    starting_message = "woaaaaaaaaaaaaaaah wtf"
# create instance
game = awesome_zone()
# create rooms (each receives the game so rooms can reach shared state)
game.rooms = {
    "start_room" : start.start(game),
    "second_room" : second_room.second_room(game)
}
# create objects
game.things = {
    "rock" : rock.rock(game)
}
# the player begins in the start room
game.starting_room = game.rooms['start_room']
# start the game off (blocks in the input loop until the player quits)
game.start_game()
"""
a rock lol
"""
from fiiction.thing import Thing
class rock(Thing):
    # Data-only item: a plain rock placed in the starting room.
    thing_name = "rock"
    list_name = "a rock"
    thing_description = "It's just a boring rock."
"""
Second room
"""
from fiiction.room.helpers import hCavern
from fiiction.dir.south import South
class second_room(hCavern):
    """The second cave: all walls except a south exit back to the start."""
    room_name = "another cave"
    room_init_description = ""
    room_description = "This is a pretty shitty room."
    def create_room(self):
        # Start from the all-walls cavern, then open up the south side.
        hCavern.create_room(self)
        self.exits["south"] = South(self.game.rooms['start_room'])
| Python |
"""
MY STARTING ROOM
"""
from fiiction.room.helpers import hCavern
from fiiction.dir.north import North
class start(hCavern):
    # The player's first room; opens north to second_room and holds the rock.
    room_name = "a cave"
    room_init_description = """You fall into a small hole and end up in a cave.\n
Don'task me, I just work here..."""
    room_description = "It is a very dark cave. What else do you want?"
    def create_room(self):
        # Start from the all-walls cavern, then add a real exit and an item.
        hCavern.create_room(self)
        self.exits["north"] = North(self.game.rooms['second_room']);
        self.things["rock"] = self.game.things["rock"];
| Python |
#coding=utf-8
"""
Author: Maple
"""
import os
import sys
import wmi
import wx
"""
Get Windows partition-list:
Return: A list of partition-instance.
"""
def get_partition_info():
    """Return the logical disks behind every partition of every physical drive.

    Walks WMI's disk associations:
    physical drive -> partition -> logical disk.
    """
    conn = wmi.WMI()
    partition_list = []
    for disk in conn.Win32_DiskDrive():
        for part in disk.associators("Win32_DiskDriveToDiskPartition"):
            partition_list.extend(
                part.associators("Win32_LogicalDiskToPartition"))
    return partition_list
"""
File index generator:
dir_list -> Directory list
Return: A file index generator
"""
def build_file_index(dir_list):
    """Yield the full path of every file found under each directory in *dir_list*."""
    for root_dir in dir_list:
        # Normalise the root so joined paths are well-formed.
        if not root_dir.endswith(os.sep):
            root_dir = root_dir + os.sep
        for dirpath, dirnames, filenames in os.walk(root_dir):
            for filename in filenames:
                yield os.path.join(dirpath, filename)
"""
File filter:
file_list -> List or Iterator
text -> Filter text
mode -> Match file or directory
match -> Match method, starts, ends , full or in
Return: A file generator of filtered
"""
def get_files(file_list=(), text='', mode='file', match='starts'):
    """Lazily filter *file_list* (an iterable of paths), yielding matches.

    text  -- the filter text
    mode  -- 'dir':  *text* must equal one of the path's directory parts;
             'file': match *text* against the final path component.
    match -- file-name match method: 'starts' | 'ends' | 'full' | 'in'.

    FIX: matching used to be done by building source strings and eval()ing
    them, which was needlessly unsafe, broke on quotes in file names, and
    raised NameError for an unknown *match*.  Direct string methods are
    used instead (an unknown *match* now simply matches nothing).  The
    default is an immutable () rather than a shared mutable [].
    """
    for fpath in file_list:
        parts = fpath.split(os.sep)
        if mode == 'dir':
            if text in parts[:-1]:
                yield fpath
        if mode == 'file':
            name = parts[-1]
            if match == 'starts':
                matched = name.startswith(text)
            elif match == 'ends':
                matched = name.endswith(text)
            elif match == 'full':
                matched = text == name
            elif match == 'in':
                matched = text in name
            else:
                matched = False
            if matched:
                yield fpath
| Python |
#!/usr/bin/env python
"""
FCKeditor - The text editor for Internet - http://www.fckeditor.net
Copyright (C) 2003-2007 Frederico Caldeira Knabben
== BEGIN LICENSE ==
Licensed under the terms of any of the following licenses at your
choice:
- GNU General Public License Version 2 or later (the "GPL")
http://www.gnu.org/licenses/gpl.html
- GNU Lesser General Public License Version 2.1 or later (the "LGPL")
http://www.gnu.org/licenses/lgpl.html
- Mozilla Public License Version 1.1 or later (the "MPL")
http://www.mozilla.org/MPL/MPL-1.1.html
== END LICENSE ==
Connector for Python.
Tested With:
Standard:
Python 2.3.3
Zope:
Zope Version: (Zope 2.8.1-final, python 2.3.5, linux2)
Python Version: 2.3.5 (#4, Mar 10 2005, 01:40:25)
[GCC 3.3.3 20040412 (Red Hat Linux 3.3.3-7)]
System Platform: linux2
"""
"""
Author Notes (04 December 2005):
This module has gone through quite a few phases of change. Obviously,
I am only supporting that part of the code that I use. Initially
I had the upload directory as a part of zope (ie. uploading files
directly into Zope), before realising that there were too many
complex intricacies within Zope to deal with. Zope is one ugly piece
of code. So I decided to complement Zope by an Apache server (which
I had running anyway, and doing nothing). So I mapped all uploads
from an arbitrary server directory to an arbitrary web directory.
All the FCKeditor uploading occurred this way, and I didn't have to
stuff around with fiddling with Zope objects and the like (which are
terribly complex and something you don't want to do - trust me).
Maybe a Zope expert can touch up the Zope components. In the end,
I had FCKeditor loaded in Zope (probably a bad idea as well), and
I replaced the connector.py with an alias to a server module.
Right now, all Zope components will simple remain as is because
I've had enough of Zope.
See notes right at the end of this file for how I aliased out of Zope.
Anyway, most of you probably wont use Zope, so things are pretty
simple in that regard.
Typically, SERVER_DIR is the root of WEB_DIR (not necessarily).
Most definitely, SERVER_USERFILES_DIR points to WEB_USERFILES_DIR.
"""
import cgi
import re
import os
import string
"""
escape
Converts the special characters '<', '>', and '&'.
RFC 1866 specifies that these characters be represented
in HTML as < > and & respectively. In Python
1.5 we use the new string.replace() function for speed.
"""
def escape(text, replace=None):
    """HTML-escape &, <, >, and double-quote in *text*.

    FIX: the replacement strings had been entity-decoded at some point, so
    each character was being "replaced" by itself (a no-op); the proper
    HTML entities are restored.  The *replace* parameter is kept for
    backward compatibility (any (text, old, new) callable); by default
    the str.replace method is used instead of the long-removed
    string.replace module function.
    """
    if replace is None:
        replace = lambda s, old, new: s.replace(old, new)
    text = replace(text, '&', '&amp;')   # must be done 1st
    text = replace(text, '<', '&lt;')
    text = replace(text, '>', '&gt;')
    text = replace(text, '"', '&quot;')
    return text
"""
getFCKeditorConnector
Creates a new instance of an FCKeditorConnector, and runs it
"""
def getFCKeditorConnector(context=None):
    """Build an FCKeditorConnector for *context* and run it immediately."""
    # Called from Zope. Passes the context through.
    return FCKeditorConnector(context=context).run()
"""
FCKeditorRequest
A wrapper around the request object
Can handle normal CGI request, or a Zope request
Extend as required
"""
class FCKeditorRequest(object):
    """Uniform wrapper over a Zope REQUEST or a CGI FieldStorage.

    Pass a Zope context to read from context.REQUEST; with no context a
    cgi.FieldStorage() is built from the current CGI environment.
    """
    def __init__(self, context=None):
        if (context is not None):
            r = context.REQUEST
        else:
            r = cgi.FieldStorage()
        self.context = context
        self.request = r
    def isZope(self):
        """True when wrapping a Zope request rather than CGI."""
        return self.context is not None
    def has_key(self, key):
        # FIX: use the `in` protocol instead of calling the deprecated
        # has_key method on the wrapped object; works for both dict-like
        # Zope requests and FieldStorage.
        return key in self.request
    def get(self, key, default=None):
        """Return the value for *key*, or *default* when absent."""
        if (self.isZope()):
            return self.request.get(key, default)
        # CGI: FieldStorage entries are wrapped; unwrap .value
        if key in self.request:
            return self.request[key].value
        return default
"""
FCKeditorConnector
The connector class
"""
class FCKeditorConnector(object):
# Configuration for FCKEditor
# can point to another server here, if linked correctly
#WEB_HOST = "http://127.0.0.1/"
WEB_HOST = ""
SERVER_DIR = "/var/www/html/"
WEB_USERFILES_FOLDER = WEB_HOST + "upload/"
SERVER_USERFILES_FOLDER = SERVER_DIR + "upload/"
# Allow access (Zope)
__allow_access_to_unprotected_subobjects__ = 1
# Class Attributes
parentFolderRe = re.compile("[\/][^\/]+[\/]?$")
"""
Constructor
"""
def __init__(self, context=None):
    """Set up paths, the wrapped request, and the extension policy.

    context -- a Zope context object, or None when running as plain CGI.
    """
    # The given root path will NOT be shown to the user
    # Only the userFilesPath will be shown
    # Instance Attributes
    self.context = context
    self.request = FCKeditorRequest(context=context)
    self.rootPath = self.SERVER_DIR
    self.userFilesFolder = self.SERVER_USERFILES_FOLDER
    self.webUserFilesFolder = self.WEB_USERFILES_FOLDER
    # Enables / Disables the connector
    self.enabled = False # Set to True to enable this connector
    # Zope contexts resolved lazily by getZopeRootContext()/getZopeUploadContext()
    self.zopeRootContext = None
    self.zopeUploadContext = None
    # Copied from php module =)
    # Per-resource-type upload policy: allowed None means "everything not
    # explicitly denied"; the denied lists block executable/script uploads.
    self.allowedExtensions = {
        "File": None,
        "Image": None,
        "Flash": None,
        "Media": None
    }
    self.deniedExtensions = {
        "File": [ "html","htm","php","php2","php3","php4","php5","phtml","pwml","inc","asp","aspx","ascx","jsp","cfm","cfc","pl","bat","exe","com","dll","vbs","js","reg","cgi","htaccess","asis" ],
        "Image": [ "html","htm","php","php2","php3","php4","php5","phtml","pwml","inc","asp","aspx","ascx","jsp","cfm","cfc","pl","bat","exe","com","dll","vbs","js","reg","cgi","htaccess","asis" ],
        "Flash": [ "html","htm","php","php2","php3","php4","php5","phtml","pwml","inc","asp","aspx","ascx","jsp","cfm","cfc","pl","bat","exe","com","dll","vbs","js","reg","cgi","htaccess","asis" ],
        "Media": [ "html","htm","php","php2","php3","php4","php5","phtml","pwml","inc","asp","aspx","ascx","jsp","cfm","cfc","pl","bat","exe","com","dll","vbs","js","reg","cgi","htaccess","asis" ]
    }
"""
Zope specific functions
"""
def isZope(self):
    """True when running inside Zope (a context object was supplied)."""
    return self.context is not None
def getZopeRootContext(self):
    # Resolve and cache the Zope physical root (lazy, Zope-only).
    if self.zopeRootContext is None:
        self.zopeRootContext = self.context.getPhysicalRoot()
    return self.zopeRootContext
def getZopeUploadContext(self):
    """Walk from the Zope root to the upload folder, caching the result.

    FIX: replaced the `<>` inequality operator (removed in Python 3)
    with `!=`; behaviour is identical on Python 2.
    """
    if self.zopeUploadContext is None:
        folderNames = self.userFilesFolder.split("/")
        c = self.getZopeRootContext()
        for folderName in folderNames:
            # skip empty components from leading/trailing slashes
            if (folderName != ""):
                c = c[folderName]
        self.zopeUploadContext = c
    return self.zopeUploadContext
"""
Generic manipulation functions
"""
def getUserFilesFolder(self):
return self.userFilesFolder
def getWebUserFilesFolder(self):
return self.webUserFilesFolder
def getAllowedExtensions(self, resourceType):
return self.allowedExtensions[resourceType]
def getDeniedExtensions(self, resourceType):
return self.deniedExtensions[resourceType]
def removeFromStart(self, string, char):
    # NOTE(review): lstrip removes *all* leading occurrences of char, not
    # just one; that is fine for the single '/' it is used with here.
    # (The parameter name shadows the `string` module, kept for API compat.)
    return string.lstrip(char)
def removeFromEnd(self, string, char):
    # Same caveat as removeFromStart, applied to the end of the string.
    return string.rstrip(char)
def convertToXmlAttribute(self, value):
    """Escape *value* for safe use as an XML attribute; None becomes ""."""
    if value is None:
        value = ""
    return escape(value)
def convertToPath(self, path):
    """Return *path* guaranteed to end with a trailing slash.

    FIX: replaced the Python-3-invalid `<>` operator and the path[-1]
    indexing, which raised IndexError on an empty string (an empty path
    now yields "/").
    """
    if path.endswith("/"):
        return path
    return path + "/"
def getUrlFromPath(self, resourceType, path):
    """Build the server-side URL for *path* within *resourceType*."""
    if (resourceType is None) or (resourceType == ''):
        # No resource type: append path directly to the user-files folder.
        return "%s%s" % (self.removeFromEnd(self.getUserFilesFolder(), '/'), path)
    return "%s%s%s" % (self.getUserFilesFolder(), resourceType, path)
def getWebUrlFromPath(self, resourceType, path):
    """Build the web-visible URL for *path* within *resourceType*."""
    if (resourceType is None) or (resourceType == ''):
        # No resource type: append path directly to the web folder.
        return "%s%s" % (self.removeFromEnd(self.getWebUserFilesFolder(), '/'), path)
    return "%s%s%s" % (self.getWebUserFilesFolder(), resourceType, path)
def removeExtension(self, fileName):
index = fileName.rindex(".")
newFileName = fileName[0:index]
return newFileName
def getExtension(self, fileName):
index = fileName.rindex(".") + 1
fileExtension = fileName[index:]
return fileExtension
def getParentFolder(self, folderPath):
parentFolderPath = self.parentFolderRe.sub('', folderPath)
return parentFolderPath
"""
serverMapFolder
Purpose: works out the folder map on the server
"""
def serverMapFolder(self, resourceType, folderPath):
    """Map a virtual folder path to its server-side directory.

    Side effect: ensures the per-resource-type directory exists.
    Returns getUserFilesFolder() + resourceType + "/" + folderPath
    (leading slashes of folderPath stripped).
    """
    # Get the resource type directory
    resourceTypeFolder = "%s%s/" % (
        self.getUserFilesFolder(),
        resourceType
    )
    # Ensure that the directory exists
    self.createServerFolder(resourceTypeFolder)
    # Return the resource type directory combined with the
    # required path
    return "%s%s" % (
        resourceTypeFolder,
        self.removeFromStart(folderPath, '/')
    )
"""
createServerFolder
Purpose: physically creates a folder on the server
"""
def createServerFolder(self, folderPath):
    """Recursively create *folderPath* on disk.

    Returns None on success (or when the directory already exists),
    or the error message propagated from a failed parent creation.
    Raises OSError when a non-directory with the same name exists.
    """
    # Create missing ancestors first (recursive).
    parentFolderPath = self.getParentFolder(folderPath)
    if not os.path.exists(parentFolderPath):
        errorMsg = self.createServerFolder(parentFolderPath)
        if errorMsg is not None:
            return errorMsg
    if not os.path.exists(folderPath):
        os.mkdir(folderPath)
        # 493 == mode 0755 (rwxr-xr-x); spelled in decimal so the
        # literal is valid on both Python 2 and Python 3.
        os.chmod(folderPath, 493)
        return None
    if os.path.isdir(folderPath):
        return None
    # BUG FIX: the original raised a plain string, which is a
    # TypeError on Python >= 2.6; raise a real exception instead.
    raise OSError("createServerFolder: Non-folder of same name already exists")
"""
getRootPath
Purpose: returns the root path on the server
"""
def getRootPath(self):
    """Return the server root path (SERVER_DIR); never shown to users."""
    return self.rootPath
"""
setXmlHeaders
Purpose: to prepare the headers for the xml to return
"""
def setXmlHeaders(self):
    """Send HTTP headers for an XML response and end the header block.

    Only Content-Type is set; the cache-control lines below were
    disabled by the original author and are kept for reference.
    """
    #now = self.context.BS_get_now()
    #yesterday = now - 1
    self.setHeader("Content-Type", "text/xml")
    #self.setHeader("Expires", yesterday)
    #self.setHeader("Last-Modified", now)
    #self.setHeader("Cache-Control", "no-store, no-cache, must-revalidate")
    self.printHeaders()
    return
def setHeader(self, key, value):
    """Set one HTTP response header.

    Under Zope this goes through the RESPONSE object; in plain CGI
    mode the header line is printed to stdout.
    """
    if (self.isZope()):
        self.context.REQUEST.RESPONSE.setHeader(key, value)
    else:
        print "%s: %s" % (key, value)
    return
def printHeaders(self):
    """Terminate the CGI header block (no-op under Zope)."""
    # For non-Zope requests, we need to print an empty line
    # to denote the end of headers
    if (not(self.isZope())):
        print ""
"""
createXmlFooter
Purpose: returns the xml header
"""
def createXmlHeader(self, command, resourceType, currentFolder):
    """Emit HTTP headers and return the opening XML of a connector reply."""
    self.setXmlHeaders()
    parts = []
    # XML declaration
    parts.append("""<?xml version="1.0" encoding="utf-8" ?>""")
    # Root node, echoing the request parameters
    parts.append(
        """<Connector command="%s" resourceType="%s">""" % (command, resourceType)
    )
    # Current folder, with both its virtual path and its web URL
    url = self.getWebUrlFromPath(resourceType, currentFolder)
    parts.append(
        """<CurrentFolder path="%s" url="%s" />""" % (
            self.convertToXmlAttribute(currentFolder),
            self.convertToXmlAttribute(url),
        )
    )
    return "".join(parts)
"""
createXmlFooter
Purpose: returns the xml footer
"""
def createXmlFooter(self):
    """Return the closing tag matching createXmlHeader()."""
    return """</Connector>"""
"""
sendError
Purpose: in the event of an error, return an xml based error
"""
def sendError(self, number, text):
    """Emit headers and return a standalone XML error document.

    *number* is the FCKeditor error code; *text* a human-readable
    description.
    """
    self.setXmlHeaders()
    s = ""
    # Create the XML document header
    s += """<?xml version="1.0" encoding="utf-8" ?>"""
    s += """<Connector>"""
    # BUG FIX: escape the message so quotes/&/< in *text* cannot break
    # the XML attribute (the original interpolated it raw).
    s += """<Error number="%s" text="%s" />""" % (
        number,
        self.convertToXmlAttribute(text)
    )
    s += """</Connector>"""
    return s
"""
getFolders
Purpose: command to receive a list of folders
"""
def getFolders(self, resourceType, currentFolder):
    """Dispatch the GetFolders command to the Zope or filesystem backend."""
    if self.isZope():
        return self.getZopeFolders(resourceType, currentFolder)
    return self.getNonZopeFolders(resourceType, currentFolder)
def getZopeFolders(self, resourceType, currentFolder):
    """List sub-folders of the current Zope folder as <Folder/> nodes."""
    pieces = ["""<Folders>"""]
    zopeFolder = self.findZopeFolder(resourceType, currentFolder)
    for name, obj in zopeFolder.objectItems(["Folder"]):
        pieces.append(
            """<Folder name="%s" />""" % self.convertToXmlAttribute(name)
        )
    pieces.append("""</Folders>""")
    return "".join(pieces)
def getNonZopeFolders(self, resourceType, currentFolder):
    """List sub-directories of the mapped server path as <Folder/> nodes."""
    serverPath = self.serverMapFolder(resourceType, currentFolder)
    pieces = ["""<Folders>"""]
    for entry in os.listdir(serverPath):
        if os.path.isdir(os.path.join(serverPath, entry)):
            pieces.append(
                """<Folder name="%s" />""" % self.convertToXmlAttribute(entry)
            )
    pieces.append("""</Folders>""")
    return "".join(pieces)
"""
getFoldersAndFiles
Purpose: command to receive a list of folders and files
"""
def getFoldersAndFiles(self, resourceType, currentFolder):
    """Dispatch GetFoldersAndFiles to the Zope or filesystem backend."""
    if (self.isZope()):
        return self.getZopeFoldersAndFiles(resourceType, currentFolder)
    else:
        return self.getNonZopeFoldersAndFiles(resourceType, currentFolder)
def getNonZopeFoldersAndFiles(self, resourceType, currentFolder):
    """Return <Folders> and <Files> XML for the mapped server directory.

    File sizes are reported in bytes.
    """
    # Map the virtual path to our local server
    serverPath = self.serverMapFolder(resourceType, currentFolder)
    # Open the folders / files node
    folders = """<Folders>"""
    files = """<Files>"""
    for someObject in os.listdir(serverPath):
        someObjectPath = os.path.join(serverPath, someObject)
        if os.path.isdir(someObjectPath):
            folders += """<Folder name="%s" />""" % (
                self.convertToXmlAttribute(someObject)
            )
        elif os.path.isfile(someObjectPath):
            # BUG FIX: reuse the stat result instead of calling
            # os.path.getsize() a second time (the local was unused).
            size = os.path.getsize(someObjectPath)
            files += """<File name="%s" size="%s" />""" % (
                self.convertToXmlAttribute(someObject),
                size
            )
    # Close the folders / files node
    folders += """</Folders>"""
    files += """</Files>"""
    return folders + files
def getZopeFoldersAndFiles(self, resourceType, currentFolder):
    """Concatenate the Zope folder and file listings."""
    return (
        self.getZopeFolders(resourceType, currentFolder)
        + self.getZopeFiles(resourceType, currentFolder)
    )
def getZopeFiles(self, resourceType, currentFolder):
    """Return a <Files> XML listing of File/Image objects in the folder.

    Sizes are reported as (bytes / 1024) + 1, i.e. KB rounded up.
    """
    # Open the files node
    s = ""
    s += """<Files>"""
    zopeFolder = self.findZopeFolder(resourceType, currentFolder)
    for (name, o) in zopeFolder.objectItems(["File","Image"]):
        s += """<File name="%s" size="%s" />""" % (
            self.convertToXmlAttribute(name),
            ((o.get_size() / 1024) + 1)
        )
    # Close the files node
    s += """</Files>"""
    return s
def findZopeFolder(self, resourceType, folderName):
    """Resolve the Zope folder addressed by resourceType/folderName.

    The resource-type level is created on demand; deeper components of
    folderName are NOT auto-created.
    NOTE(review): the bare except treats ANY failure of item access as
    "folder missing" -- confirm no other Zope errors can occur here.
    """
    # returns the context of the resource / folder
    zopeFolder = self.getZopeUploadContext()
    folderName = self.removeFromStart(folderName, "/")
    folderName = self.removeFromEnd(folderName, "/")
    if (resourceType <> ""):
        try:
            zopeFolder = zopeFolder[resourceType]
        except:
            # Missing resource-type folder: create it, then descend.
            zopeFolder.manage_addProduct["OFSP"].manage_addFolder(id=resourceType, title=resourceType)
            zopeFolder = zopeFolder[resourceType]
    if (folderName <> ""):
        folderNames = folderName.split("/")
        for folderName in folderNames:
            zopeFolder = zopeFolder[folderName]
    return zopeFolder
"""
createFolder
Purpose: command to create a new folder
"""
def createFolder(self, resourceType, currentFolder):
    """Dispatch the CreateFolder command to the Zope or filesystem backend."""
    if self.isZope():
        return self.createZopeFolder(resourceType, currentFolder)
    return self.createNonZopeFolder(resourceType, currentFolder)
def createZopeFolder(self, resourceType, currentFolder):
    """Create the requested sub-folder inside the current Zope folder.

    Returns an <Error/> node; number 0 means success, 102 means the
    NewFolderName parameter was missing.
    """
    # Find out where we are
    zopeFolder = self.findZopeFolder(resourceType, currentFolder)
    errorNo = 0
    errorMsg = ""
    if self.request.has_key("NewFolderName"):
        newFolder = self.request.get("NewFolderName", None)
        zopeFolder.manage_addProduct["OFSP"].manage_addFolder(id=newFolder, title=newFolder)
    else:
        errorNo = 102
    return """<Error number="%s" originalDescription="%s" />""" % (
        errorNo,
        self.convertToXmlAttribute(errorMsg)
    )
def createNonZopeFolder(self, resourceType, currentFolder):
    """Create the requested sub-directory on the server filesystem.

    Error numbers: 0 success, 102 missing parameter, 103 unexpected
    failure, 110 createServerFolder reported a message.
    """
    errorNo = 0
    errorMsg = ""
    if not self.request.has_key("NewFolderName"):
        errorNo = 102
    else:
        newFolder = self.request.get("NewFolderName", None)
        base = self.serverMapFolder(resourceType, currentFolder)
        try:
            errorMsg = self.createServerFolder(base + newFolder)
            if errorMsg is not None:
                errorNo = 110
        except:
            errorNo = 103
    return """<Error number="%s" originalDescription="%s" />""" % (
        errorNo,
        self.convertToXmlAttribute(errorMsg)
    )
"""
getFileName
Purpose: helper function to extrapolate the filename
"""
def getFileName(self, filename):
    """Return the last path component of *filename*.

    Handles both "/" and "\\" separators, so Windows-style client
    paths (as sent by some browsers) reduce to the bare file name.
    """
    for separator in ("/", "\\"):
        filename = filename.split(separator)[-1]
    return filename
"""
fileUpload
Purpose: command to upload files to server
"""
def fileUpload(self, resourceType, currentFolder):
    """Dispatch the FileUpload command to the Zope or filesystem backend."""
    if (self.isZope()):
        return self.zopeFileUpload(resourceType, currentFolder)
    else:
        return self.nonZopeFileUpload(resourceType, currentFolder)
def zopeFileUpload(self, resourceType, currentFolder, count=None):
    """Store the uploaded "NewFile" as a Zope File object.

    On an id collision the except branch retries recursively with
    "name.N.ext", incrementing N.
    NOTE(review): the recursion is unbounded and the bare except hides
    any non-collision Zope error -- confirm before relying on this.
    """
    zopeFolder = self.findZopeFolder(resourceType, currentFolder)
    file = self.request.get("NewFile", None)
    fileName = self.getFileName(file.filename)
    fileNameOnly = self.removeExtension(fileName)
    fileExtension = self.getExtension(fileName).lower()
    if (count):
        nid = "%s.%s.%s" % (fileNameOnly, count, fileExtension)
    else:
        nid = fileName
    title = nid
    try:
        zopeFolder.manage_addProduct['OFSP'].manage_addFile(
            id=nid,
            title=title,
            file=file.read()
        )
    except:
        if (count):
            count += 1
        else:
            count = 1
        self.zopeFileUpload(resourceType, currentFolder, count)
    return
def nonZopeFileUpload(self, resourceType, currentFolder):
    """Store the uploaded "NewFile" on the server filesystem.

    Enforces the allowed/denied extension lists and never overwrites:
    on a name clash the file is saved as "name(N).ext" and error 201
    reports the rename.  Returns a <script> block that notifies the
    upload frame.  Error numbers: 0 ok, 201 renamed, 202 no file,
    203 extension refused.
    """
    errorNo = 0
    if self.request.has_key("NewFile"):
        # newFile has all the contents we need
        newFile = self.request.get("NewFile", "")
        # Get the file name
        newFileName = newFile.filename
        newFileNameOnly = self.removeExtension(newFileName)
        newFileExtension = self.getExtension(newFileName).lower()
        allowedExtensions = self.getAllowedExtensions(resourceType)
        deniedExtensions = self.getDeniedExtensions(resourceType)
        if (allowedExtensions is not None):
            # Whitelist mode: only listed extensions pass
            isAllowed = (newFileExtension in allowedExtensions)
        elif (deniedExtensions is not None):
            # Blacklist mode: everything passes except listed extensions
            isAllowed = (newFileExtension not in deniedExtensions)
        else:
            # No extension limitations
            isAllowed = True
        if (isAllowed):
            if (self.isZope()):
                # Upload into zope
                self.zopeFileUpload(resourceType, currentFolder)
            else:
                # Map the virtual path to the local server path
                currentFolderPath = self.serverMapFolder(
                    resourceType,
                    currentFolder
                )
                # BUG FIX: the original broke out of the loop on the
                # FIRST collision without ever writing the file, losing
                # the upload.  Probe until a free name is found, then
                # actually save there.
                i = 0
                newFilePath = "%s%s" % (currentFolderPath, newFileName)
                while os.path.exists(newFilePath):
                    i += 1
                    # 201 tells the client the file was renamed
                    errorNo = 201
                    newFilePath = "%s%s(%s).%s" % (
                        currentFolderPath,
                        newFileNameOnly,
                        i,
                        newFileExtension
                    )
                # BUG FIX: open in binary mode so uploads are not
                # corrupted by newline translation, and always close
                # the handle (the original leaked it).
                fileHandle = open(newFilePath, 'wb')
                try:
                    while 1:
                        line = newFile.readline()
                        if not line: break
                        fileHandle.write(line)
                finally:
                    fileHandle.close()
                # 511 == mode 0777 (decimal so the literal is valid on
                # both Python 2 and Python 3)
                os.chmod(newFilePath, 511)
        else:
            newFileName = "Extension not allowed"
            errorNo = 203
    else:
        newFileName = "No File"
        errorNo = 202
    return """
<script type="text/javascript">
window.parent.frames["frmUpload"].OnUploadCompleted(%s,"%s");
</script>
""" % (
        errorNo,
        newFileName.replace('"', "'")
    )
def run(self):
    """Entry point: validate the request, dispatch the command and
    return the response body (XML, upload script, or error document).

    Returns None when a mandatory parameter is missing; any exception
    is caught and rendered as a plain "ERROR: ..." string.
    """
    s = ""
    try:
        # Check if this is disabled
        if not(self.enabled):
            return self.sendError(1, "This connector is disabled. Please check the connector configurations and try again")
        # Make sure we have valid inputs
        if not(
            (self.request.has_key("Command")) and
            (self.request.has_key("Type")) and
            (self.request.has_key("CurrentFolder"))
        ):
            return
        # Get command
        command = self.request.get("Command", None)
        # Get resource type
        resourceType = self.request.get("Type", None)
        # folder syntax must start and end with "/"
        currentFolder = self.request.get("CurrentFolder", None)
        if (currentFolder[-1] <> "/"):
            currentFolder += "/"
        if (currentFolder[0] <> "/"):
            currentFolder = "/" + currentFolder
        # Check for invalid paths (directory traversal)
        if (".." in currentFolder):
            return self.sendError(102, "")
        # File upload doesn't have to return XML, so intercept
        # here
        if (command == "FileUpload"):
            return self.fileUpload(resourceType, currentFolder)
        # Begin XML
        s += self.createXmlHeader(command, resourceType, currentFolder)
        # Execute the command
        if (command == "GetFolders"):
            f = self.getFolders
        elif (command == "GetFoldersAndFiles"):
            f = self.getFoldersAndFiles
        elif (command == "CreateFolder"):
            f = self.createFolder
        else:
            f = None
        if (f is not None):
            s += f(resourceType, currentFolder)
        s += self.createXmlFooter()
    except Exception, e:
        s = "ERROR: %s" % e
    return s
# Running from command line
if __name__ == '__main__':
    # To test the output, uncomment the standard headers
    #print "Content-Type: text/html"
    #print ""
    # No Zope context: run as a plain CGI request and print the response.
    print getFCKeditorConnector()
"""
Running from zope, you will need to modify this connector.
If you have uploaded the FCKeditor into Zope (like me), you need to
move this connector out of Zope, and replace the "connector" with an
alias as below. The key to it is to pass the Zope context in, as
we then have a link to the Zope context.
## Script (Python) "connector.py"
##bind container=container
##bind context=context
##bind namespace=
##bind script=script
##bind subpath=traverse_subpath
##parameters=*args, **kws
##title=ALIAS
##
import Products.connector as connector
return connector.getFCKeditorConnector(context=context).run()
"""
| Python |
#!/usr/bin/env python
"""
FCKeditor - The text editor for Internet - http://www.fckeditor.net
Copyright (C) 2003-2007 Frederico Caldeira Knabben
== BEGIN LICENSE ==
Licensed under the terms of any of the following licenses at your
choice:
- GNU General Public License Version 2 or later (the "GPL")
http://www.gnu.org/licenses/gpl.html
- GNU Lesser General Public License Version 2.1 or later (the "LGPL")
http://www.gnu.org/licenses/lgpl.html
- Mozilla Public License Version 1.1 or later (the "MPL")
http://www.mozilla.org/MPL/MPL-1.1.html
== END LICENSE ==
Connector for Python.
Tested With:
Standard:
Python 2.3.3
Zope:
Zope Version: (Zope 2.8.1-final, python 2.3.5, linux2)
Python Version: 2.3.5 (#4, Mar 10 2005, 01:40:25)
[GCC 3.3.3 20040412 (Red Hat Linux 3.3.3-7)]
System Platform: linux2
"""
"""
Author Notes (04 December 2005):
This module has gone through quite a few phases of change. Obviously,
I am only supporting that part of the code that I use. Initially
I had the upload directory as a part of zope (ie. uploading files
directly into Zope), before realising that there were too many
complex intricacies within Zope to deal with. Zope is one ugly piece
of code. So I decided to complement Zope by an Apache server (which
I had running anyway, and doing nothing). So I mapped all uploads
from an arbitrary server directory to an arbitrary web directory.
All the FCKeditor uploading occurred this way, and I didn't have to
stuff around with fiddling with Zope objects and the like (which are
terribly complex and something you don't want to do - trust me).
Maybe a Zope expert can touch up the Zope components. In the end,
I had FCKeditor loaded in Zope (probably a bad idea as well), and
I replaced the connector.py with an alias to a server module.
Right now, all Zope components will simply remain as is because
I've had enough of Zope.
See notes right at the end of this file for how I aliased out of Zope.
Anyway, most of you probably wont use Zope, so things are pretty
simple in that regard.
Typically, SERVER_DIR is the root of WEB_DIR (not necessarily).
Most definitely, SERVER_USERFILES_DIR points to WEB_USERFILES_DIR.
"""
import cgi
import re
import os
import string
"""
escape
Converts the special characters '<', '>', and '&'.
RFC 1866 specifies that these characters be represented
in HTML as &lt; &gt; and &amp; respectively. In Python
1.5 we use the new string.replace() function for speed.
"""
def escape(text, replace=None):
    """Return *text* with &, <, > and " replaced by HTML entities.

    BUG FIX: the original replaced each character with itself (the
    entity names had been lost), making the function a no-op and
    leaving the generated XML open to breakage/injection.  '&' must be
    escaped first so already-produced entities are not double-escaped.

    *replace* may be a replace(text, old, new) callable; it defaults
    to plain string replacement (backward compatible with the old
    string.replace default).
    """
    if replace is None:
        replace = lambda s, old, new: s.replace(old, new)
    text = replace(text, '&', '&amp;')  # must be done 1st
    text = replace(text, '<', '&lt;')
    text = replace(text, '>', '&gt;')
    text = replace(text, '"', '&quot;')
    return text
"""
getFCKeditorConnector
Creates a new instance of an FCKeditorConnector, and runs it
"""
def getFCKeditorConnector(context=None):
    """Build a connector for the optional Zope *context* and run it."""
    return FCKeditorConnector(context=context).run()
"""
FCKeditorRequest
A wrapper around the request object
Can handle normal CGI request, or a Zope request
Extend as required
"""
class FCKeditorRequest(object):
    """Uniform accessor over a Zope REQUEST or a cgi.FieldStorage."""
    def __init__(self, context=None):
        self.context = context
        if context is not None:
            # Zope hands us its REQUEST mapping directly
            self.request = context.REQUEST
        else:
            # Plain CGI: parse the environment / stdin
            self.request = cgi.FieldStorage()
    def isZope(self):
        """True when wrapping a Zope request."""
        return self.context is not None
    def has_key(self, key):
        """Whether *key* was supplied with the request."""
        return self.request.has_key(key)
    def get(self, key, default=None):
        """Fetch *key*, returning *default* when absent.

        FieldStorage items carry their payload on .value; Zope
        already returns plain values.
        """
        if self.isZope():
            return self.request.get(key, default)
        if key in self.request.keys():
            return self.request[key].value
        return default
"""
FCKeditorConnector
The connector class
"""
class FCKeditorConnector(object):
    """FCKeditor file-manager connector.

    Serves the editor's file-browser commands (folder/file listing,
    folder creation, upload) either as a plain CGI script or inside
    Zope when a context object is supplied.
    """
    # Configuration for FCKEditor
    # can point to another server here, if linked correctly
    #WEB_HOST = "http://127.0.0.1/"
    WEB_HOST = ""
    SERVER_DIR = "/var/www/html/"
    WEB_USERFILES_FOLDER = WEB_HOST + "upload/"
    SERVER_USERFILES_FOLDER = SERVER_DIR + "upload/"
    # Allow access (Zope)
    __allow_access_to_unprotected_subobjects__ = 1
    # Strips the last path component (used to find a parent folder)
    parentFolderRe = re.compile("[\/][^\/]+[\/]?$")
    def __init__(self, context=None):
        """Set up configuration and request state.

        The given root path will NOT be shown to the user; only the
        userFilesFolder is.  *context* is the Zope context, or None
        for plain CGI operation.
        """
        self.context = context
        self.request = FCKeditorRequest(context=context)
        self.rootPath = self.SERVER_DIR
        self.userFilesFolder = self.SERVER_USERFILES_FOLDER
        self.webUserFilesFolder = self.WEB_USERFILES_FOLDER
        # Enables / Disables the connector
        self.enabled = False  # Set to True to enable this connector
        # Lazily-filled Zope caches
        self.zopeRootContext = None
        self.zopeUploadContext = None
        # Extension policy (copied from the php connector): no
        # whitelists, one shared blacklist of active/executable types.
        self.allowedExtensions = {
            "File": None,
            "Image": None,
            "Flash": None,
            "Media": None
        }
        blocked = [
            "html", "htm", "php", "php2", "php3", "php4", "php5",
            "phtml", "pwml", "inc", "asp", "aspx", "ascx", "jsp",
            "cfm", "cfc", "pl", "bat", "exe", "com", "dll", "vbs",
            "js", "reg", "cgi", "htaccess", "asis"
        ]
        self.deniedExtensions = {
            "File": blocked[:],
            "Image": blocked[:],
            "Flash": blocked[:],
            "Media": blocked[:]
        }
"""
Zope specific functions
"""
def isZope(self):
    """Return True when running inside Zope (a context object was supplied)."""
    # The context object, when present, is the Zope object.
    return self.context is not None
def getZopeRootContext(self):
    """Return (and lazily cache) the physical Zope root object."""
    if self.zopeRootContext is None:
        self.zopeRootContext = self.context.getPhysicalRoot()
    return self.zopeRootContext
def getZopeUploadContext(self):
    """Return (and lazily cache) the Zope folder that receives uploads.

    Walks each non-empty component of self.userFilesFolder down from
    the Zope root via item access.
    """
    if self.zopeUploadContext is None:
        folder = self.getZopeRootContext()
        for part in self.userFilesFolder.split("/"):
            if part != "":
                folder = folder[part]
        self.zopeUploadContext = folder
    return self.zopeUploadContext
"""
Generic manipulation functions
"""
def getUserFilesFolder(self):
    """Server-side directory that stores uploaded files."""
    return self.userFilesFolder
def getWebUserFilesFolder(self):
    """Web-visible URL prefix mapping to the upload directory."""
    return self.webUserFilesFolder
def getAllowedExtensions(self, resourceType):
    """Extension whitelist for *resourceType* (None means no whitelist)."""
    return self.allowedExtensions[resourceType]
def getDeniedExtensions(self, resourceType):
    """Extension blacklist for *resourceType* (None means no blacklist)."""
    return self.deniedExtensions[resourceType]
def removeFromStart(self, string, char):
    """Strip every leading occurrence of *char* (lstrip semantics)."""
    return string.lstrip(char)
def removeFromEnd(self, string, char):
    """Strip every trailing occurrence of *char* (rstrip semantics)."""
    return string.rstrip(char)
def convertToXmlAttribute(self, value):
    """Escape *value* (None becomes "") for use in an XML attribute."""
    if value is None:
        value = ""
    return escape(value)
def convertToPath(self, path):
    """Ensure *path* carries a trailing slash."""
    if path[-1] != "/":
        return path + "/"
    return path
def getUrlFromPath(self, resourceType, path):
    """Build the server-side URL for *path* under *resourceType*."""
    if resourceType is None or resourceType == '':
        # No type: drop the folder's trailing slash before appending.
        return self.removeFromEnd(self.getUserFilesFolder(), '/') + path
    return "%s%s%s" % (self.getUserFilesFolder(), resourceType, path)
def getWebUrlFromPath(self, resourceType, path):
    """Build the web-visible URL for *path* under *resourceType*."""
    if resourceType is None or resourceType == '':
        return self.removeFromEnd(self.getWebUserFilesFolder(), '/') + path
    return "%s%s%s" % (self.getWebUserFilesFolder(), resourceType, path)
def removeExtension(self, fileName):
    """Return *fileName* without its final extension.

    Raises ValueError when there is no dot at all.
    """
    return fileName[:fileName.rindex(".")]
def getExtension(self, fileName):
    """Return the text after the final dot of *fileName*.

    Raises ValueError when there is no dot at all.
    """
    return fileName[fileName.rindex(".") + 1:]
def getParentFolder(self, folderPath):
    """Strip the last path component, yielding the parent folder path."""
    return self.parentFolderRe.sub('', folderPath)
"""
serverMapFolder
Purpose: works out the folder map on the server
"""
def serverMapFolder(self, resourceType, folderPath):
    """Map a virtual folder path to its server-side directory.

    Side effect: ensures the per-resource-type directory exists.
    Returns getUserFilesFolder() + resourceType + "/" + folderPath
    (leading slashes of folderPath stripped).
    """
    # Get the resource type directory
    resourceTypeFolder = "%s%s/" % (
        self.getUserFilesFolder(),
        resourceType
    )
    # Ensure that the directory exists
    self.createServerFolder(resourceTypeFolder)
    # Return the resource type directory combined with the
    # required path
    return "%s%s" % (
        resourceTypeFolder,
        self.removeFromStart(folderPath, '/')
    )
"""
createServerFolder
Purpose: physically creates a folder on the server
"""
def createServerFolder(self, folderPath):
    """Recursively create *folderPath* on disk.

    Returns None on success (or when the directory already exists),
    or the error message propagated from a failed parent creation.
    Raises OSError when a non-directory with the same name exists.
    """
    # Create missing ancestors first (recursive).
    parentFolderPath = self.getParentFolder(folderPath)
    if not os.path.exists(parentFolderPath):
        errorMsg = self.createServerFolder(parentFolderPath)
        if errorMsg is not None:
            return errorMsg
    if not os.path.exists(folderPath):
        os.mkdir(folderPath)
        # 493 == mode 0755 (rwxr-xr-x); spelled in decimal so the
        # literal is valid on both Python 2 and Python 3.
        os.chmod(folderPath, 493)
        return None
    if os.path.isdir(folderPath):
        return None
    # BUG FIX: the original raised a plain string, which is a
    # TypeError on Python >= 2.6; raise a real exception instead.
    raise OSError("createServerFolder: Non-folder of same name already exists")
"""
getRootPath
Purpose: returns the root path on the server
"""
def getRootPath(self):
    """Return the server root path (SERVER_DIR); never shown to users."""
    return self.rootPath
"""
setXmlHeaders
Purpose: to prepare the headers for the xml to return
"""
def setXmlHeaders(self):
    """Send HTTP headers for an XML response and end the header block.

    Only Content-Type is set; the cache-control lines below were
    disabled by the original author and are kept for reference.
    """
    #now = self.context.BS_get_now()
    #yesterday = now - 1
    self.setHeader("Content-Type", "text/xml")
    #self.setHeader("Expires", yesterday)
    #self.setHeader("Last-Modified", now)
    #self.setHeader("Cache-Control", "no-store, no-cache, must-revalidate")
    self.printHeaders()
    return
def setHeader(self, key, value):
    """Set one HTTP response header.

    Under Zope this goes through the RESPONSE object; in plain CGI
    mode the header line is printed to stdout.
    """
    if (self.isZope()):
        self.context.REQUEST.RESPONSE.setHeader(key, value)
    else:
        print "%s: %s" % (key, value)
    return
def printHeaders(self):
    """Terminate the CGI header block (no-op under Zope)."""
    # For non-Zope requests, we need to print an empty line
    # to denote the end of headers
    if (not(self.isZope())):
        print ""
"""
createXmlFooter
Purpose: returns the xml header
"""
def createXmlHeader(self, command, resourceType, currentFolder):
    """Emit HTTP headers and return the opening XML of a connector reply."""
    self.setXmlHeaders()
    parts = []
    # XML declaration
    parts.append("""<?xml version="1.0" encoding="utf-8" ?>""")
    # Root node, echoing the request parameters
    parts.append(
        """<Connector command="%s" resourceType="%s">""" % (command, resourceType)
    )
    # Current folder, with both its virtual path and its web URL
    url = self.getWebUrlFromPath(resourceType, currentFolder)
    parts.append(
        """<CurrentFolder path="%s" url="%s" />""" % (
            self.convertToXmlAttribute(currentFolder),
            self.convertToXmlAttribute(url),
        )
    )
    return "".join(parts)
"""
createXmlFooter
Purpose: returns the xml footer
"""
def createXmlFooter(self):
    """Return the closing tag matching createXmlHeader()."""
    return """</Connector>"""
"""
sendError
Purpose: in the event of an error, return an xml based error
"""
def sendError(self, number, text):
    """Emit headers and return a standalone XML error document.

    *number* is the FCKeditor error code; *text* a human-readable
    description.
    """
    self.setXmlHeaders()
    s = ""
    # Create the XML document header
    s += """<?xml version="1.0" encoding="utf-8" ?>"""
    s += """<Connector>"""
    # BUG FIX: escape the message so quotes/&/< in *text* cannot break
    # the XML attribute (the original interpolated it raw).
    s += """<Error number="%s" text="%s" />""" % (
        number,
        self.convertToXmlAttribute(text)
    )
    s += """</Connector>"""
    return s
"""
getFolders
Purpose: command to receive a list of folders
"""
def getFolders(self, resourceType, currentFolder):
    """Dispatch the GetFolders command to the Zope or filesystem backend."""
    if self.isZope():
        return self.getZopeFolders(resourceType, currentFolder)
    return self.getNonZopeFolders(resourceType, currentFolder)
def getZopeFolders(self, resourceType, currentFolder):
    """List sub-folders of the current Zope folder as <Folder/> nodes."""
    pieces = ["""<Folders>"""]
    zopeFolder = self.findZopeFolder(resourceType, currentFolder)
    for name, obj in zopeFolder.objectItems(["Folder"]):
        pieces.append(
            """<Folder name="%s" />""" % self.convertToXmlAttribute(name)
        )
    pieces.append("""</Folders>""")
    return "".join(pieces)
def getNonZopeFolders(self, resourceType, currentFolder):
    """List sub-directories of the mapped server path as <Folder/> nodes."""
    serverPath = self.serverMapFolder(resourceType, currentFolder)
    pieces = ["""<Folders>"""]
    for entry in os.listdir(serverPath):
        if os.path.isdir(os.path.join(serverPath, entry)):
            pieces.append(
                """<Folder name="%s" />""" % self.convertToXmlAttribute(entry)
            )
    pieces.append("""</Folders>""")
    return "".join(pieces)
"""
getFoldersAndFiles
Purpose: command to receive a list of folders and files
"""
def getFoldersAndFiles(self, resourceType, currentFolder):
    """Dispatch GetFoldersAndFiles to the Zope or filesystem backend."""
    if (self.isZope()):
        return self.getZopeFoldersAndFiles(resourceType, currentFolder)
    else:
        return self.getNonZopeFoldersAndFiles(resourceType, currentFolder)
def getNonZopeFoldersAndFiles(self, resourceType, currentFolder):
    """Return <Folders> and <Files> XML for the mapped server directory.

    File sizes are reported in bytes.
    """
    # Map the virtual path to our local server
    serverPath = self.serverMapFolder(resourceType, currentFolder)
    # Open the folders / files node
    folders = """<Folders>"""
    files = """<Files>"""
    for someObject in os.listdir(serverPath):
        someObjectPath = os.path.join(serverPath, someObject)
        if os.path.isdir(someObjectPath):
            folders += """<Folder name="%s" />""" % (
                self.convertToXmlAttribute(someObject)
            )
        elif os.path.isfile(someObjectPath):
            # BUG FIX: reuse the stat result instead of calling
            # os.path.getsize() a second time (the local was unused).
            size = os.path.getsize(someObjectPath)
            files += """<File name="%s" size="%s" />""" % (
                self.convertToXmlAttribute(someObject),
                size
            )
    # Close the folders / files node
    folders += """</Folders>"""
    files += """</Files>"""
    return folders + files
def getZopeFoldersAndFiles(self, resourceType, currentFolder):
    """Concatenate the Zope folder and file listings."""
    return (
        self.getZopeFolders(resourceType, currentFolder)
        + self.getZopeFiles(resourceType, currentFolder)
    )
def getZopeFiles(self, resourceType, currentFolder):
    """Return a <Files> XML listing of File/Image objects in the folder.

    Sizes are reported as (bytes / 1024) + 1, i.e. KB rounded up.
    """
    # Open the files node
    s = ""
    s += """<Files>"""
    zopeFolder = self.findZopeFolder(resourceType, currentFolder)
    for (name, o) in zopeFolder.objectItems(["File","Image"]):
        s += """<File name="%s" size="%s" />""" % (
            self.convertToXmlAttribute(name),
            ((o.get_size() / 1024) + 1)
        )
    # Close the files node
    s += """</Files>"""
    return s
def findZopeFolder(self, resourceType, folderName):
    """Resolve the Zope folder addressed by resourceType/folderName.

    The resource-type level is created on demand; deeper components of
    folderName are NOT auto-created.
    NOTE(review): the bare except treats ANY failure of item access as
    "folder missing" -- confirm no other Zope errors can occur here.
    """
    # returns the context of the resource / folder
    zopeFolder = self.getZopeUploadContext()
    folderName = self.removeFromStart(folderName, "/")
    folderName = self.removeFromEnd(folderName, "/")
    if (resourceType <> ""):
        try:
            zopeFolder = zopeFolder[resourceType]
        except:
            # Missing resource-type folder: create it, then descend.
            zopeFolder.manage_addProduct["OFSP"].manage_addFolder(id=resourceType, title=resourceType)
            zopeFolder = zopeFolder[resourceType]
    if (folderName <> ""):
        folderNames = folderName.split("/")
        for folderName in folderNames:
            zopeFolder = zopeFolder[folderName]
    return zopeFolder
"""
createFolder
Purpose: command to create a new folder
"""
def createFolder(self, resourceType, currentFolder):
    """Dispatch the CreateFolder command to the Zope or filesystem backend."""
    if self.isZope():
        return self.createZopeFolder(resourceType, currentFolder)
    return self.createNonZopeFolder(resourceType, currentFolder)
def createZopeFolder(self, resourceType, currentFolder):
    """Create the requested sub-folder inside the current Zope folder.

    Returns an <Error/> node; number 0 means success, 102 means the
    NewFolderName parameter was missing.
    """
    # Find out where we are
    zopeFolder = self.findZopeFolder(resourceType, currentFolder)
    errorNo = 0
    errorMsg = ""
    if self.request.has_key("NewFolderName"):
        newFolder = self.request.get("NewFolderName", None)
        zopeFolder.manage_addProduct["OFSP"].manage_addFolder(id=newFolder, title=newFolder)
    else:
        errorNo = 102
    return """<Error number="%s" originalDescription="%s" />""" % (
        errorNo,
        self.convertToXmlAttribute(errorMsg)
    )
def createNonZopeFolder(self, resourceType, currentFolder):
    """Create the requested sub-directory on the server filesystem.

    Error numbers: 0 success, 102 missing parameter, 103 unexpected
    failure, 110 createServerFolder reported a message.
    """
    errorNo = 0
    errorMsg = ""
    if not self.request.has_key("NewFolderName"):
        errorNo = 102
    else:
        newFolder = self.request.get("NewFolderName", None)
        base = self.serverMapFolder(resourceType, currentFolder)
        try:
            errorMsg = self.createServerFolder(base + newFolder)
            if errorMsg is not None:
                errorNo = 110
        except:
            errorNo = 103
    return """<Error number="%s" originalDescription="%s" />""" % (
        errorNo,
        self.convertToXmlAttribute(errorMsg)
    )
"""
getFileName
Purpose: helper function to extrapolate the filename
"""
def getFileName(self, filename):
    """Return the last path component of *filename*.

    Handles both "/" and "\\" separators, so Windows-style client
    paths (as sent by some browsers) reduce to the bare file name.
    """
    for separator in ("/", "\\"):
        filename = filename.split(separator)[-1]
    return filename
"""
fileUpload
Purpose: command to upload files to server
"""
def fileUpload(self, resourceType, currentFolder):
    """Dispatch the FileUpload command to the Zope or filesystem backend."""
    if (self.isZope()):
        return self.zopeFileUpload(resourceType, currentFolder)
    else:
        return self.nonZopeFileUpload(resourceType, currentFolder)
def zopeFileUpload(self, resourceType, currentFolder, count=None):
    """Store the uploaded "NewFile" as a Zope File object.

    On an id collision the except branch retries recursively with
    "name.N.ext", incrementing N.
    NOTE(review): the recursion is unbounded and the bare except hides
    any non-collision Zope error -- confirm before relying on this.
    """
    zopeFolder = self.findZopeFolder(resourceType, currentFolder)
    file = self.request.get("NewFile", None)
    fileName = self.getFileName(file.filename)
    fileNameOnly = self.removeExtension(fileName)
    fileExtension = self.getExtension(fileName).lower()
    if (count):
        nid = "%s.%s.%s" % (fileNameOnly, count, fileExtension)
    else:
        nid = fileName
    title = nid
    try:
        zopeFolder.manage_addProduct['OFSP'].manage_addFile(
            id=nid,
            title=title,
            file=file.read()
        )
    except:
        if (count):
            count += 1
        else:
            count = 1
        self.zopeFileUpload(resourceType, currentFolder, count)
    return
def nonZopeFileUpload(self, resourceType, currentFolder):
    """Store the uploaded "NewFile" on the server filesystem.

    Enforces the allowed/denied extension lists and never overwrites:
    on a name clash the file is saved as "name(N).ext" and error 201
    reports the rename.  Returns a <script> block that notifies the
    upload frame.  Error numbers: 0 ok, 201 renamed, 202 no file,
    203 extension refused.
    """
    errorNo = 0
    if self.request.has_key("NewFile"):
        # newFile has all the contents we need
        newFile = self.request.get("NewFile", "")
        # Get the file name
        newFileName = newFile.filename
        newFileNameOnly = self.removeExtension(newFileName)
        newFileExtension = self.getExtension(newFileName).lower()
        allowedExtensions = self.getAllowedExtensions(resourceType)
        deniedExtensions = self.getDeniedExtensions(resourceType)
        if (allowedExtensions is not None):
            # Whitelist mode: only listed extensions pass
            isAllowed = (newFileExtension in allowedExtensions)
        elif (deniedExtensions is not None):
            # Blacklist mode: everything passes except listed extensions
            isAllowed = (newFileExtension not in deniedExtensions)
        else:
            # No extension limitations
            isAllowed = True
        if (isAllowed):
            if (self.isZope()):
                # Upload into zope
                self.zopeFileUpload(resourceType, currentFolder)
            else:
                # Map the virtual path to the local server path
                currentFolderPath = self.serverMapFolder(
                    resourceType,
                    currentFolder
                )
                # BUG FIX: the original broke out of the loop on the
                # FIRST collision without ever writing the file, losing
                # the upload.  Probe until a free name is found, then
                # actually save there.
                i = 0
                newFilePath = "%s%s" % (currentFolderPath, newFileName)
                while os.path.exists(newFilePath):
                    i += 1
                    # 201 tells the client the file was renamed
                    errorNo = 201
                    newFilePath = "%s%s(%s).%s" % (
                        currentFolderPath,
                        newFileNameOnly,
                        i,
                        newFileExtension
                    )
                # BUG FIX: open in binary mode so uploads are not
                # corrupted by newline translation, and always close
                # the handle (the original leaked it).
                fileHandle = open(newFilePath, 'wb')
                try:
                    while 1:
                        line = newFile.readline()
                        if not line: break
                        fileHandle.write(line)
                finally:
                    fileHandle.close()
                # 511 == mode 0777 (decimal so the literal is valid on
                # both Python 2 and Python 3)
                os.chmod(newFilePath, 511)
        else:
            newFileName = "Extension not allowed"
            errorNo = 203
    else:
        newFileName = "No File"
        errorNo = 202
    return """
<script type="text/javascript">
window.parent.frames["frmUpload"].OnUploadCompleted(%s,"%s");
</script>
""" % (
        errorNo,
        newFileName.replace('"', "'")
    )
def run(self):
    """Entry point: validate the request, dispatch the command and
    return the response body (XML, upload script, or error document).

    Returns None when a mandatory parameter is missing; any exception
    is caught and rendered as a plain "ERROR: ..." string.
    """
    s = ""
    try:
        # Check if this is disabled
        if not(self.enabled):
            return self.sendError(1, "This connector is disabled. Please check the connector configurations and try again")
        # Make sure we have valid inputs
        if not(
            (self.request.has_key("Command")) and
            (self.request.has_key("Type")) and
            (self.request.has_key("CurrentFolder"))
        ):
            return
        # Get command
        command = self.request.get("Command", None)
        # Get resource type
        resourceType = self.request.get("Type", None)
        # folder syntax must start and end with "/"
        currentFolder = self.request.get("CurrentFolder", None)
        if (currentFolder[-1] <> "/"):
            currentFolder += "/"
        if (currentFolder[0] <> "/"):
            currentFolder = "/" + currentFolder
        # Check for invalid paths (directory traversal)
        if (".." in currentFolder):
            return self.sendError(102, "")
        # File upload doesn't have to return XML, so intercept
        # here
        if (command == "FileUpload"):
            return self.fileUpload(resourceType, currentFolder)
        # Begin XML
        s += self.createXmlHeader(command, resourceType, currentFolder)
        # Execute the command
        if (command == "GetFolders"):
            f = self.getFolders
        elif (command == "GetFoldersAndFiles"):
            f = self.getFoldersAndFiles
        elif (command == "CreateFolder"):
            f = self.createFolder
        else:
            f = None
        if (f is not None):
            s += f(resourceType, currentFolder)
        s += self.createXmlFooter()
    except Exception, e:
        s = "ERROR: %s" % e
    return s
# Running from command line
if __name__ == '__main__':
    # To test the output, uncomment the standard headers
    #print "Content-Type: text/html"
    #print ""
    # No Zope context: run as a plain CGI request and print the response.
    print getFCKeditorConnector()
"""
Running from zope, you will need to modify this connector.
If you have uploaded the FCKeditor into Zope (like me), you need to
move this connector out of Zope, and replace the "connector" with an
alias as below. The key to it is to pass the Zope context in, as
we then have a like to the Zope context.
## Script (Python) "connector.py"
##bind container=container
##bind context=context
##bind namespace=
##bind script=script
##bind subpath=traverse_subpath
##parameters=*args, **kws
##title=ALIAS
##
import Products.connector as connector
return connector.getFCKeditorConnector(context=context).run()
"""
| Python |
#!/usr/bin/python
# author: zdzhjx@gmail.com
# version: 0.1
import os
import shutil
import random
from hashlib import md5
import threading
import traceback
class FileDic():
    """Dictionary-like store that keeps each value in its own file on disk.

    Keys are tracked in an in-memory dict; each value is written to a file
    whose path is derived from the md5 of the key (two-character fan-out
    directory + remainder).  All public operations are serialized with a
    single lock.
    """

    def __init__(self, tmdir='.', name=None):
        # name: optional store name; a random one is generated when omitted.
        if not name:
            name = str(1000000 * random.random())
        self.m_name = name
        self.m_dir = tmdir + '/' + self.m_name   # backing directory
        try:
            os.mkdir(self.m_dir)
        except OSError:
            # Keep the original best-effort behaviour: log and carry on
            # (e.g. when the directory already exists).
            traceback.print_exc()
        self.m_dic = {}                          # key -> 1 (presence marker)
        self.m_lock = threading.Lock()

    def __savedata(self, k, data):
        # Write *data* to the file mapped from key *k*; True on success.
        try:
            fname = self.__keymapfile(k)
            try:
                os.mkdir(os.path.dirname(fname))
            except OSError:
                pass                             # fan-out dir already exists
            fd = open(fname, "w")
            try:
                fd.write(data)
            finally:
                fd.close()                       # close even if the write fails
        except Exception:
            traceback.print_exc()
            return False
        return True

    def __readdata(self, k):
        # Read back the value stored for *k* ("" when missing or on error).
        data = ""
        try:
            fname = self.__keymapfile(k)
            fd = open(fname, "r")
            try:
                data = fd.read()
            finally:
                fd.close()
        except Exception:
            traceback.print_exc()
        return data

    def __keymapfile(self, k):
        # Map a key to its on-disk path: <dir>/<md5[:2]>/<md5[2:]>.
        if isinstance(k, bytes):
            kb = k
        else:
            kb = k.encode('utf-8')               # md5 requires bytes on Python 3
        f = md5(kb).hexdigest()
        return self.m_dir + '/' + f[:2] + '/' + f[2:]

    def popitem(self):
        """Remove and return an arbitrary (key, value) pair; (None, None)
        when the store is empty or on error."""
        with self.m_lock:
            try:
                k, _ = self.m_dic.popitem()
                data = self.__readdata(k)
                os.unlink(self.__keymapfile(k))  # drop the backing file too
            except Exception:
                # Fixed: the original dropped into pdb here (debugger left
                # in a production path); just log instead.
                traceback.print_exc()
                k = None
                data = None
        return (k, data)

    def push(self, k, v):
        """Insert/overwrite *k* -> *v*; return True on success.

        Fixed: the original returned True even when writing the backing
        file had failed.
        """
        ok = False
        try:
            with self.m_lock:
                if self.__savedata(k, v):
                    self.m_dic[k] = 1
                    ok = True
        except Exception:
            traceback.print_exc()
        return ok

    def getele(self, k):
        """Return the value stored for *k* ("" when missing or on error)."""
        # Fixed: ``data`` was previously unbound when an exception fired,
        # turning any failure into a NameError at the return statement.
        data = ""
        try:
            with self.m_lock:
                data = self.__readdata(k)
        except Exception:
            traceback.print_exc()
        return data

    def update(self, k, v):
        # update v by k (write-through overwrite, same as push)
        return self.push(k, v)

    def has_key(self, k):
        # Membership test kept for dict API compatibility.
        return k in self.m_dic

    def qsize(self):
        # Return the number of stored elements.
        return len(self.m_dic)

    # for compatibility with the dict interface
    def __setitem__(self, k, v):
        self.push(k, v)

    def __getitem__(self, k):
        return self.getele(k)

    def __delitem__(self, k):
        if k in self.m_dic:
            with self.m_lock:
                try:
                    os.remove(self.__keymapfile(k))
                    del self.m_dic[k]
                except Exception:
                    traceback.print_exc()

    def __len__(self):
        return len(self.m_dic)

    def keys(self):
        return self.m_dic.keys()

    def __del__(self):
        # Best-effort cleanup of the backing directory.
        try:
            shutil.rmtree(self.m_dir)
        except Exception:
            traceback.print_exc()
# Smoke-test driver (Python 2 print statements): exercises the whole
# FileDic API — push/getele/popitem, the dict-style dunders, and keys().
if __name__ == '__main__':
# for test
q = FileDic()
print 'push a', q.push( 'a', '100' )
# NOTE: the label says 'del a' but this line actually reads the value back.
print 'del a', q.getele('a')
print 'popitem ',q.popitem()
q['aa'] = '10000'
print 'set aa'
print 'aa', q['aa']
del q['aa']
print 'del aa'
print 'dump keys', q.keys()
q['ok'] = 'aaaaa'
print q.keys()
| Python |
# Benchmark payload strings used by the FileDic timing scripts.
# Equivalent to the original backslash-continued literals (4 and 300
# fifty-character lines of 'd'), built programmatically for readability.
key = 'd' * 200
data = 'd' * 15000
| Python |
#!/usr/bin/python
# author: zdzhjx@gmail.com
# version: 0.1
import os
import shutil
import random
from hashlib import md5
import threading
import traceback
import bsddb
class FileDic():
    """Dictionary-like store backed by a Berkeley DB hash file.

    Keys are tracked in an in-memory dict mapping each original key to its
    md5-hashed database key; the values live in the bsddb file.  All public
    operations are serialized with a single lock.
    """

    def __init__(self, tmdir='/tmp', name=None):
        # name: optional store name; a random one is generated when omitted.
        if not name:
            name = str(1000000 * random.random())
        self.m_name = name
        self.m_dir = tmdir + '/' + self.m_name   # path of the bsddb file
        self.m_dic = {}                          # original key -> hashed db key
        self.m_lock = threading.Lock()
        self.m_db = bsddb.hashopen(self.m_dir)

    def __savedata(self, k, data):
        # Store *data* under the hashed form of *k*; True on success.
        try:
            hashed = self.__keymapfile(k)
            self.m_dic[k] = hashed
            self.m_db[hashed] = data
        except Exception:
            traceback.print_exc()
            return False
        return True

    def __readdata(self, k, key, remove=False):
        # Fetch the value stored under hashed *key* (*k* is the original
        # key, kept in the signature for symmetry); optionally delete it.
        data = ""
        try:
            data = self.m_db[key]
            if remove:
                del self.m_db[key]
        except Exception:
            traceback.print_exc()
        return data

    def __keymapfile(self, k):
        # Map an arbitrary key to its fixed-length database key.
        # Fixed: the original had a second, unreachable ``return`` building
        # a file path after this one — dead code, removed.
        if isinstance(k, bytes):
            kb = k
        else:
            kb = k.encode('utf-8')               # md5 requires bytes on Python 3
        return md5(kb).hexdigest()

    def popitem(self):
        """Remove and return an arbitrary (key, value) pair; (None, None)
        when the store is empty or on error."""
        with self.m_lock:
            try:
                k, hashed = self.m_dic.popitem()
                data = self.__readdata(k, hashed, remove=True)
            except Exception:
                # Fixed: the original dropped into pdb here (debugger left
                # in a production path); just log instead.
                traceback.print_exc()
                k = None
                data = None
        return (k, data)

    def push(self, k, v):
        """Insert/overwrite *k* -> *v*; return True on success.

        Fixed: the original discarded __savedata's result and returned
        True even when the write had failed.
        """
        try:
            with self.m_lock:
                return self.__savedata(k, v)
        except Exception:
            traceback.print_exc()
            return False

    def getele(self, k):
        """Return the value stored for *k* ("" when missing or on error)."""
        # Fixed: ``data`` was previously unbound when self.m_dic[k] raised,
        # turning an unknown key into a NameError at the return statement.
        data = ""
        try:
            with self.m_lock:
                data = self.__readdata(k, self.m_dic[k])
        except Exception:
            traceback.print_exc()
        return data

    def update(self, k, v):
        # update v by k (write-through overwrite, same as push)
        return self.push(k, v)

    def has_key(self, k):
        # Membership test kept for dict API compatibility.
        return k in self.m_dic

    def bsize(self):
        # Return the number of stored elements (name kept for existing callers).
        return len(self.m_dic)

    # for compatibility with the dict interface
    def __setitem__(self, k, v):
        self.push(k, v)

    def __getitem__(self, k):
        return self.getele(k)

    def __delitem__(self, k):
        if k in self.m_dic:
            with self.m_lock:
                try:
                    del self.m_db[self.m_dic[k]]
                    del self.m_dic[k]
                except Exception:
                    traceback.print_exc()

    def __len__(self):
        return len(self.m_dic)

    def keys(self):
        return self.m_dic.keys()

    def __del__(self):
        # Close the database; errors during interpreter teardown are logged.
        try:
            self.m_db.close()
        except Exception:
            traceback.print_exc()
# Smoke-test driver (Python 2 print statements) for the bsddb-backed store:
# exercises push/getele/popitem, the dict-style dunders, and keys().
if __name__ == '__main__':
# for test
import pdb
q = FileDic()
print 'push a', q.push( 'a', '100' )
#print 'push ab', q.push( 'ab', '100' )
#pdb.set_trace()
# NOTE: the label says 'del a' but this line actually reads the value back.
print 'del a', q.getele('a')
# Dump internal state: key map and the underlying bsddb object.
print q.m_dic, q.m_db, 'KKKK'
print 'popitem ',q.popitem()
q['aa'] = '10000'
print 'set aa'
print 'aa', q['aa']
del q['aa']
print 'del aa'
print 'dump keys', q.keys()
q['ok'] = 'aaaaa'
print q.keys()
| Python |
#!/usr/bin/python
# author: zdzhjx@gmail.com
# version: 0.1
import os
import shutil
import random
from hashlib import md5
import threading
import traceback
class FileDic():
    """Dictionary-like store that keeps each value in its own file on disk.

    Keys are tracked in an in-memory dict; each value is written to a file
    whose path is derived from the md5 of the key (two-character fan-out
    directory + remainder).  All public operations are serialized with a
    single lock.
    """

    def __init__(self, tmdir='.', name=None):
        # name: optional store name; a random one is generated when omitted.
        if not name:
            name = str(1000000 * random.random())
        self.m_name = name
        self.m_dir = tmdir + '/' + self.m_name   # backing directory
        try:
            os.mkdir(self.m_dir)
        except OSError:
            # Keep the original best-effort behaviour: log and carry on
            # (e.g. when the directory already exists).
            traceback.print_exc()
        self.m_dic = {}                          # key -> 1 (presence marker)
        self.m_lock = threading.Lock()

    def __savedata(self, k, data):
        # Write *data* to the file mapped from key *k*; True on success.
        try:
            fname = self.__keymapfile(k)
            try:
                os.mkdir(os.path.dirname(fname))
            except OSError:
                pass                             # fan-out dir already exists
            fd = open(fname, "w")
            try:
                fd.write(data)
            finally:
                fd.close()                       # close even if the write fails
        except Exception:
            traceback.print_exc()
            return False
        return True

    def __readdata(self, k):
        # Read back the value stored for *k* ("" when missing or on error).
        data = ""
        try:
            fname = self.__keymapfile(k)
            fd = open(fname, "r")
            try:
                data = fd.read()
            finally:
                fd.close()
        except Exception:
            traceback.print_exc()
        return data

    def __keymapfile(self, k):
        # Map a key to its on-disk path: <dir>/<md5[:2]>/<md5[2:]>.
        if isinstance(k, bytes):
            kb = k
        else:
            kb = k.encode('utf-8')               # md5 requires bytes on Python 3
        f = md5(kb).hexdigest()
        return self.m_dir + '/' + f[:2] + '/' + f[2:]

    def popitem(self):
        """Remove and return an arbitrary (key, value) pair; (None, None)
        when the store is empty or on error."""
        with self.m_lock:
            try:
                k, _ = self.m_dic.popitem()
                data = self.__readdata(k)
                os.unlink(self.__keymapfile(k))  # drop the backing file too
            except Exception:
                # Fixed: the original dropped into pdb here (debugger left
                # in a production path); just log instead.
                traceback.print_exc()
                k = None
                data = None
        return (k, data)

    def push(self, k, v):
        """Insert/overwrite *k* -> *v*; return True on success.

        Fixed: the original returned True even when writing the backing
        file had failed.
        """
        ok = False
        try:
            with self.m_lock:
                if self.__savedata(k, v):
                    self.m_dic[k] = 1
                    ok = True
        except Exception:
            traceback.print_exc()
        return ok

    def getele(self, k):
        """Return the value stored for *k* ("" when missing or on error)."""
        # Fixed: ``data`` was previously unbound when an exception fired,
        # turning any failure into a NameError at the return statement.
        data = ""
        try:
            with self.m_lock:
                data = self.__readdata(k)
        except Exception:
            traceback.print_exc()
        return data

    def update(self, k, v):
        # update v by k (write-through overwrite, same as push)
        return self.push(k, v)

    def has_key(self, k):
        # Membership test kept for dict API compatibility.
        return k in self.m_dic

    def qsize(self):
        # Return the number of stored elements.
        return len(self.m_dic)

    # for compatibility with the dict interface
    def __setitem__(self, k, v):
        self.push(k, v)

    def __getitem__(self, k):
        return self.getele(k)

    def __delitem__(self, k):
        if k in self.m_dic:
            with self.m_lock:
                try:
                    os.remove(self.__keymapfile(k))
                    del self.m_dic[k]
                except Exception:
                    traceback.print_exc()

    def __len__(self):
        return len(self.m_dic)

    def keys(self):
        return self.m_dic.keys()

    def __del__(self):
        # Best-effort cleanup of the backing directory.
        try:
            shutil.rmtree(self.m_dir)
        except Exception:
            traceback.print_exc()
# Smoke-test driver (Python 2 print statements): exercises the whole
# FileDic API — push/getele/popitem, the dict-style dunders, and keys().
if __name__ == '__main__':
# for test
q = FileDic()
print 'push a', q.push( 'a', '100' )
# NOTE: the label says 'del a' but this line actually reads the value back.
print 'del a', q.getele('a')
print 'popitem ',q.popitem()
q['aa'] = '10000'
print 'set aa'
print 'aa', q['aa']
del q['aa']
print 'del aa'
print 'dump keys', q.keys()
q['ok'] = 'aaaaa'
print q.keys()
| Python |
import filedic
import time
import random
import testdata
def test( num ):
q = filedic.FileDic()
pre = time.time()
for i in xrange( num ):
k=testdata.key + str( i )
v=testdata.data + str(i)
q[k] = v
used = time.time() - pre
print 'w:', num, used, num/used
pre = time.time()
for i in xrange( num ):
q.popitem()
used = time.time() - pre
print 'r:', num, used, num/used
del q
#nums = [ 10, 100, 1000, ]
# Benchmark sweep: run the write/pop timing test over growing element
# counts, separating each run's output with a blank line.
nums = [ 1000, 10000, 20000, 50000, 100000 ]
for n in nums:
test( n )
print
| Python |
import bdbdic
import time
import random
import testdata
def test( num ):
q = bdbdic.FileDic()
pre = time.time()
for i in xrange( num ):
k=testdata.key + str( i )
v=testdata.data + str(i)
q[k] = v
used = time.time() - pre
print 'w:', num, used, num/used
pre = time.time()
for i in xrange( num ):
q.popitem()
used = time.time() - pre
print 'r:', num, used, num/used
del q
#nums = [ 10, 100, 1000, ]
# Benchmark sweep: run the write/pop timing test over growing element
# counts, separating each run's output with a blank line.
nums = [ 1000, 10000, 20000, 50000, 100000 ]
for n in nums:
test( n )
print
| Python |
#!/usr/bin/python
# author: zdzhjx@gmail.com
# version: 0.1
import os
import shutil
import random
from hashlib import md5
import threading
import traceback
import bsddb
class FileDic():
    """Dictionary-like store backed by a Berkeley DB hash file.

    Keys are tracked in an in-memory dict mapping each original key to its
    md5-hashed database key; the values live in the bsddb file.  All public
    operations are serialized with a single lock.
    """

    def __init__(self, tmdir='/tmp', name=None):
        # name: optional store name; a random one is generated when omitted.
        if not name:
            name = str(1000000 * random.random())
        self.m_name = name
        self.m_dir = tmdir + '/' + self.m_name   # path of the bsddb file
        self.m_dic = {}                          # original key -> hashed db key
        self.m_lock = threading.Lock()
        self.m_db = bsddb.hashopen(self.m_dir)

    def __savedata(self, k, data):
        # Store *data* under the hashed form of *k*; True on success.
        try:
            hashed = self.__keymapfile(k)
            self.m_dic[k] = hashed
            self.m_db[hashed] = data
        except Exception:
            traceback.print_exc()
            return False
        return True

    def __readdata(self, k, key, remove=False):
        # Fetch the value stored under hashed *key* (*k* is the original
        # key, kept in the signature for symmetry); optionally delete it.
        data = ""
        try:
            data = self.m_db[key]
            if remove:
                del self.m_db[key]
        except Exception:
            traceback.print_exc()
        return data

    def __keymapfile(self, k):
        # Map an arbitrary key to its fixed-length database key.
        # Fixed: the original had a second, unreachable ``return`` building
        # a file path after this one — dead code, removed.
        if isinstance(k, bytes):
            kb = k
        else:
            kb = k.encode('utf-8')               # md5 requires bytes on Python 3
        return md5(kb).hexdigest()

    def popitem(self):
        """Remove and return an arbitrary (key, value) pair; (None, None)
        when the store is empty or on error."""
        with self.m_lock:
            try:
                k, hashed = self.m_dic.popitem()
                data = self.__readdata(k, hashed, remove=True)
            except Exception:
                # Fixed: the original dropped into pdb here (debugger left
                # in a production path); just log instead.
                traceback.print_exc()
                k = None
                data = None
        return (k, data)

    def push(self, k, v):
        """Insert/overwrite *k* -> *v*; return True on success.

        Fixed: the original discarded __savedata's result and returned
        True even when the write had failed.
        """
        try:
            with self.m_lock:
                return self.__savedata(k, v)
        except Exception:
            traceback.print_exc()
            return False

    def getele(self, k):
        """Return the value stored for *k* ("" when missing or on error)."""
        # Fixed: ``data`` was previously unbound when self.m_dic[k] raised,
        # turning an unknown key into a NameError at the return statement.
        data = ""
        try:
            with self.m_lock:
                data = self.__readdata(k, self.m_dic[k])
        except Exception:
            traceback.print_exc()
        return data

    def update(self, k, v):
        # update v by k (write-through overwrite, same as push)
        return self.push(k, v)

    def has_key(self, k):
        # Membership test kept for dict API compatibility.
        return k in self.m_dic

    def bsize(self):
        # Return the number of stored elements (name kept for existing callers).
        return len(self.m_dic)

    # for compatibility with the dict interface
    def __setitem__(self, k, v):
        self.push(k, v)

    def __getitem__(self, k):
        return self.getele(k)

    def __delitem__(self, k):
        if k in self.m_dic:
            with self.m_lock:
                try:
                    del self.m_db[self.m_dic[k]]
                    del self.m_dic[k]
                except Exception:
                    traceback.print_exc()

    def __len__(self):
        return len(self.m_dic)

    def keys(self):
        return self.m_dic.keys()

    def __del__(self):
        # Close the database; errors during interpreter teardown are logged.
        try:
            self.m_db.close()
        except Exception:
            traceback.print_exc()
# Smoke-test driver (Python 2 print statements) for the bsddb-backed store:
# exercises push/getele/popitem, the dict-style dunders, and keys().
if __name__ == '__main__':
# for test
import pdb
q = FileDic()
print 'push a', q.push( 'a', '100' )
#print 'push ab', q.push( 'ab', '100' )
#pdb.set_trace()
# NOTE: the label says 'del a' but this line actually reads the value back.
print 'del a', q.getele('a')
# Dump internal state: key map and the underlying bsddb object.
print q.m_dic, q.m_db, 'KKKK'
print 'popitem ',q.popitem()
q['aa'] = '10000'
print 'set aa'
print 'aa', q['aa']
del q['aa']
print 'del aa'
print 'dump keys', q.keys()
q['ok'] = 'aaaaa'
print q.keys()
| Python |
"""
FCKeditor - The text editor for Internet - http://www.fckeditor.net
Copyright (C) 2003-2007 Frederico Caldeira Knabben
== BEGIN LICENSE ==
Licensed under the terms of any of the following licenses at your
choice:
- GNU General Public License Version 2 or later (the "GPL")
http://www.gnu.org/licenses/gpl.html
- GNU Lesser General Public License Version 2.1 or later (the "LGPL")
http://www.gnu.org/licenses/lgpl.html
- Mozilla Public License Version 1.1 or later (the "MPL")
http://www.mozilla.org/MPL/MPL-1.1.html
== END LICENSE ==
This is the integration file for Python.
"""
import cgi
import os
import re
import string
def escape(text, replace=None):
    """Convert the special characters '&', '<', '>', '"' and "'" to their
    HTML entity references (RFC 1866).

    replace: optional replacement callable with the signature
        replace(text, old, new); defaults to the string's own ``replace``
        method.  (The original default was the Python 2-only
        ``string.replace`` function; ``None`` keeps the call signature
        backward compatible.)

    Fixed: the entity strings had been decode-mangled so each character was
    replaced with itself (e.g. ``replace(text, '&', '&')``) and nothing was
    actually escaped; the proper entities are restored below.
    """
    if replace is None:
        replace = lambda s, old, new: s.replace(old, new)
    # '&' must be converted first so the entities below are not re-escaped.
    text = replace(text, '&', '&amp;')
    text = replace(text, '<', '&lt;')
    text = replace(text, '>', '&gt;')
    text = replace(text, '"', '&quot;')
    text = replace(text, "'", '&#39;')
    return text
# The FCKeditor class
class FCKeditor(object):
    """Server-side helper that renders an FCKeditor instance, falling back
    to a plain <textarea> for incompatible browsers."""

    def __init__(self, instanceName):
        self.InstanceName = instanceName   # base name/id for the generated fields
        self.BasePath = '/fckeditor/'      # URL where the editor is installed
        self.Width = '100%'
        self.Height = '200'
        self.ToolbarSet = 'Default'
        self.Value = ''
        self.Config = {}                   # extra settings, see GetConfigFieldString

    def Create(self):
        """Return the full HTML markup for this editor instance."""
        return self.CreateHtml()

    def CreateHtml(self):
        """Build the editor markup: hidden value/config fields plus the
        editor iframe for compatible browsers, otherwise a <textarea>."""
        HtmlValue = escape(self.Value)
        Html = "<div>"
        if self.IsCompatible():
            File = "fckeditor.html"
            Link = "%seditor/%s?InstanceName=%s" % (
                self.BasePath,
                File,
                self.InstanceName
            )
            if self.ToolbarSet is not None:
                Link += "&ToolBar=%s" % self.ToolbarSet
            # Render the linked hidden field
            Html += "<input type=\"hidden\" id=\"%s\" name=\"%s\" value=\"%s\" style=\"display:none\" />" % (
                self.InstanceName,
                self.InstanceName,
                HtmlValue
            )
            # Render the configurations hidden field
            Html += "<input type=\"hidden\" id=\"%s___Config\" value=\"%s\" style=\"display:none\" />" % (
                self.InstanceName,
                self.GetConfigFieldString()
            )
            # Render the editor iframe.
            # Fixed: the id contained a stray backslash ("%s\__Frame");
            # the editor's JavaScript expects the "<name>___Frame" id.
            Html += "<iframe id=\"%s___Frame\" src=\"%s\" width=\"%s\" height=\"%s\" frameborder=\"0\" scrolling=\"no\"></iframe>" % (
                self.InstanceName,
                Link,
                self.Width,
                self.Height
            )
        else:
            # Fixed: the test searched for the two-character string "%%",
            # which never occurs in values like "100%", so percentage
            # widths/heights wrongly got "px" appended; test for "%".
            if self.Width.find("%") < 0:
                WidthCSS = "%spx" % self.Width
            else:
                WidthCSS = self.Width
            if self.Height.find("%") < 0:
                HeightCSS = "%spx" % self.Height
            else:
                HeightCSS = self.Height
            Html += "<textarea name=\"%s\" rows=\"4\" cols=\"40\" style=\"width: %s; height: %s;\" wrap=\"virtual\">%s</textarea>" % (
                self.InstanceName,
                WidthCSS,
                HeightCSS,
                HtmlValue
            )
        Html += "</div>"
        return Html

    def IsCompatible(self):
        """User-agent sniffing: True for IE >= 5.5 (not Mac/Opera),
        Gecko >= 20030210, Opera >= 9.5 or AppleWebKit >= 522."""
        if "HTTP_USER_AGENT" in os.environ:   # dict.has_key is Python 2 only
            sAgent = os.environ.get("HTTP_USER_AGENT", "")
        else:
            sAgent = ""
        if (sAgent.find("MSIE") >= 0) and (sAgent.find("mac") < 0) and (sAgent.find("Opera") < 0):
            i = sAgent.find("MSIE")
            iVersion = float(sAgent[i+5:i+5+3])
            return iVersion >= 5.5
        elif sAgent.find("Gecko/") >= 0:
            i = sAgent.find("Gecko/")
            iVersion = int(sAgent[i+6:i+6+8])
            return iVersion >= 20030210
        elif sAgent.find("Opera/") >= 0:
            i = sAgent.find("Opera/")
            iVersion = float(sAgent[i+6:i+6+4])
            return iVersion >= 9.5
        elif sAgent.find("AppleWebKit/") >= 0:
            p = re.compile(r'AppleWebKit\/(\d+)', re.IGNORECASE)
            m = p.search(sAgent)
            # Fixed: the original compared the *string* m.group(1) to the
            # int 522 (always True on Python 2, TypeError on Python 3),
            # and crashed when the regex unexpectedly failed to match.
            return m is not None and int(m.group(1)) >= 522
        else:
            return False

    def GetConfigFieldString(self):
        """Serialize self.Config as a query string ("k=v&k2=v2"), escaping
        keys and values; "true"/"false" values pass through unescaped."""
        sParams = ""
        bFirst = True
        for sKey in self.Config.keys():
            sValue = self.Config[sKey]
            if not bFirst:
                sParams += "&"
            else:
                bFirst = False
            if sValue:
                k = escape(sKey)
                v = escape(sValue)
                if sValue == "true":
                    sParams += "%s=true" % k
                elif sValue == "false":
                    sParams += "%s=false" % k
                else:
                    sParams += "%s=%s" % (k, v)
        return sParams
| Python |
#!/usr/bin/env python
"""
FCKeditor - The text editor for Internet - http://www.fckeditor.net
Copyright (C) 2003-2007 Frederico Caldeira Knabben
== BEGIN LICENSE ==
Licensed under the terms of any of the following licenses at your
choice:
- GNU General Public License Version 2 or later (the "GPL")
http://www.gnu.org/licenses/gpl.html
- GNU Lesser General Public License Version 2.1 or later (the "LGPL")
http://www.gnu.org/licenses/lgpl.html
- Mozilla Public License Version 1.1 or later (the "MPL")
http://www.mozilla.org/MPL/MPL-1.1.html
== END LICENSE ==
Connector for Python (CGI and WSGI).
"""
import os
try: # Windows needs stdio set for binary mode for file upload to work.
import msvcrt
msvcrt.setmode (0, os.O_BINARY) # stdin = 0
msvcrt.setmode (1, os.O_BINARY) # stdout = 1
except ImportError:
pass
from fckutil import *
from fckoutput import *
import config as Config
class GetFoldersCommandMixin (object):
	def getFolders(self, resourceType, currentFolder):
		"""
		Purpose: command to receive a list of folders
		Returns an XML <Folders> fragment listing the sub-folders of currentFolder.
		"""
		# Map the virtual path to our local server path
		serverPath = mapServerFolder(self.userFilesFolder, currentFolder)
		xmlParts = ["""<Folders>"""]	# open the folders node
		for entry in os.listdir(serverPath):
			entryPath = mapServerFolder(serverPath, entry)
			if os.path.isdir(entryPath):
				xmlParts.append("""<Folder name="%s" />""" % (
					convertToXmlAttribute(entry)
				))
		xmlParts.append("""</Folders>""")	# close the folders node
		return "".join(xmlParts)
class GetFoldersAndFilesCommandMixin (object):
	def getFoldersAndFiles(self, resourceType, currentFolder):
		"""
		Purpose: command to receive a list of folders and files
		Returns the XML fragments <Folders>...</Folders><Files>...</Files>
		describing the contents of currentFolder.
		"""
		# Map the virtual path to our local server
		serverPath = mapServerFolder(self.userFilesFolder,currentFolder)
		# Open the folders / files node
		folders = """<Folders>"""
		files = """<Files>"""
		for someObject in os.listdir(serverPath):
			someObjectPath = mapServerFolder(serverPath, someObject)
			if os.path.isdir(someObjectPath):
				folders += """<Folder name="%s" />""" % (
					convertToXmlAttribute(someObject)
				)
			elif os.path.isfile(someObjectPath):
				# FIX: stat the file once and reuse the result (the old code
				# computed `size` and then called os.path.getsize a second time).
				size = os.path.getsize(someObjectPath)
				files += """<File name="%s" size="%s" />""" % (
					convertToXmlAttribute(someObject),
					size
				)
		# Close the folders / files node
		folders += """</Folders>"""
		files += """</Files>"""
		return folders + files
class CreateFolderCommandMixin (object):
	def createFolder(self, resourceType, currentFolder):
		"""
		Purpose: command to create a new folder
		Reads "NewFolderName" from the request, sanitizes it, creates the
		folder under the current virtual folder and answers with an XML
		error node (errorNo 0 means success).
		"""
		errorNo = 0; errorMsg ='';
		if self.request.has_key("NewFolderName"):
			newFolder = self.request.get("NewFolderName", None)
			newFolder = sanitizeFolderName (newFolder)
			try:
				# Map the virtual target path to a physical one and create it.
				newFolderPath = mapServerFolder(self.userFilesFolder, combinePaths(currentFolder, newFolder))
				self.createServerFolder(newFolderPath)
			except Exception, e:
				# Re-encode the OS error message (latin-1 -> utf-8); careful with encodings!
				errorMsg = str(e).decode('iso-8859-1').encode('utf-8')
				if hasattr(e,'errno'):
					# Translate OS errno values into connector error codes.
					if e.errno==17: #file already exists
						errorNo=0
					elif e.errno==13: # permission denied
						errorNo = 103
					elif e.errno==36 or e.errno==2 or e.errno==22: # filename too long / no such file / invalid name
						errorNo = 102
					else:
						errorNo = 110
		else:
			# No "NewFolderName" field in the request: report an invalid name.
			errorNo = 102
		return self.sendErrorNode ( errorNo, errorMsg )
	def createServerFolder(self, folderPath):
		"Purpose: physically creates a folder on the server"
		# No need to check if the parent exists, just create all hierarchy.
		# The umask is cleared so the 0755 mode is applied exactly, then restored.
		oldumask = os.umask(0)
		os.makedirs(folderPath,mode=0755)
		os.umask( oldumask )
class UploadFileCommandMixin (object):
	def uploadFile(self, resourceType, currentFolder):
		"""
		Purpose: command to upload files to server (same as FileUpload)
		Validates the extension against the per-resource-type allow/deny
		lists, renames the file if the name is already taken, writes the
		contents and answers with the upload result.
		"""
		errorNo = 0
		if self.request.has_key("NewFile"):
			# newFile has all the contents we need
			newFile = self.request.get("NewFile", "")
			# Get the file name
			newFileName = newFile.filename
			newFileName = sanitizeFileName( newFileName )
			newFileNameOnly = removeExtension(newFileName)
			newFileExtension = getExtension(newFileName).lower()
			allowedExtensions = Config.AllowedExtensions[resourceType]
			deniedExtensions = Config.DeniedExtensions[resourceType]
			if (allowedExtensions):
				# An allow-list is configured: only listed extensions pass.
				isAllowed = False
				if (newFileExtension in allowedExtensions):
					isAllowed = True
			elif (deniedExtensions):
				# Only a deny-list is configured: everything not listed passes.
				isAllowed = True
				if (newFileExtension in deniedExtensions):
					isAllowed = False
			else:
				# No extension limitations
				isAllowed = True
			if (isAllowed):
				# Upload to operating system
				# Map the virtual path to the local server path
				currentFolderPath = mapServerFolder(self.userFilesFolder, currentFolder)
				i = 0
				while (True):
					newFilePath = os.path.join (currentFolderPath,newFileName)
					if os.path.exists(newFilePath):
						# Name collision: derive "name(0001).ext" and retry.
						i += 1
						newFileName = "%s(%04d).%s" % (
							newFileNameOnly, i, newFileExtension
						)
						errorNo= 201 # file renamed
					else:
						# Read file contents and write to the desired path (similar to php's move_uploaded_file)
						fout = file(newFilePath, 'wb')
						while (True):
							chunk = newFile.file.read(100000)
							if not chunk: break
							fout.write (chunk)
						fout.close()
						if os.path.exists ( newFilePath ):
							# Clear the umask so chmod applies 0755 exactly, then restore it.
							oldumask = os.umask(0)
							os.chmod( newFilePath, 0755 )
							os.umask( oldumask )
						# NOTE(review): currentFolder starts with '/' and webUserFilesFolder
						# usually ends with '/', so this URL can contain a double slash --
						# confirm whether callers normalize it.
						newFileUrl = self.webUserFilesFolder + currentFolder + newFileName
						return self.sendUploadResults( errorNo , newFileUrl, newFileName )
			else:
				return self.sendUploadResults( errorNo = 203, customMsg = "Extension not allowed" )
		else:
			return self.sendUploadResults( errorNo = 202, customMsg = "No File" )
| Python |
#!/usr/bin/env python
"""
* FCKeditor - The text editor for Internet - http://www.fckeditor.net
* Copyright (C) 2003-2007 Frederico Caldeira Knabben
*
* == BEGIN LICENSE ==
*
* Licensed under the terms of any of the following licenses at your
* choice:
*
* - GNU General Public License Version 2 or later (the "GPL")
* http://www.gnu.org/licenses/gpl.html
*
* - GNU Lesser General Public License Version 2.1 or later (the "LGPL")
* http://www.gnu.org/licenses/lgpl.html
*
* - Mozilla Public License Version 1.1 or later (the "MPL")
* http://www.mozilla.org/MPL/MPL-1.1.html
*
* == END LICENSE ==
*
* Configuration file for the File Manager Connector for Python
"""
# INSTALLATION NOTE: You must set up your server environment accordingly to run
# python scripts. This connector requires Python 2.4 or greater.
#
# Supported operation modes:
# * WSGI (recommended): You'll need apache + mod_python + modpython_gateway
# or any web server capable of the WSGI python standard
# * Plain Old CGI: Any server capable of running standard python scripts
# (although mod_python is recommended for performance)
# This was the previous connector version operation mode
#
# If you're using the Apache web server, rename the htaccess.txt file to .htaccess,
# and set the proper options and paths.
# For WSGI and mod_python, you may need to download modpython_gateway from:
# http://projects.amor.org/misc/svn/modpython_gateway.py and copy it in this
# directory.
# SECURITY: You must explicitly enable this "connector". (Set it to "True").
# WARNING: don't just set "ConfigIsEnabled = True", you must be sure that only
# authenticated users can access this file or use some kind of session checking.
Enabled = False

# Path to user files relative to the document root.
UserFilesPath = '/userfiles/'

# Fill the following value if you prefer to specify the absolute path for the
# user files directory.  Useful if you are using a virtual directory, symbolic
# link or alias.  Examples: 'C:\\MySite\\userfiles\\' or '/root/mysite/userfiles/'.
# Attention: the above 'UserFilesPath' must point to the same directory.
# WARNING: GetRootPath may not work in virtual or mod_python configurations, and
# may not be thread safe.  Use this configuration parameter instead.
UserFilesAbsolutePath = ''

# Due to security issues with Apache modules, it is recommended to leave the
# following setting enabled.
ForceSingleExtension = True

# What the user can do with this connector
ConfigAllowedCommands = [ 'QuickUpload', 'FileUpload', 'GetFolders', 'GetFoldersAndFiles', 'CreateFolder' ]

# Allowed Resource Types
ConfigAllowedTypes = ['File', 'Image', 'Flash', 'Media']

# Do not touch these lines -- the per-resource-type tables below are filled in
# under "Configuration settings for each Resource Type".
AllowedExtensions = {}
DeniedExtensions = {}
FileTypesPath = {}
FileTypesAbsolutePath = {}
QuickUploadPath = {}
QuickUploadAbsolutePath = {}

# Configuration settings for each Resource Type
#
# - AllowedExtensions: the possible extensions that can be allowed.
#     If it is empty then any file type can be uploaded.
# - DeniedExtensions: the extensions that won't be allowed.
#     If it is empty then no restrictions are done here.
#
# For a file to be uploaded it has to fulfill both the AllowedExtensions
# and DeniedExtensions (that is: not being denied) conditions.
#
# - FileTypesPath: the virtual folder relative to the document root where
#     these resources will be located.
#     Attention: it must start and end with a slash: '/'
#
# - FileTypesAbsolutePath: the physical path to the above folder.  It must be
#     an absolute path; if it's an empty string it will be autocalculated.
#     Useful if you are using a virtual directory, symbolic link or alias.
#     Attention: the above 'FileTypesPath' must point to the same directory,
#     and the value must end with a slash: '/'
#
# - QuickUploadPath / QuickUploadAbsolutePath: same rules as the two settings
#     above, but for files uploaded through the Upload tab in the resources
#     dialogs.

AllowedExtensions['File']	= ['7z','aiff','asf','avi','bmp','csv','doc','fla','flv','gif','gz','gzip','jpeg','jpg','mid','mov','mp3','mp4','mpc','mpeg','mpg','ods','odt','pdf','png','ppt','pxd','qt','ram','rar','rm','rmi','rmvb','rtf','sdc','sitd','swf','sxc','sxw','tar','tgz','tif','tiff','txt','vsd','wav','wma','wmv','xls','xml','zip']
DeniedExtensions['File']	= []
FileTypesPath['File']		= UserFilesPath + 'file/'
FileTypesAbsolutePath['File']	= (UserFilesAbsolutePath != '') and (UserFilesAbsolutePath + 'file/') or ''
QuickUploadPath['File']		= FileTypesPath['File']
QuickUploadAbsolutePath['File']	= FileTypesAbsolutePath['File']

AllowedExtensions['Image']	= ['bmp','gif','jpeg','jpg','png']
DeniedExtensions['Image']	= []
FileTypesPath['Image']		= UserFilesPath + 'image/'
FileTypesAbsolutePath['Image']	= (UserFilesAbsolutePath != '') and (UserFilesAbsolutePath + 'image/') or ''
QuickUploadPath['Image']	= FileTypesPath['Image']
QuickUploadAbsolutePath['Image']	= FileTypesAbsolutePath['Image']

AllowedExtensions['Flash']	= ['swf','flv']
DeniedExtensions['Flash']	= []
FileTypesPath['Flash']		= UserFilesPath + 'flash/'
FileTypesAbsolutePath['Flash']	= (UserFilesAbsolutePath != '') and (UserFilesAbsolutePath + 'flash/') or ''
QuickUploadPath['Flash']	= FileTypesPath['Flash']
QuickUploadAbsolutePath['Flash']	= FileTypesAbsolutePath['Flash']

AllowedExtensions['Media']	= ['aiff','asf','avi','bmp','fla', 'flv','gif','jpeg','jpg','mid','mov','mp3','mp4','mpc','mpeg','mpg','png','qt','ram','rm','rmi','rmvb','swf','tif','tiff','wav','wma','wmv']
DeniedExtensions['Media']	= []
FileTypesPath['Media']		= UserFilesPath + 'media/'
FileTypesAbsolutePath['Media']	= (UserFilesAbsolutePath != '') and (UserFilesAbsolutePath + 'media/') or ''
QuickUploadPath['Media']	= FileTypesPath['Media']
QuickUploadAbsolutePath['Media']	= FileTypesAbsolutePath['Media']
| Python |
#!/usr/bin/env python
"""
FCKeditor - The text editor for Internet - http://www.fckeditor.net
Copyright (C) 2003-2007 Frederico Caldeira Knabben
== BEGIN LICENSE ==
Licensed under the terms of any of the following licenses at your
choice:
- GNU General Public License Version 2 or later (the "GPL")
http://www.gnu.org/licenses/gpl.html
- GNU Lesser General Public License Version 2.1 or later (the "LGPL")
http://www.gnu.org/licenses/lgpl.html
- Mozilla Public License Version 1.1 or later (the "MPL")
http://www.mozilla.org/MPL/MPL-1.1.html
== END LICENSE ==
Utility functions for the File Manager Connector for Python
"""
import string, re
import os
import config as Config
# Generic manipulation functions
def removeExtension(fileName):
	"Strip the final extension (everything after the last dot) from fileName."
	# rindex raises ValueError when there is no dot, same as the historic behaviour.
	return fileName[:fileName.rindex(".")]
def getExtension(fileName):
	"Return the extension of fileName (the text after the last dot)."
	# rindex raises ValueError when there is no dot, same as the historic behaviour.
	return fileName[fileName.rindex(".") + 1:]
def removeFromStart(string, char):
	# Strip every leading occurrence of char from the given string.
	# NOTE: the parameter name shadows the stdlib `string` module imported above.
	return string.lstrip(char)
def removeFromEnd(string, char):
	# Strip every trailing occurrence of char from the given string.
	# NOTE: the parameter name shadows the stdlib `string` module imported above.
	return string.rstrip(char)
# Path functions
def combinePaths( basePath, folder ):
	"Join basePath and folder with exactly one '/' between them."
	# Equivalent to removeFromEnd(basePath, '/') + '/' + removeFromStart(folder, '/'),
	# with the helpers inlined.
	return basePath.rstrip('/') + '/' + folder.lstrip('/')
def getFileName(filename):
	"Purpose: helper function to extrapolate the filename (last path component, '/' or '\\' separated)."
	for sep in ("/", "\\"):
		parts = filename.split(sep)
		if len(parts) > 1:
			filename = parts[-1]
	return filename
def sanitizeFolderName( newFolderName ):
	"Do a cleanup of the folder name to avoid possible problems"
	# Replace every . \ / | : ? * with an underscore (same set as before,
	# expressed as a character class instead of an alternation).
	return re.sub( r'[.\\/|:?*]', '_', newFolderName )
def sanitizeFileName( newFileName ):
	"""Do a cleanup of the file name to avoid possible problems.

	When Config.ForceSingleExtension is set, every dot except the last is
	replaced with an underscore (defends against double-extension tricks
	like "evil.php.txt").  Directories are stripped and the characters
	\\ / | : ? * are replaced with underscores.
	"""
	if ( Config.ForceSingleExtension ): # remove all dots but the last one
		# BUG FIX: the old pattern kept PHP-style '/.../' delimiters, so it
		# required literal slashes and never matched -- ForceSingleExtension
		# was silently ineffective (a security issue).
		newFileName = re.sub ( r'\.(?![^.]*$)', '_', newFileName )
	newFileName = newFileName.replace('\\','/')	# convert windows to unix path
	newFileName = os.path.basename (newFileName)	# strip directories
	# Remove \ / | : ? *  (BUG FIX: same stray '/.../' delimiters removed here).
	return re.sub ( r'[\\/|:?*]', '_', newFileName )
def getCurrentFolder(currentFolder):
	"""Normalize and validate the client-supplied virtual folder path.

	Returns the folder guaranteed to start and end with '/', with double
	slashes collapsed, or None if the path contains '..' (path traversal).
	"""
	if not currentFolder:
		currentFolder = '/'
	# Check the current folder syntax (must begin and end with a slash).
	# FIX: replaced the deprecated '<>' operator (removed in Python 3) with '!='.
	if (currentFolder[-1] != "/"):
		currentFolder += "/"
	if (currentFolder[0] != "/"):
		currentFolder = "/" + currentFolder
	# Ensure the folder path has no double-slashes
	while '//' in currentFolder:
		currentFolder = currentFolder.replace('//','/')
	# Check for invalid folder paths (..)
	if '..' in currentFolder:
		return None
	return currentFolder
def mapServerPath( environ, url):
	" Emulate the asp Server.mapPath function. Given an url path return the physical directory that it corresponds to "
	# This isn't correct but for the moment there's no other solution:
	# if this script is under a virtual directory or symlink, getRootPath
	# will detect the problem and stop.
	return combinePaths( getRootPath(environ), url )
def mapServerFolder(resourceTypePath, folderPath):
	# Append a virtual folder to a resource-type base path, normalizing the joining slash.
	return combinePaths ( resourceTypePath , folderPath )
def getRootPath(environ):
	"""Purpose: returns the root path on the server.

	Prefers DOCUMENT_ROOT; otherwise derives it from SCRIPT_FILENAME and the
	process working directory.  Raises Exception when the mapping cannot be
	established (virtual directory / symlink setups).
	"""
	# WARNING: this may not be thread safe, and doesn't work w/ VirtualServer/mod_python
	# Use Config.UserFilesAbsolutePath instead
	if 'DOCUMENT_ROOT' in environ:
		return environ['DOCUMENT_ROOT']
	else:
		realPath = os.path.realpath( './' )
		selfPath = environ['SCRIPT_FILENAME']
		selfPath = selfPath [ : selfPath.rfind( '/' ) ]
		selfPath = selfPath.replace( '/', os.path.sep)
		position = realPath.find(selfPath)
		# This can check only that this script isn't run from a virtual dir,
		# but it avoids the problems that arise if it isn't checked.
		# BUG FIX: a stray debugging "raise realPath" statement was left here,
		# which made this branch always fail (raising a string is itself
		# invalid); it has been removed.  '<>' was also replaced with '!='.
		if ( position < 0 or position != len(realPath) - len(selfPath) or realPath[ : position ]==''):
			raise Exception('Sorry, can\'t map "UserFilesPath" to a physical path. You must set the "UserFilesAbsolutePath" value in "editor/filemanager/connectors/py/config.py".')
		return realPath[ : position ]
| Python |
#!/usr/bin/env python
"""
FCKeditor - The text editor for Internet - http://www.fckeditor.net
Copyright (C) 2003-2007 Frederico Caldeira Knabben
== BEGIN LICENSE ==
Licensed under the terms of any of the following licenses at your
choice:
- GNU General Public License Version 2 or later (the "GPL")
http://www.gnu.org/licenses/gpl.html
- GNU Lesser General Public License Version 2.1 or later (the "LGPL")
http://www.gnu.org/licenses/lgpl.html
- Mozilla Public License Version 1.1 or later (the "MPL")
http://www.mozilla.org/MPL/MPL-1.1.html
== END LICENSE ==
Connector/QuickUpload for Python (WSGI wrapper).
See config.py for configuration settings
"""
from connector import FCKeditorConnector
from upload import FCKeditorQuickUpload
import cgitb
from cStringIO import StringIO
# Running from a WSGI-capable server (recommended)
def App(environ, start_response):
	"WSGI entry point. Dispatch to the connector or the quick uploader based on the script name and run it."
	script = environ['SCRIPT_NAME']
	if script.endswith("connector.py"):
		conn = FCKeditorConnector(environ)
	elif script.endswith("upload.py"):
		conn = FCKeditorQuickUpload(environ)
	else:
		# Not one of our entry points: answer with a small diagnostic page.
		start_response ("200 Ok", [('Content-Type','text/html')])
		yield "Unknown page requested: "
		yield script
		return
	try:
		data = conn.doResponse()		# run the connector
		start_response ("200 Ok", conn.headers)	# start WSGI response
		yield data				# send response text
	except:
		# Best-effort error page: render the traceback as HTML via cgitb.
		start_response("500 Internal Server Error",[("Content-type","text/html")])
		buf = StringIO()
		cgitb.Hook(file = buf).handle()
		yield buf.getvalue()
| Python |
#!/usr/bin/env python
"""
FCKeditor - The text editor for Internet - http://www.fckeditor.net
Copyright (C) 2003-2007 Frederico Caldeira Knabben
== BEGIN LICENSE ==
Licensed under the terms of any of the following licenses at your
choice:
- GNU General Public License Version 2 or later (the "GPL")
http://www.gnu.org/licenses/gpl.html
- GNU Lesser General Public License Version 2.1 or later (the "LGPL")
http://www.gnu.org/licenses/lgpl.html
- Mozilla Public License Version 1.1 or later (the "MPL")
http://www.mozilla.org/MPL/MPL-1.1.html
== END LICENSE ==
Connector for Python (CGI and WSGI).
"""
import os
try: # Windows needs stdio set for binary mode for file upload to work.
import msvcrt
msvcrt.setmode (0, os.O_BINARY) # stdin = 0
msvcrt.setmode (1, os.O_BINARY) # stdout = 1
except ImportError:
pass
from fckutil import *
from fckoutput import *
import config as Config
class GetFoldersCommandMixin (object):
	def getFolders(self, resourceType, currentFolder):
		"""
		Purpose: command to receive a list of folders
		Returns an XML <Folders> fragment listing the sub-folders of currentFolder.
		"""
		# Map the virtual path to our local server path
		serverPath = mapServerFolder(self.userFilesFolder, currentFolder)
		xmlParts = ["""<Folders>"""]	# open the folders node
		for entry in os.listdir(serverPath):
			entryPath = mapServerFolder(serverPath, entry)
			if os.path.isdir(entryPath):
				xmlParts.append("""<Folder name="%s" />""" % (
					convertToXmlAttribute(entry)
				))
		xmlParts.append("""</Folders>""")	# close the folders node
		return "".join(xmlParts)
class GetFoldersAndFilesCommandMixin (object):
	def getFoldersAndFiles(self, resourceType, currentFolder):
		"""
		Purpose: command to receive a list of folders and files
		Returns the XML fragments <Folders>...</Folders><Files>...</Files>
		describing the contents of currentFolder.
		"""
		# Map the virtual path to our local server
		serverPath = mapServerFolder(self.userFilesFolder,currentFolder)
		# Open the folders / files node
		folders = """<Folders>"""
		files = """<Files>"""
		for someObject in os.listdir(serverPath):
			someObjectPath = mapServerFolder(serverPath, someObject)
			if os.path.isdir(someObjectPath):
				folders += """<Folder name="%s" />""" % (
					convertToXmlAttribute(someObject)
				)
			elif os.path.isfile(someObjectPath):
				# FIX: stat the file once and reuse the result (the old code
				# computed `size` and then called os.path.getsize a second time).
				size = os.path.getsize(someObjectPath)
				files += """<File name="%s" size="%s" />""" % (
					convertToXmlAttribute(someObject),
					size
				)
		# Close the folders / files node
		folders += """</Folders>"""
		files += """</Files>"""
		return folders + files
class CreateFolderCommandMixin (object):
	def createFolder(self, resourceType, currentFolder):
		"""
		Purpose: command to create a new folder
		Reads "NewFolderName" from the request, sanitizes it, creates the
		folder under the current virtual folder and answers with an XML
		error node (errorNo 0 means success).
		"""
		errorNo = 0; errorMsg ='';
		if self.request.has_key("NewFolderName"):
			newFolder = self.request.get("NewFolderName", None)
			newFolder = sanitizeFolderName (newFolder)
			try:
				# Map the virtual target path to a physical one and create it.
				newFolderPath = mapServerFolder(self.userFilesFolder, combinePaths(currentFolder, newFolder))
				self.createServerFolder(newFolderPath)
			except Exception, e:
				# Re-encode the OS error message (latin-1 -> utf-8); careful with encodings!
				errorMsg = str(e).decode('iso-8859-1').encode('utf-8')
				if hasattr(e,'errno'):
					# Translate OS errno values into connector error codes.
					if e.errno==17: #file already exists
						errorNo=0
					elif e.errno==13: # permission denied
						errorNo = 103
					elif e.errno==36 or e.errno==2 or e.errno==22: # filename too long / no such file / invalid name
						errorNo = 102
					else:
						errorNo = 110
		else:
			# No "NewFolderName" field in the request: report an invalid name.
			errorNo = 102
		return self.sendErrorNode ( errorNo, errorMsg )
	def createServerFolder(self, folderPath):
		"Purpose: physically creates a folder on the server"
		# No need to check if the parent exists, just create all hierarchy.
		# The umask is cleared so the 0755 mode is applied exactly, then restored.
		oldumask = os.umask(0)
		os.makedirs(folderPath,mode=0755)
		os.umask( oldumask )
class UploadFileCommandMixin (object):
	def uploadFile(self, resourceType, currentFolder):
		"""
		Purpose: command to upload files to server (same as FileUpload)
		Validates the extension against the per-resource-type allow/deny
		lists, renames the file if the name is already taken, writes the
		contents and answers with the upload result.
		"""
		errorNo = 0
		if self.request.has_key("NewFile"):
			# newFile has all the contents we need
			newFile = self.request.get("NewFile", "")
			# Get the file name
			newFileName = newFile.filename
			newFileName = sanitizeFileName( newFileName )
			newFileNameOnly = removeExtension(newFileName)
			newFileExtension = getExtension(newFileName).lower()
			allowedExtensions = Config.AllowedExtensions[resourceType]
			deniedExtensions = Config.DeniedExtensions[resourceType]
			if (allowedExtensions):
				# An allow-list is configured: only listed extensions pass.
				isAllowed = False
				if (newFileExtension in allowedExtensions):
					isAllowed = True
			elif (deniedExtensions):
				# Only a deny-list is configured: everything not listed passes.
				isAllowed = True
				if (newFileExtension in deniedExtensions):
					isAllowed = False
			else:
				# No extension limitations
				isAllowed = True
			if (isAllowed):
				# Upload to operating system
				# Map the virtual path to the local server path
				currentFolderPath = mapServerFolder(self.userFilesFolder, currentFolder)
				i = 0
				while (True):
					newFilePath = os.path.join (currentFolderPath,newFileName)
					if os.path.exists(newFilePath):
						# Name collision: derive "name(0001).ext" and retry.
						i += 1
						newFileName = "%s(%04d).%s" % (
							newFileNameOnly, i, newFileExtension
						)
						errorNo= 201 # file renamed
					else:
						# Read file contents and write to the desired path (similar to php's move_uploaded_file)
						fout = file(newFilePath, 'wb')
						while (True):
							chunk = newFile.file.read(100000)
							if not chunk: break
							fout.write (chunk)
						fout.close()
						if os.path.exists ( newFilePath ):
							# Clear the umask so chmod applies 0755 exactly, then restore it.
							oldumask = os.umask(0)
							os.chmod( newFilePath, 0755 )
							os.umask( oldumask )
						# NOTE(review): currentFolder starts with '/' and webUserFilesFolder
						# usually ends with '/', so this URL can contain a double slash --
						# confirm whether callers normalize it.
						newFileUrl = self.webUserFilesFolder + currentFolder + newFileName
						return self.sendUploadResults( errorNo , newFileUrl, newFileName )
			else:
				return self.sendUploadResults( errorNo = 203, customMsg = "Extension not allowed" )
		else:
			return self.sendUploadResults( errorNo = 202, customMsg = "No File" )
| Python |
#!/usr/bin/env python
"""
FCKeditor - The text editor for Internet - http://www.fckeditor.net
Copyright (C) 2003-2007 Frederico Caldeira Knabben
== BEGIN LICENSE ==
Licensed under the terms of any of the following licenses at your
choice:
- GNU General Public License Version 2 or later (the "GPL")
http://www.gnu.org/licenses/gpl.html
- GNU Lesser General Public License Version 2.1 or later (the "LGPL")
http://www.gnu.org/licenses/lgpl.html
- Mozilla Public License Version 1.1 or later (the "MPL")
http://www.mozilla.org/MPL/MPL-1.1.html
== END LICENSE ==
This is the "File Uploader" for Python
"""
import os
from fckutil import *
from fckcommands import * # default command's implementation
from fckconnector import FCKeditorConnectorBase # import base connector
import config as Config
class FCKeditorQuickUpload(	FCKeditorConnectorBase,
				UploadFileCommandMixin,
				BaseHttpMixin, BaseHtmlMixin):
	"Connector that only handles the QuickUpload command (the Upload tab in the resource dialogs)."
	def doResponse(self):
		"Main function. Process the request, set headers and return a string as response."
		# Check if this connector is disabled
		if not(Config.Enabled):
			return self.sendUploadResults(1, "This file uploader is disabled. Please check the \"editor/filemanager/connectors/py/config.py\"")
		command = 'QuickUpload'
		# The file type (from the QueryString, by default 'File').
		resourceType = self.request.get('Type','File')
		currentFolder = getCurrentFolder(self.request.get("CurrentFolder",""))
		# Check for invalid paths
		if currentFolder is None:
			return self.sendUploadResults(102, '', '', "")
		# Check if it is an allowed command
		if ( not command in Config.ConfigAllowedCommands ):
			return self.sendUploadResults( 1, '', '', 'The %s command isn\'t allowed' % command )
		if ( not resourceType in Config.ConfigAllowedTypes ):
			return self.sendUploadResults( 1, '', '', 'Invalid type specified' )
		# Setup paths
		self.userFilesFolder = Config.QuickUploadAbsolutePath[resourceType]
		self.webUserFilesFolder = Config.QuickUploadPath[resourceType]
		if not self.userFilesFolder: # no absolute path given (dangerous...)
			self.userFilesFolder = mapServerPath(self.environ,
								self.webUserFilesFolder)
		# Ensure that the directory exists.
		if not os.path.exists(self.userFilesFolder):
			try:
				# BUG FIX: was "self.createServerFoldercreateServerFolder(...)",
				# a garbled attribute name that always raised AttributeError.
				self.createServerFolder( self.userFilesFolder )
			except:
				return self.sendError(1, "This connector couldn\'t access to local user\'s files directories. Please check the UserFilesAbsolutePath in \"editor/filemanager/connectors/py/config.py\" and try again. ")
		# File upload doesn't have to return XML, so intercept here
		return self.uploadFile(resourceType, currentFolder)
# Running from command line (plain old CGI)
if __name__ == '__main__':
	try:
		# Create a Connector Instance
		conn = FCKeditorQuickUpload()
		data = conn.doResponse()
		# Emit the queued HTTP headers, a blank line, then the body (CGI protocol).
		for header in conn.headers:
			if not header is None:
				print '%s: %s' % header
		print
		print data
	except:
		# On any failure emit a plain-text traceback so the client sees the error.
		print "Content-Type: text/plain"
		print
		import cgi
		cgi.print_exception()
| Python |
#!/usr/bin/env python
"""
FCKeditor - The text editor for Internet - http://www.fckeditor.net
Copyright (C) 2003-2007 Frederico Caldeira Knabben
== BEGIN LICENSE ==
Licensed under the terms of any of the following licenses at your
choice:
- GNU General Public License Version 2 or later (the "GPL")
http://www.gnu.org/licenses/gpl.html
- GNU Lesser General Public License Version 2.1 or later (the "LGPL")
http://www.gnu.org/licenses/lgpl.html
- Mozilla Public License Version 1.1 or later (the "MPL")
http://www.mozilla.org/MPL/MPL-1.1.html
== END LICENSE ==
Base Connector for Python (CGI and WSGI).
See config.py for configuration settings
"""
import cgi, os
from fckutil import *
from fckcommands import * # default command's implementation
from fckoutput import * # base http, xml and html output mixins
import config as Config
class FCKeditorConnectorBase( object ):
	"The base connector class. Subclass it to extend functionality (see Zope example)"
	def __init__(self, environ=None):
		"Constructor: parse the request fields and initialize state."
		self.request = FCKeditorRequest(environ)	# parsed request wrapper
		self.headers = []				# response headers collected via setHeader
		# Fall back to os.environ when no WSGI environment was supplied (plain CGI).
		self.environ = environ or os.environ
	def setHeader(self, key, value):
		"Queue a response header as a (key, value) tuple."
		self.headers.append((key, value))
class FCKeditorRequest(object):
	"A wrapper around the request object"
	def __init__(self, environ):
		# environ: WSGI environment dict, or None when running as plain CGI.
		if environ: # WSGI
			self.request = cgi.FieldStorage(fp=environ['wsgi.input'],
						environ=environ,
						keep_blank_values=1)
			self.environ = environ
		else: # plain old cgi
			self.environ = os.environ
			self.request = cgi.FieldStorage()
		if 'REQUEST_METHOD' in self.environ and 'QUERY_STRING' in self.environ:
			if self.environ['REQUEST_METHOD'].upper()=='POST':
				# we are in a POST, but GET query_string exists
				# cgi parses by default POST data, so parse GET QUERY_STRING too
				self.get_request = cgi.FieldStorage(fp=None,
						environ={
						'REQUEST_METHOD':'GET',
						'QUERY_STRING':self.environ['QUERY_STRING'],
						},
						)
			else:
				self.get_request={}
		# NOTE(review): if REQUEST_METHOD or QUERY_STRING is missing from the
		# environment, self.get_request is never assigned and has_key()/get()
		# below would raise AttributeError -- confirm against supported servers.
	def has_key(self, key):
		# True if the field exists in either the POST body or the parsed GET query string.
		return self.request.has_key(key) or self.get_request.has_key(key)
	def get(self, key, default=None):
		# Look the field up in POST data first, then in the parsed GET query string.
		if key in self.request.keys():
			field = self.request[key]
		elif key in self.get_request.keys():
			field = self.get_request[key]
		else:
			return default
		if hasattr(field,"filename") and field.filename: #file upload, do not convert return value
			return field
		else:
			return field.value
| Python |
#!/usr/bin/env python
"""
FCKeditor - The text editor for Internet - http://www.fckeditor.net
Copyright (C) 2003-2007 Frederico Caldeira Knabben
== BEGIN LICENSE ==
Licensed under the terms of any of the following licenses at your
choice:
- GNU General Public License Version 2 or later (the "GPL")
http://www.gnu.org/licenses/gpl.html
- GNU Lesser General Public License Version 2.1 or later (the "LGPL")
http://www.gnu.org/licenses/lgpl.html
- Mozilla Public License Version 1.1 or later (the "MPL")
http://www.mozilla.org/MPL/MPL-1.1.html
== END LICENSE ==
Connector for Python (CGI and WSGI).
See config.py for configuration settings
"""
import os
from fckutil import *
from fckcommands import * # default command's implementation
from fckoutput import * # base http, xml and html output mixins
from fckconnector import FCKeditorConnectorBase # import base connector
import config as Config
class FCKeditorConnector(	FCKeditorConnectorBase,
				GetFoldersCommandMixin,
				GetFoldersAndFilesCommandMixin,
				CreateFolderCommandMixin,
				UploadFileCommandMixin,
				BaseHttpMixin, BaseXmlMixin, BaseHtmlMixin ):
	"The Standard connector class."
	def doResponse(self):
		"Main function. Process the request, set headers and return a string as response."
		s = ""
		# Check if this connector is disabled
		if not(Config.Enabled):
			return self.sendError(1, "This connector is disabled. Please check the connector configurations in \"editor/filemanager/connectors/py/config.py\" and try again.")
		# Make sure we have valid inputs
		for key in ("Command","Type","CurrentFolder"):
			if not self.request.has_key (key):
				return
		# Get command, resource type and current folder
		command = self.request.get("Command")
		resourceType = self.request.get("Type")
		currentFolder = getCurrentFolder(self.request.get("CurrentFolder"))
		# Check for invalid paths
		if currentFolder is None:
			return self.sendError(102, "")
		# Check if it is an allowed command
		if ( not command in Config.ConfigAllowedCommands ):
			return self.sendError( 1, 'The %s command isn\'t allowed' % command )
		if ( not resourceType in Config.ConfigAllowedTypes ):
			return self.sendError( 1, 'Invalid type specified' )
		# Setup paths
		if command == "QuickUpload":
			self.userFilesFolder = Config.QuickUploadAbsolutePath[resourceType]
			self.webUserFilesFolder = Config.QuickUploadPath[resourceType]
		else:
			self.userFilesFolder = Config.FileTypesAbsolutePath[resourceType]
			self.webUserFilesFolder = Config.FileTypesPath[resourceType]
		if not self.userFilesFolder: # no absolute path given (dangerous...)
			self.userFilesFolder = mapServerPath(self.environ,
								self.webUserFilesFolder)
		# Ensure that the directory exists.
		if not os.path.exists(self.userFilesFolder):
			try:
				# BUG FIX: was "self.createServerFoldercreateServerFolder(...)",
				# a garbled attribute name that always raised AttributeError.
				self.createServerFolder( self.userFilesFolder )
			except:
				return self.sendError(1, "This connector couldn\'t access to local user\'s files directories. Please check the UserFilesAbsolutePath in \"editor/filemanager/connectors/py/config.py\" and try again. ")
		# File upload doesn't have to return XML, so intercept here
		if (command == "FileUpload"):
			return self.uploadFile(resourceType, currentFolder)
		# Create Url
		url = combinePaths( self.webUserFilesFolder, currentFolder )
		# Begin XML
		s += self.createXmlHeader(command, resourceType, currentFolder, url)
		# Execute the command via a dispatch table
		selector = {"GetFolders": self.getFolders,
				"GetFoldersAndFiles": self.getFoldersAndFiles,
				"CreateFolder": self.createFolder,
				}
		s += selector[command](resourceType, currentFolder)
		s += self.createXmlFooter()
		return s
# Running from command line (plain old CGI)
if __name__ == '__main__':
	try:
		# Create a Connector Instance
		conn = FCKeditorConnector()
		data = conn.doResponse()
		# Emit the queued HTTP headers, a blank line, then the body (CGI protocol).
		for header in conn.headers:
			print '%s: %s' % header
		print
		print data
	except:
		# On any failure fall back to a plain-text traceback so the client sees the error.
		print "Content-Type: text/plain"
		print
		import cgi
		cgi.print_exception()
#!/usr/bin/env python
"""
FCKeditor - The text editor for Internet - http://www.fckeditor.net
Copyright (C) 2003-2007 Frederico Caldeira Knabben
== BEGIN LICENSE ==
Licensed under the terms of any of the following licenses at your
choice:
- GNU General Public License Version 2 or later (the "GPL")
http://www.gnu.org/licenses/gpl.html
- GNU Lesser General Public License Version 2.1 or later (the "LGPL")
http://www.gnu.org/licenses/lgpl.html
- Mozilla Public License Version 1.1 or later (the "MPL")
http://www.mozilla.org/MPL/MPL-1.1.html
== END LICENSE ==
Connector for Python and Zope.
This code was not tested at all.
It just was ported from pre 2.5 release, so for further reference see
\editor\filemanager\browser\default\connectors\py\connector.py in previous
releases.
"""
from fckutil import *
from connector import *
import config as Config
class FCKeditorConnectorZope(FCKeditorConnector):
	"""
	Zope version of FCKeditorConnector.

	Stores and serves the editor's user files as objects in the Zope
	object database (Folder / File / Image) instead of on the local
	filesystem.
	"""
	# Grant Zope's security machinery access to this class's attributes.
	__allow_access_to_unprotected_subobjects__ = 1

	def __init__(self, context=None):
		"""
		Constructor.

		context -- the Zope context object; used to reach the REQUEST,
		the RESPONSE and the physical root of the ZODB.
		"""
		FCKeditorConnector.__init__(self, environ=None)  # call superclass constructor
		# Instance Attributes
		self.context = context
		self.request = FCKeditorRequest(context)

	def getZopeRootContext(self):
		"""Return (and cache) the physical root of the ZODB."""
		if self.zopeRootContext is None:
			self.zopeRootContext = self.context.getPhysicalRoot()
		return self.zopeRootContext

	def getZopeUploadContext(self):
		"""Return (and cache) the Zope folder configured as the upload root."""
		if self.zopeUploadContext is None:
			folderNames = self.userFilesFolder.split("/")
			c = self.getZopeRootContext()
			for folderName in folderNames:
				# Skip empty segments produced by leading/trailing "/".
				if folderName != "":
					c = c[folderName]
			self.zopeUploadContext = c
		return self.zopeUploadContext

	def setHeader(self, key, value):
		"""Set a response header on the Zope RESPONSE object."""
		self.context.REQUEST.RESPONSE.setHeader(key, value)

	def getFolders(self, resourceType, currentFolder):
		"""Return the <Folders> XML node listing sub-folders of currentFolder."""
		# Open the folders node
		s = ""
		s += """<Folders>"""
		zopeFolder = self.findZopeFolder(resourceType, currentFolder)
		for (name, o) in zopeFolder.objectItems(["Folder"]):
			s += """<Folder name="%s" />""" % (
				convertToXmlAttribute(name)
			)
		# Close the folders node
		s += """</Folders>"""
		return s

	def getZopeFoldersAndFiles(self, resourceType, currentFolder):
		"""Return the folder listing followed by the file listing."""
		# NOTE(review): self.getZopeFolders is not defined in this class
		# (only getFolders is) -- presumably provided or remapped by the
		# superclass; verify before relying on this method.
		folders = self.getZopeFolders(resourceType, currentFolder)
		files = self.getZopeFiles(resourceType, currentFolder)
		s = folders + files
		return s

	def getZopeFiles(self, resourceType, currentFolder):
		"""Return the <Files> XML node listing File/Image objects."""
		# Open the files node
		s = ""
		s += """<Files>"""
		zopeFolder = self.findZopeFolder(resourceType, currentFolder)
		for (name, o) in zopeFolder.objectItems(["File","Image"]):
			s += """<File name="%s" size="%s" />""" % (
				convertToXmlAttribute(name),
				((o.get_size() / 1024) + 1)  # size reported in KB
			)
		# Close the files node
		s += """</Files>"""
		return s

	def findZopeFolder(self, resourceType, folderName):
		"""
		Return the Zope folder object for resourceType/folderName,
		creating the per-resource-type folder on demand.
		"""
		zopeFolder = self.getZopeUploadContext()
		folderName = self.removeFromStart(folderName, "/")
		folderName = self.removeFromEnd(folderName, "/")
		if resourceType != "":
			try:
				zopeFolder = zopeFolder[resourceType]
			except:
				# The per-type folder does not exist yet: create it.
				zopeFolder.manage_addProduct["OFSP"].manage_addFolder(id=resourceType, title=resourceType)
				zopeFolder = zopeFolder[resourceType]
		if folderName != "":
			folderNames = folderName.split("/")
			for folderName in folderNames:
				zopeFolder = zopeFolder[folderName]
		return zopeFolder

	def createFolder(self, resourceType, currentFolder):
		"""Create the folder named by the "NewFolderName" request parameter."""
		# Find out where we are
		zopeFolder = self.findZopeFolder(resourceType, currentFolder)
		errorNo = 0
		errorMsg = ""
		if self.request.has_key("NewFolderName"):
			newFolder = self.request.get("NewFolderName", None)
			zopeFolder.manage_addProduct["OFSP"].manage_addFolder(id=newFolder, title=newFolder)
		else:
			errorNo = 102  # missing / invalid folder name
		return self.sendErrorNode ( errorNo, errorMsg )

	def uploadFile(self, resourceType, currentFolder, count=None):
		"""
		Store the uploaded "NewFile" in the target Zope folder.

		When adding the object fails (most likely a duplicate id), retry
		under a numbered id (name.1.ext, name.2.ext, ...) by recursing
		with an incremented count.
		"""
		zopeFolder = self.findZopeFolder(resourceType, currentFolder)
		file = self.request.get("NewFile", None)
		fileName = self.getFileName(file.filename)
		fileNameOnly = self.removeExtension(fileName)
		fileExtension = self.getExtension(fileName).lower()
		if (count):
			nid = "%s.%s.%s" % (fileNameOnly, count, fileExtension)
		else:
			nid = fileName
		title = nid
		try:
			zopeFolder.manage_addProduct['OFSP'].manage_addFile(
				id=nid,
				title=title,
				file=file.read()
			)
		except:
			if (count):
				count += 1
			else:
				count = 1
			# BUG FIX: the original called self.zopeFileUpload(), which is
			# not defined anywhere; the retry must re-enter this method.
			return self.uploadFile(resourceType, currentFolder, count)
		return self.sendUploadResults( 0 )
class FCKeditorRequest(object):
	"""Thin adapter exposing a dict-like interface over the Zope REQUEST."""

	def __init__(self, context=None):
		# Keep a direct reference to the REQUEST carried by the context.
		self.request = context.REQUEST

	def has_key(self, key):
		"""Return whether the request carries a value for *key*."""
		return self.request.has_key(key)

	def get(self, key, default=None):
		"""Return the request value for *key*, or *default* when absent."""
		return self.request.get(key, default)
"""
Running from zope, you will need to modify this connector.
If you have uploaded the FCKeditor into Zope (like me), you need to
move this connector out of Zope, and replace the "connector" with an
alias as below. The key to it is to pass the Zope context in, as
we then have a link to the Zope context.
## Script (Python) "connector.py"
##bind container=container
##bind context=context
##bind namespace=
##bind script=script
##bind subpath=traverse_subpath
##parameters=*args, **kws
##title=ALIAS
##
import Products.zope as connector
return connector.FCKeditorConnectorZope(context=context).doResponse()
"""
| Python |
#!/usr/bin/env python
"""
FCKeditor - The text editor for Internet - http://www.fckeditor.net
Copyright (C) 2003-2007 Frederico Caldeira Knabben
== BEGIN LICENSE ==
Licensed under the terms of any of the following licenses at your
choice:
- GNU General Public License Version 2 or later (the "GPL")
http://www.gnu.org/licenses/gpl.html
- GNU Lesser General Public License Version 2.1 or later (the "LGPL")
http://www.gnu.org/licenses/lgpl.html
- Mozilla Public License Version 1.1 or later (the "MPL")
http://www.mozilla.org/MPL/MPL-1.1.html
== END LICENSE ==
Connector for Python (CGI and WSGI).
"""
from time import gmtime, strftime
import string
def escape(text, replace=None):
	"""
	Convert the special characters '&', '<', '>' and '"' to the HTML
	entities '&amp;', '&lt;', '&gt;' and '&quot;' (per RFC 1866).

	replace -- optional ``replace(text, old, new)`` callable; by default
	the ``replace`` method of *text* is used.  (The historical default,
	``string.replace``, no longer exists in modern Python.)
	"""
	if replace is None:
		replace = lambda s, old, new: s.replace(old, new)
	# BUG FIX: the replacement targets had been stripped down to the bare
	# characters (e.g. '&' -> '&'), turning every call into a no-op and
	# leaving output unescaped; the proper entities are restored here.
	text = replace(text, '&', '&amp;')  # must be done 1st
	text = replace(text, '<', '&lt;')
	text = replace(text, '>', '&gt;')
	text = replace(text, '"', '&quot;')
	return text
def convertToXmlAttribute(value):
	"""Escape *value* for safe use as an XML attribute; None becomes ''."""
	if value is None:
		return escape("")
	return escape(value)
class BaseHttpMixin(object):
	"""Mixin that prepares the HTTP response headers for connector replies."""

	def setHttpHeaders(self, content_type='text/xml'):
		"""Emit anti-caching headers plus the Content-Type header.

		Relies on a ``setHeader(name, value)`` method supplied by the
		concrete connector class.
		"""
		# Every trick in the book to stop the browser caching the result:
		# an expiry date in the past, an always-current Last-Modified,
		# and both HTTP/1.1 and HTTP/1.0 cache-control directives.
		anti_cache = [
			('Expires', 'Mon, 26 Jul 1997 05:00:00 GMT'),
			('Last-Modified', strftime("%a, %d %b %Y %H:%M:%S GMT", gmtime())),
			('Cache-Control', 'no-store, no-cache, must-revalidate'),
			('Cache-Control', 'post-check=0, pre-check=0'),
			('Pragma', 'no-cache'),
		]
		for name, value in anti_cache:
			self.setHeader(name, value)
		# Set the response format (always UTF-8 encoded).
		self.setHeader('Content-Type', content_type + '; charset=utf-8')
		return
class BaseXmlMixin(object):
	"""Mixin that builds the XML documents returned by the connector."""

	def createXmlHeader(self, command, resourceType, currentFolder, url):
		"""Return the XML prolog, the <Connector> open tag and the
		<CurrentFolder/> node; also sets the HTTP headers."""
		self.setHttpHeaders()
		parts = [
			"""<?xml version="1.0" encoding="utf-8" ?>""",
			# Main connector node, carrying the command and resource type.
			"""<Connector command="%s" resourceType="%s">""" % (
				command,
				resourceType
			),
			# Current folder node (path and URL are attribute-escaped).
			"""<CurrentFolder path="%s" url="%s" />""" % (
				convertToXmlAttribute(currentFolder),
				convertToXmlAttribute(url),
			),
		]
		return "".join(parts)

	def createXmlFooter(self):
		"""Return the closing </Connector> tag."""
		return """</Connector>"""

	def sendError(self, number, text):
		"""Return a complete XML error document for error *number*."""
		self.setHttpHeaders()
		errorNode = self.sendErrorNode (number, text)
		return ("""<?xml version="1.0" encoding="utf-8" ?>""" +
				"""<Connector>""" +
				errorNode +
				"""</Connector>""" )

	def sendErrorNode(self, number, text):
		"""Return a single <Error/> node with the number and escaped text."""
		return """<Error number="%s" text="%s" />""" % (number, convertToXmlAttribute(text))
class BaseHtmlMixin(object):
	"""Mixin that builds the HTML/JS response for file-upload requests."""

	def sendUploadResults( self, errorNo = 0, fileUrl = '', fileName = '', customMsg = '' ):
		"""
		Return the upload-result page: a small script that notifies the
		editor of the outcome via window.parent.OnUploadCompleted().

		FIX: this docstring was originally placed *after* the first
		statement, where it was a dead string literal, not a docstring.
		"""
		self.setHttpHeaders("text/html")
		# Double quotes are backslash-escaped so the values can be embedded
		# inside the double-quoted JavaScript string literals below.
		return """<script type="text/javascript">
			window.parent.OnUploadCompleted(%(errorNumber)s,"%(fileUrl)s","%(fileName)s","%(customMsg)s");
		</script>""" % {
			'errorNumber': errorNo,
			'fileUrl': fileUrl.replace ('"', '\\"'),
			'fileName': fileName.replace ( '"', '\\"' ) ,
			'customMsg': customMsg.replace ( '"', '\\"' ),
		}
Subsets and Splits
SQL Console for ajibawa-2023/Python-Code-Large
Provides a useful breakdown of language distribution in the training data, showing which languages have the most samples and helping identify potential imbalances across different language groups.