code stringlengths 1 1.49M | vector listlengths 0 7.38k | snippet listlengths 0 7.38k |
|---|---|---|
#!/usr/bin/env python
"""
FCKeditor - The text editor for Internet - http://www.fckeditor.net
Copyright (C) 2003-2010 Frederico Caldeira Knabben
== BEGIN LICENSE ==
Licensed under the terms of any of the following licenses at your
choice:
- GNU General Public License Version 2 or later (the "GPL")
http://www.gnu.org/licenses/gpl.html
- GNU Lesser General Public License Version 2.1 or later (the "LGPL")
http://www.gnu.org/licenses/lgpl.html
- Mozilla Public License Version 1.1 or later (the "MPL")
http://www.mozilla.org/MPL/MPL-1.1.html
== END LICENSE ==
Sample page.
"""
import cgi
import os
# Ensure that the fckeditor.py is included in your classpath
import fckeditor
# Tell the browser to render html
print "Content-Type: text/html"
print ""
# Document header
print """<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.0 Transitional//EN">
<html>
<head>
<title>FCKeditor - Sample</title>
<meta http-equiv="Content-Type" content="text/html; charset=utf-8">
<meta name="robots" content="noindex, nofollow">
<link href="../sample.css" rel="stylesheet" type="text/css" />
</head>
<body>
<h1>FCKeditor - Python - Sample 1</h1>
This sample displays a normal HTML form with an FCKeditor with full features
enabled.
<hr>
<form action="sampleposteddata.py" method="post" target="_blank">
"""
# This is the real work
try:
sBasePath = os.environ.get("SCRIPT_NAME")
sBasePath = sBasePath[0:sBasePath.find("_samples")]
oFCKeditor = fckeditor.FCKeditor('FCKeditor1')
oFCKeditor.BasePath = sBasePath
oFCKeditor.Value = """<p>This is some <strong>sample text</strong>. You are using <a href="http://www.fckeditor.net/">FCKeditor</a>.</p>"""
print oFCKeditor.Create()
except Exception, e:
print e
print """
<br>
<input type="submit" value="Submit">
</form>
"""
# For testing your environments
#print "<hr>"
#for key in os.environ.keys():
# print "%s: %s<br>" % (key, os.environ.get(key, ""))
#print "<hr>"
# Document footer
print """
</body>
</html>
"""
| [
[
1,
0,
0.2,
0.2,
0,
0.66,
0,
934,
0,
1,
0,
0,
934,
0,
0
],
[
1,
0,
0.4,
0.2,
0,
0.66,
0.5,
688,
0,
1,
0,
0,
688,
0,
0
],
[
1,
0,
0.8,
0.2,
0,
0.66,
1,
669,... | [
"import cgi",
"import os",
"import fckeditor"
] |
#!/usr/bin/env python
"""
FCKeditor - The text editor for Internet - http://www.fckeditor.net
Copyright (C) 2003-2010 Frederico Caldeira Knabben
== BEGIN LICENSE ==
Licensed under the terms of any of the following licenses at your
choice:
- GNU General Public License Version 2 or later (the "GPL")
http://www.gnu.org/licenses/gpl.html
- GNU Lesser General Public License Version 2.1 or later (the "LGPL")
http://www.gnu.org/licenses/lgpl.html
- Mozilla Public License Version 1.1 or later (the "MPL")
http://www.mozilla.org/MPL/MPL-1.1.html
== END LICENSE ==
This page lists the data posted by a form.
"""
import cgi
import os
# Tell the browser to render html
print "Content-Type: text/html"
print ""
try:
# Create a cgi object
form = cgi.FieldStorage()
except Exception, e:
print e
# Document header
print """<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.0 Transitional//EN">
<html>
<head>
<title>FCKeditor - Samples - Posted Data</title>
<meta http-equiv="Content-Type" content="text/html; charset=utf-8">
<meta name="robots" content="noindex, nofollow">
<link href="../sample.css" rel="stylesheet" type="text/css" />
</head>
<body>
"""
# This is the real work
print """
<h1>FCKeditor - Samples - Posted Data</h1>
This page lists all data posted by the form.
<hr>
<table border="1" cellspacing="0" id="outputSample">
<colgroup><col width="80"><col></colgroup>
<thead>
<tr>
<th>Field Name</th>
<th>Value</th>
</tr>
</thead>
"""
for key in form.keys():
try:
value = form[key].value
print """
<tr>
<th>%s</th>
<td><pre>%s</pre></td>
</tr>
""" % (cgi.escape(key), cgi.escape(value))
except Exception, e:
print e
print "</table>"
# For testing your environments
#print "<hr>"
#for key in os.environ.keys():
# print "%s: %s<br>" % (key, os.environ.get(key, ""))
#print "<hr>"
# Document footer
print """
</body>
</html>
"""
| [
[
1,
0,
0.3333,
0.3333,
0,
0.66,
0,
934,
0,
1,
0,
0,
934,
0,
0
],
[
1,
0,
0.6667,
0.3333,
0,
0.66,
1,
688,
0,
1,
0,
0,
688,
0,
0
]
] | [
"import cgi",
"import os"
] |
#!/usr/bin/env python
"""
FCKeditor - The text editor for Internet - http://www.fckeditor.net
Copyright (C) 2003-2010 Frederico Caldeira Knabben
== BEGIN LICENSE ==
Licensed under the terms of any of the following licenses at your
choice:
- GNU General Public License Version 2 or later (the "GPL")
http://www.gnu.org/licenses/gpl.html
- GNU Lesser General Public License Version 2.1 or later (the "LGPL")
http://www.gnu.org/licenses/lgpl.html
- Mozilla Public License Version 1.1 or later (the "MPL")
http://www.mozilla.org/MPL/MPL-1.1.html
== END LICENSE ==
Sample page.
"""
import cgi
import os
# Ensure that the fckeditor.py is included in your classpath
import fckeditor
# Tell the browser to render html
print "Content-Type: text/html"
print ""
# Document header
print """<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.0 Transitional//EN">
<html>
<head>
<title>FCKeditor - Sample</title>
<meta http-equiv="Content-Type" content="text/html; charset=utf-8">
<meta name="robots" content="noindex, nofollow">
<link href="../sample.css" rel="stylesheet" type="text/css" />
</head>
<body>
<h1>FCKeditor - Python - Sample 1</h1>
This sample displays a normal HTML form with an FCKeditor with full features
enabled.
<hr>
<form action="sampleposteddata.py" method="post" target="_blank">
"""
# This is the real work
try:
sBasePath = os.environ.get("SCRIPT_NAME")
sBasePath = sBasePath[0:sBasePath.find("_samples")]
oFCKeditor = fckeditor.FCKeditor('FCKeditor1')
oFCKeditor.BasePath = sBasePath
oFCKeditor.Value = """<p>This is some <strong>sample text</strong>. You are using <a href="http://www.fckeditor.net/">FCKeditor</a>.</p>"""
print oFCKeditor.Create()
except Exception, e:
print e
print """
<br>
<input type="submit" value="Submit">
</form>
"""
# For testing your environments
#print "<hr>"
#for key in os.environ.keys():
# print "%s: %s<br>" % (key, os.environ.get(key, ""))
#print "<hr>"
# Document footer
print """
</body>
</html>
"""
| [
[
1,
0,
0.2,
0.2,
0,
0.66,
0,
934,
0,
1,
0,
0,
934,
0,
0
],
[
1,
0,
0.4,
0.2,
0,
0.66,
0.5,
688,
0,
1,
0,
0,
688,
0,
0
],
[
1,
0,
0.8,
0.2,
0,
0.66,
1,
669,... | [
"import cgi",
"import os",
"import fckeditor"
] |
#!/usr/bin/env python
"""
FCKeditor - The text editor for Internet - http://www.fckeditor.net
Copyright (C) 2003-2010 Frederico Caldeira Knabben
== BEGIN LICENSE ==
Licensed under the terms of any of the following licenses at your
choice:
- GNU General Public License Version 2 or later (the "GPL")
http://www.gnu.org/licenses/gpl.html
- GNU Lesser General Public License Version 2.1 or later (the "LGPL")
http://www.gnu.org/licenses/lgpl.html
- Mozilla Public License Version 1.1 or later (the "MPL")
http://www.mozilla.org/MPL/MPL-1.1.html
== END LICENSE ==
This page lists the data posted by a form.
"""
import cgi
import os
# Tell the browser to render html
print "Content-Type: text/html"
print ""
try:
# Create a cgi object
form = cgi.FieldStorage()
except Exception, e:
print e
# Document header
print """<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.0 Transitional//EN">
<html>
<head>
<title>FCKeditor - Samples - Posted Data</title>
<meta http-equiv="Content-Type" content="text/html; charset=utf-8">
<meta name="robots" content="noindex, nofollow">
<link href="../sample.css" rel="stylesheet" type="text/css" />
</head>
<body>
"""
# This is the real work
print """
<h1>FCKeditor - Samples - Posted Data</h1>
This page lists all data posted by the form.
<hr>
<table border="1" cellspacing="0" id="outputSample">
<colgroup><col width="80"><col></colgroup>
<thead>
<tr>
<th>Field Name</th>
<th>Value</th>
</tr>
</thead>
"""
for key in form.keys():
try:
value = form[key].value
print """
<tr>
<th>%s</th>
<td><pre>%s</pre></td>
</tr>
""" % (cgi.escape(key), cgi.escape(value))
except Exception, e:
print e
print "</table>"
# For testing your environments
#print "<hr>"
#for key in os.environ.keys():
# print "%s: %s<br>" % (key, os.environ.get(key, ""))
#print "<hr>"
# Document footer
print """
</body>
</html>
"""
| [
[
1,
0,
0.3333,
0.3333,
0,
0.66,
0,
934,
0,
1,
0,
0,
934,
0,
0
],
[
1,
0,
0.6667,
0.3333,
0,
0.66,
1,
688,
0,
1,
0,
0,
688,
0,
0
]
] | [
"import cgi",
"import os"
] |
#-*- coding: utf-8 -*-
from django.http import HttpResponse, HttpResponseRedirect
from django.shortcuts import render_to_response
from django.template.loader import get_template
from django.template import RequestContext
from google.appengine.ext import db
from django.conf import settings
from models.main import App_user, Post, Comment, Tag
from smart_pager import Pager
from logics.main import get_user, set_user
from django.utils import simplejson
import re, datetime
def index(request, page = 1):
page = int(page)
if page < 1:
page = 1
page_size = 10
total = Post.count()
post_list = Post.get_list(page, page_size)
pager = Pager(total, page_size, page, page_list_num = 8)
return render_to_response('index.html', {
'post_list': post_list,
'pager': pager,
}, RequestContext(request))
def show_post(request, post_key):
post = db.get(post_key)
#上一篇 和 下一篇
pre_post = Post.all().order('offset').filter('offset > ', post.offset).get()
next_post = Post.all().order('-offset').filter('offset < ', post.offset).get()
return render_to_response('show_post.html', {
'post': post,
'pre_post': pre_post,
'next_post': next_post,
'user': get_user(),
'comment_list': post.get_comment_list(),
}, RequestContext(request))
def add_comment(request):
post_key = request.POST['post_key'].strip()
post = db.get(post_key)
author = get_user()
nickname = request.POST['nickname'].strip()
email = request.POST['email'].strip()
link = request.POST['link'].strip()
if not link:
link = None
content = request.POST['content'].strip()
if not nickname or not email or not content:
return HttpResponse(simplejson.dumps({'state': 'err', 'msg': u'请完整填写昵称、电子邮件和内容'}))
if not re.match(r'^\w+((_-\w+)|(\.\w+))*\@[A-Za-z0-9]+((\.|-)[A-Za-z0-9]+)*\.[A-Za-z0-9]+$', email):
return HttpResponse(simplejson.dumps({'state': 'err', 'msg': u'邮箱地址格式不正确'}))
ip = request.META["REMOTE_ADDR"]
if author:
Comment.auth_add(post, content, ip, author_nickname = nickname, link = link)
else:
Comment.anonymous_add(post, content, ip, email, nickname, link = link)
return HttpResponse(simplejson.dumps({'state': 'ok', 'msg': u'评论发表成功'}))
def atom(request):
post_list = Post.get_list(1, 10)
return render_to_response('atom.xml', {
'post_list': post_list,
'blog_updated': datetime.datetime.utcnow(),
}, RequestContext(request), mimetype = 'application/atom+xml')
def sitemap(request):
post_list = Post.get_list(1, 200)
return render_to_response('sitemap.xml', {
'post_list': post_list,
'blog_updated': datetime.datetime.utcnow(),
}, RequestContext(request), mimetype="text/xml; charset=utf-8") | [
[
1,
0,
0.023,
0.0115,
0,
0.66,
0,
779,
0,
2,
0,
0,
779,
0,
0
],
[
1,
0,
0.0345,
0.0115,
0,
0.66,
0.0667,
852,
0,
1,
0,
0,
852,
0,
0
],
[
1,
0,
0.046,
0.0115,
0,
0.... | [
"from django.http import HttpResponse, HttpResponseRedirect",
"from django.shortcuts import render_to_response",
"from django.template.loader import get_template",
"from django.template import RequestContext",
"from google.appengine.ext import db",
"from django.conf import settings",
"from models.main i... |
#-*- coding: utf-8 -*-
from django.http import HttpResponse, HttpResponseRedirect
from google.appengine.api import images
from google.appengine.ext import db
from models.main import App_user, Post, Comment, Tag
from common.view_decorator import role_required
from logics.main import get_user, set_user
import datetime
import django
from django.conf import settings
import ai
def get_django_version():
#(1, 0, 'alpha')
return django.VERSION[0]
class Images(db.Model):
uploader = db.ReferenceProperty(App_user)
content = db.BlobProperty()
image_type = db.StringProperty()
add_time = db.DateTimeProperty()
@role_required('admin')
def upload_image(request):
img = Images()
img_file = request.FILES.get('img')
if not img_file:
return HttpResponse('{status:"Please select your image."}')
img.uploader = get_user()
img.add_time = datetime.datetime.utcnow()
if get_django_version() >= 1:
content = img_file.read()
file_path = img_file.name
else:
content = img_file['content']
file_path = img_file['filename']
img.content = db.Blob(content)
img.image_type = ai.get_content_type(file_path)
key = img.put()
return HttpResponse('{status:"UPLOADED",image_url: "/rpc/img/?img_id=%s"}' % (key))
def img(request):
img = db.get(request.GET["img_id"])
if img and img.content:
return HttpResponse(img.content, mimetype=img.image_type)
else:
return HttpResponse("No image") | [
[
1,
0,
0.037,
0.0185,
0,
0.66,
0,
779,
0,
2,
0,
0,
779,
0,
0
],
[
1,
0,
0.0556,
0.0185,
0,
0.66,
0.0769,
279,
0,
1,
0,
0,
279,
0,
0
],
[
1,
0,
0.0741,
0.0185,
0,
0... | [
"from django.http import HttpResponse, HttpResponseRedirect",
"from google.appengine.api import images",
"from google.appengine.ext import db",
"from models.main import App_user, Post, Comment, Tag",
"from common.view_decorator import role_required",
"from logics.main import get_user, set_user",
"import... |
#!/usr/bin/env python
from django.core.management import execute_manager
try:
import settings # Assumed to be in the same directory.
except ImportError:
import sys
sys.stderr.write("Error: Can't find the file 'settings.py' in the directory containing %r. It appears you've customized things.\nYou'll have to run django-admin.py, passing it your settings module.\n(If the file settings.py does indeed exist, it's causing an ImportError somehow.)\n" % __file__)
sys.exit(1)
if __name__ == "__main__":
execute_manager(settings)
| [
[
1,
0,
0.1818,
0.0909,
0,
0.66,
0,
879,
0,
1,
0,
0,
879,
0,
0
],
[
7,
0,
0.5,
0.5455,
0,
0.66,
0.5,
0,
0,
1,
0,
0,
0,
0,
2
],
[
1,
1,
0.3636,
0.0909,
1,
0.24,
... | [
"from django.core.management import execute_manager",
"try:\n import settings # Assumed to be in the same directory.\nexcept ImportError:\n import sys\n sys.stderr.write(\"Error: Can't find the file 'settings.py' in the directory containing %r. It appears you've customized things.\\nYou'll have to run dj... |
import logging, os, sys
# Google App Engine imports.
from google.appengine.ext.webapp import util
# Remove the standard version of Django.
for k in [k for k in sys.modules if k.startswith('django')]:
del sys.modules[k]
# Force sys.path to have our own directory first, in case we want to import
# from it.
#sys.path.insert(0, os.path.abspath(os.path.dirname(__file__)))
# Must set this env var *before* importing any part of Django
os.environ['DJANGO_SETTINGS_MODULE'] = 'settings'
libs_path = os.path.abspath('libs')
django_path = os.path.abspath('libs/django.zip')
sys.path.insert(0, django_path)
sys.path.insert(0, libs_path)
import django.core.handlers.wsgi
####################################################3
import django.core.signals
import django.db
import django.dispatch.dispatcher
def log_exception(*args, **kwds):
logging.exception('Exception in request:')
# Log errors.
#django.dispatch.dispatcher.connect(
# log_exception, django.core.signals.got_request_exception)
# Unregister the rollback event handler.
#django.dispatch.dispatcher.disconnect(
# django.db._rollback_on_exception,
# django.core.signals.got_request_exception)
########################################################3
def main():
if django_path not in sys.path:
sys.path.insert(0, django_path)
if libs_path not in sys.path:
sys.path.insert(0, libs_path)
# Create a Django application for WSGI.
application = django.core.handlers.wsgi.WSGIHandler()
# Run the WSGI CGI handler with that application.
util.run_wsgi_app(application)
if __name__ == '__main__':
main()
| [
[
1,
0,
0.0182,
0.0182,
0,
0.66,
0,
715,
0,
3,
0,
0,
715,
0,
0
],
[
1,
0,
0.0727,
0.0182,
0,
0.66,
0.0714,
8,
0,
1,
0,
0,
8,
0,
0
],
[
6,
0,
0.1364,
0.0364,
0,
0.66... | [
"import logging, os, sys",
"from google.appengine.ext.webapp import util",
"for k in [k for k in sys.modules if k.startswith('django')]:\n del sys.modules[k]",
"os.environ['DJANGO_SETTINGS_MODULE'] = 'settings'",
"libs_path = os.path.abspath('libs')",
"django_path = os.path.abspath('libs/django.zip')",... |
#-*- coding: utf-8 -*-
import os, sys, tempfile
has_set = False
BASE_DIR = os.path.dirname(os.path.dirname(__file__))
import logging
logging.basicConfig(format='%(asctime)s %(levelname)8s %(message)s', stream=sys.stdout)
#logging.basicConfig(format='%(asctime)s %(levelname)8s %(message)s',
# filename=BASE_DIR+'/logs/send_feeds.logs',
# filemode='a')
logging.getLogger().setLevel(logging.NOTSET)
def set_app_evn():
global has_set
if has_set:
return
has_set = True
os.chdir(BASE_DIR)
sys.path.insert(0, BASE_DIR)
sys.path.insert(0, os.path.join(BASE_DIR, 'libs'))
sys.path.insert(0, 'C:/Program Files/Google/google_appengine')
#设置app engine环境变量
# from google.appengine.api import apiproxy_stub_map
# from google.appengine.api import datastore_file_stub
#
# app_id = 'myspace-sexy-girls'
# os.environ['APPLICATION_ID'] = app_id
# datastore_path = os.path.join(tempfile.gettempdir(),'dev_appserver.datastore')
# history_path = os.path.join(tempfile.gettempdir(),'dev_appserver.datastore.history')
# require_indexes = False
#
# apiproxy_stub_map.apiproxy = apiproxy_stub_map.APIProxyStubMap()
# datastore = datastore_file_stub.DatastoreFileStub(
# app_id, datastore_path, history_path, require_indexes=require_indexes)
# apiproxy_stub_map.apiproxy.RegisterStub('datastore_v3', datastore)
####################33end
app_id = '3kkk-me'
os.environ['APPLICATION_ID'] = app_id
###############django环境变量
django_path = os.path.join(BASE_DIR, 'libs/django.zip')
sys.path.insert(0, django_path)
from django.core.management import setup_environ
import settings
setup_environ(settings)
################3end
set_app_evn()
if __name__ == '__main__':
import code
code.interact('App Engine interactive console', None, locals())
| [
[
1,
0,
0.0364,
0.0182,
0,
0.66,
0,
688,
0,
3,
0,
0,
688,
0,
0
],
[
14,
0,
0.0545,
0.0182,
0,
0.66,
0.125,
973,
1,
0,
0,
0,
0,
4,
0
],
[
14,
0,
0.0727,
0.0182,
0,
0... | [
"import os, sys, tempfile",
"has_set = False",
"BASE_DIR = os.path.dirname(os.path.dirname(__file__))",
"import logging",
"logging.basicConfig(format='%(asctime)s %(levelname)8s %(message)s', stream=sys.stdout)",
"logging.getLogger().setLevel(logging.NOTSET)",
"def set_app_evn():\n global has_set\n ... |
#-*- coding: utf-8 -*-
import set_app_evn
from set_app_evn import BASE_DIR
import ai
import Image, ImageFilter, ImageEnhance
def register(opener):
reg_info = {
'user': 'aksdj990sddddf',
'passwd': 'wushiyito',
'passwdAgain': 'wushiyito',
'answer': 'asdfasdfa',
'question': 'sadfasdf',
'questionbak': 'sadfasdf',
'birthCityId': '2',
'birthCountyId': '5101',
'birthProvinceId': '1',
'birthdayDate': '6',
'birthdayMonth': '7',
'birthdayYear': '2002',
'bru': '',
'checkbox': 'checkbox',
'cityId': '2',
'countyId': '5101',
'from': '',
'mailDomain': '@sohu.com',
'provinceId': '1',
'sex': '0',
'textarea': '',
'vcode': 'dksh',
'vcodeEn': 'lwqIB%2BRTQVn4%2F6fzTSAGyPwTgbIrRlro',
'xpt': '',
}
{
'cfsk': 'RPDBwE5epxPukX0FRCFPnqfwEFV4lH3w',
'ycka': 'EyJ7uuzzod%2FujWBr377ZBvwTgbIrRlro',
'ueby': 'oWA7JqpHK%2F%2BlmU%2BQfVsCzoY8BZ60ctPB',
'xdyb': 'H77Sk8qEEjsrIza58YQvLPwTgbIrRlro',
'dksh': 'lwqIB%2BRTQVn4%2F6fzTSAGyPwTgbIrRlro',
}
#ai.fetch('http://blog.sohu.com/login/reg.do', opener = opener)
reg_headers = [('Referer', 'http://blog.sohu.com/login/reg.do')]
res = ai.fetch('http://blog.sohu.com/login/regnew.do', headers = reg_headers, \
opener = opener, fields = reg_info)
print res['headers'], res['data']
#http://blog.sohu.com/login/regBlog.do?bru=&domain=asdfsdfsdfa&from=&name=title
def vcode_hack():
image_name = 'd:/rand.jpg'
im = Image.open(image_name)
im = im.filter(ImageFilter.MedianFilter())
enhancer = ImageEnhance.Contrast(im)
im = enhancer.enhance(2)
im = im.convert('1')
im.show()
if __name__ == '__main__':
#opener = ai.get_opener()
#register(opener)
vcode_hack()
| [
[
1,
0,
0.0303,
0.0152,
0,
0.66,
0,
920,
0,
1,
0,
0,
920,
0,
0
],
[
1,
0,
0.0455,
0.0152,
0,
0.66,
0.1667,
920,
0,
1,
0,
0,
920,
0,
0
],
[
1,
0,
0.0758,
0.0152,
0,
... | [
"import set_app_evn",
"from set_app_evn import BASE_DIR",
"import ai",
"import Image, ImageFilter, ImageEnhance",
"def register(opener):\n reg_info = {\n 'user': 'aksdj990sddddf',\n 'passwd': 'wushiyito',\n 'passwdAgain': 'wushiyito',\n 'answer': 'asdfasdfa',\n\n 'quest... |
#-*- coding: utf-8 -*-
from threading import local
_thread_locals = local()
def get_user():
try:
return _thread_locals.user
except:
return None
def set_user(u):
_thread_locals.user = u | [
[
1,
0,
0.1538,
0.0769,
0,
0.66,
0,
83,
0,
1,
0,
0,
83,
0,
0
],
[
14,
0,
0.3077,
0.0769,
0,
0.66,
0.3333,
373,
3,
0,
0,
0,
605,
10,
1
],
[
2,
0,
0.6154,
0.3846,
0,
... | [
"from threading import local",
"_thread_locals = local()",
"def get_user():\n try:\n return _thread_locals.user\n except:\n return None",
" try:\n return _thread_locals.user\n except:\n return None",
" return _thread_locals.user",
" return None",
"de... |
#-*- coding: utf-8 -*-
from google.appengine.ext import db
import datetime
import generalcounter
from logics.main import get_user, set_user
import re
class App_user(db.Model):
email = db.EmailProperty(required = True)
nickname = db.StringProperty(required = True)
link = db.StringProperty()
level = db.IntegerProperty()
password = db.StringProperty()
is_verified = db.BooleanProperty(required = True)
comment_count = db.IntegerProperty(default=0)
post_count = db.IntegerProperty(default=0)
add_time = db.DateTimeProperty(required = True)
class Post(db.Model):
offset = db.IntegerProperty(required = True) #递增
title = db.StringProperty(required = True)
content = db.TextProperty(required = True)
author = db.ReferenceProperty(App_user, collection_name = 'add_post_set', required = True)
summary = db.TextProperty()
add_time = db.DateTimeProperty(required = True)
modify_time = db.DateTimeProperty()
modify_user = db.ReferenceProperty(App_user, collection_name = 'modify_post_set')
comment_count = db.IntegerProperty(default=0)
tags = db.StringListProperty()
is_delete = db.BooleanProperty(default = False)
delete_user = db.ReferenceProperty(App_user, collection_name = 'delete_post_set')
@classmethod
def all(cls):
#now = datetime.datetime.utcnow()
return super(Post, cls).all().filter('is_delete', False)
@classmethod
def count(cls):
return int(generalcounter.get_count('post'))
@classmethod
def add(cls, title, content, summary = None, add_time = None):
u = get_user()
count = cls.count()
if add_time is None:
add_time = datetime.datetime.utcnow()
p = cls(parent =u, offset = count+1, title = title, content = content, \
author = u, add_time = add_time)
if summary:
p.summary = summary
u.post_count += 1
def txn():
p.put()
u.put()
db.run_in_transaction(txn)
generalcounter.increment('post')
def modify(self, title, content, summary = None, add_time = None):
u = get_user()
self.title = title
self.content = content
self.summary = summary
if add_time:
self.add_time = add_time
self.modify_time = datetime.datetime.utcnow()
self.modify_user = u
self.put()
def delete(self):
u = get_user()
def txn():
#super(Post, self).delete()
self.is_delete = True
self.delete_user = u
self.author.post_count -= 1
self.put()
self.author.put()
db.run_in_transaction(txn)
generalcounter.decrement('post')
@classmethod
def get_list(cls, page, page_size):
q = cls.all()
q.order('-offset')
return q.fetch(page_size, offset = page_size*(page-1))
def get_comment_list(self):
return Comment.all().filter('post', self).order('add_time').fetch(100)
def to_atom_xml(self):
sep = '<hr class="more">'
return re.sub('&(?!amp;)', '&', self.content).replace(sep, '')
class Comment(db.Model):
email = db.EmailProperty()
nickname = db.StringProperty()
author = db.ReferenceProperty(App_user)
post = db.ReferenceProperty(Post, required = True)
link = db.StringProperty()
content = db.TextProperty()
ip = db.StringProperty()
add_time = db.DateTimeProperty()
is_delete = db.BooleanProperty(default = False)
delete_user = db.ReferenceProperty(App_user, collection_name = 'delete_comment_set')
@classmethod
def all(cls):
return super(Comment, cls).all().filter('is_delete', False)
@classmethod
def auth_add(cls, post, content, ip, author_nickname = None, link = None):
return cls._add(post, content, ip, author_email = None, \
author_nickname = author_nickname, link = link)
@classmethod
def anonymous_add(cls, post, content, ip, author_email, author_nickname, link = None):
return cls._add(post, content, ip, author_email = author_email, \
author_nickname = author_nickname, link = link)
@classmethod
def _add(cls, post, content, ip, author_email = None, author_nickname = None, link = None):
u = get_user()
if author_email is None:
#登录用户发帖
c = cls(parent = post, email = u.email, nickname = u.nickname, author = u, post = post, \
content = content, ip = ip, add_time = datetime.datetime.utcnow())
if link and u.link != link:
u.link = link
if author_nickname and author_nickname != u.nickname:
u.nickname = author_nickname
c.nickname = author_nickname
else:
c = cls(parent = post, email = author_email, nickname = author_nickname, post = post, \
content = content, ip = ip, add_time = datetime.datetime.utcnow())
if link:
c.link = link
def txn():
c.put()
post.comment_count += 1
if author_email is None:
u.comment_count += 1
u.put()
post.put()
db.run_in_transaction(txn)
def delete(self):
u = get_user()
def txn():
self.is_delete = True
self.delete_user = u
self.post.comment_count -= 1
self.put()
self.post.put()
if self.author:
self.author.comment_count -= 1
self.author.put()
db.run_in_transaction(txn)
class Tag(db.Model):
tag = db.StringProperty()
post_count = db.IntegerProperty(default=0)
class Sidebar(db.Model):
title = db.StringProperty(required = True)
content = db.TextProperty(required = True)
order = db.IntegerProperty(required = True, default= 0)
add_time = db.DateTimeProperty(auto_now_add = True)
| [
[
1,
0,
0.0105,
0.0053,
0,
0.66,
0,
167,
0,
1,
0,
0,
167,
0,
0
],
[
1,
0,
0.0211,
0.0053,
0,
0.66,
0.1111,
426,
0,
1,
0,
0,
426,
0,
0
],
[
1,
0,
0.0263,
0.0053,
0,
... | [
"from google.appengine.ext import db",
"import datetime",
"import generalcounter",
"from logics.main import get_user, set_user",
"import re",
"class App_user(db.Model):\n email = db.EmailProperty(required = True)\n nickname = db.StringProperty(required = True)\n link = db.StringProperty()\n\n ... |
#!/usr/bin/env python
from django.core.management import execute_manager
try:
import settings # Assumed to be in the same directory.
except ImportError:
import sys
sys.stderr.write("Error: Can't find the file 'settings.py' in the directory containing %r. It appears you've customized things.\nYou'll have to run django-admin.py, passing it your settings module.\n(If the file settings.py does indeed exist, it's causing an ImportError somehow.)\n" % __file__)
sys.exit(1)
if __name__ == "__main__":
execute_manager(settings)
| [
[
1,
0,
0.1818,
0.0909,
0,
0.66,
0,
879,
0,
1,
0,
0,
879,
0,
0
],
[
7,
0,
0.5,
0.5455,
0,
0.66,
0.5,
0,
0,
1,
0,
0,
0,
0,
2
],
[
1,
1,
0.3636,
0.0909,
1,
0.17,
... | [
"from django.core.management import execute_manager",
"try:\n import settings # Assumed to be in the same directory.\nexcept ImportError:\n import sys\n sys.stderr.write(\"Error: Can't find the file 'settings.py' in the directory containing %r. It appears you've customized things.\\nYou'll have to run dj... |
from django.conf.urls.defaults import *
urlpatterns = patterns('views.main',
('^$', 'index'),
('^(?P<page>\d+)/$', 'index'),
('^p/(?P<post_key>.+)/$', 'show_post'),
('^add_comment/$', 'add_comment'),
('^atom.xml$', 'atom'),
('^sitemap.xml$', 'sitemap'),
(r'^admin/', include('admin.urls')),
)
urlpatterns += patterns('',
(r'^logout/$', 'common.auth.logout'),
(r'^rpc/upload_image/$', 'views.rpc.upload_image'),
(r'^rpc/img/$', 'views.rpc.img'),
) | [
[
1,
0,
0.05,
0.05,
0,
0.66,
0,
341,
0,
1,
0,
0,
341,
0,
0
],
[
14,
0,
0.425,
0.6,
0,
0.66,
1,
990,
3,
8,
0,
0,
75,
10,
2
]
] | [
"from django.conf.urls.defaults import *",
"urlpatterns = patterns('views.main',\n ('^$', 'index'),\n ('^(?P<page>\\d+)/$', 'index'),\n ('^p/(?P<post_key>.+)/$', 'show_post'),\n\n ('^add_comment/$', 'add_comment'),\n ('^atom.xml$', 'atom'),\n ('^sitemap.xml$', 'sitemap'),"
] |
#-*- coding: utf-8 -*-
import os, sys, os.path
VERSION = '0.001'
LOCAL = False
DEBUG = True
TEMPLATE_DEBUG = DEBUG
APPEND_SLASH = True
ROOT_PATH = os.path.dirname(__file__)
if LOCAL:
BASE_URL = 'http://localhost:8080'
DOMAIN = 'localhost'
else:
BASE_URL = 'http://www.3kkk.me'
DOMAIN = 'www.3kkk.me'
BLOG_AUTHOR = "3kkk-me"
BLOG_TITLE = "3kkk-me's self-inspection"
BLOG_DESC = u"我的心随风飘荡,抓不住你我"
ADMINS = (
('perol.chen', 'perol.chen@gmail.com'),
)
MANAGERS = ADMINS
# Local time zone for this installation. Choices can be found here:
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
# although not all choices may be available on all operating systems.
# If running in a Windows environment this must be set to the same as your
# system time zone.
TIME_ZONE = 'Asia/Shanghai'
# Language code for this installation. All choices can be found here:
# http://www.i18nguy.com/unicode/language-identifiers.html
LANGUAGE_CODE = 'zh-cn'
SITE_ID = 1
# If you set this to False, Django will make some optimizations so as not
# to load the internationalization machinery.
USE_I18N = False
# Absolute path to the directory that holds media.
# Example: "/home/media/media.lawrence.com/"
MEDIA_ROOT = ROOT_PATH + "/static/"
# URL that handles the media served from MEDIA_ROOT. Make sure to use a
# trailing slash if there is a path component (optional in other cases).
# Examples: "http://media.lawrence.com", "http://example.com/media/"
MEDIA_URL = BASE_URL + '/static'
# URL prefix for admin media -- CSS, JavaScript and images. Make sure to use a
# trailing slash.
# Examples: "http://foo.com/media/", "/media/".
ADMIN_MEDIA_PREFIX = '/media/'
# Make this unique, and don't share it with anybody.
SECRET_KEY = '-nc*k(=13$ak$&sc_#2fu$9p_vwt(ckv=hy*0qr(4%jrr)ceap'
# List of callables that know how to import templates from various sources.
TEMPLATE_LOADERS = (
'django.template.loaders.filesystem.load_template_source',
'django.template.loaders.app_directories.load_template_source',
# 'django.template.loaders.eggs.load_template_source',
)
TEMPLATE_CONTEXT_PROCESSORS = (
'django.core.context_processors.request',
'django.core.context_processors.media',
'common.context_processors.bind_settings',
)
MIDDLEWARE_CLASSES = (
'common.domain_trans.Domain_transMiddleware',
'django.middleware.common.CommonMiddleware',
#'django.contrib.sessions.middleware.SessionMiddleware',
#'django.contrib.auth.middleware.AuthenticationMiddleware',
#'django.middleware.doc.XViewMiddleware',
'appengine_utilities.django-middleware.middleware.SessionMiddleware',
'common.auth.AuthMiddleware',
)
ROOT_URLCONF = 'urls'
TEMPLATE_DIRS = (
# Put strings here, like "/home/html/django_templates" or "C:/www/django/templates".
# Always use forward slashes, even on Windows.
# Don't forget to use absolute paths, not relative paths.
ROOT_PATH + '/templates',
)
INSTALLED_APPS = (
'common',
'admin',
)
| [
[
1,
0,
0.0198,
0.0099,
0,
0.66,
0,
688,
0,
3,
0,
0,
688,
0,
0
],
[
14,
0,
0.0396,
0.0099,
0,
0.66,
0.0385,
557,
1,
0,
0,
0,
0,
3,
0
],
[
14,
0,
0.0594,
0.0099,
0,
... | [
"import os, sys, os.path",
"VERSION = '0.001'",
"LOCAL = False",
"DEBUG = True",
"TEMPLATE_DEBUG = DEBUG",
"APPEND_SLASH = True",
"ROOT_PATH = os.path.dirname(__file__)",
"if LOCAL:\n BASE_URL = 'http://localhost:8080'\n DOMAIN = 'localhost'\nelse:\n BASE_URL = 'http://www.3kkk.me'\n DOMAI... |
from django.conf.urls.defaults import *
urlpatterns = patterns('admin.views',
('^$', 'index'),
('^left/$', 'left'),
('^post_list/$', 'post_list'),
('^add_post/', 'add_post'),
('^comment_list/$', 'comment_list'),
('^user_list/', 'user_list'),
('^add_post_post/', 'add_post_post'),
('^del_post/', 'del_post'),
('^edit_post/', 'edit_post'),
('^edit_post_post/', 'edit_post_post'),
('^comment_list/$', 'comment_list'),
('^del_comment/', 'del_comment'),
('^user_list/$', 'user_list'),
('^sidebar_list/$', 'sidebar_list'),
('^sidebar_add/$', 'sidebar_add'),
('^sidebar_add_post/$', 'sidebar_add_post'),
('^sidebar_edit/$', 'sidebar_edit'),
('^sidebar_edit_post/$', 'sidebar_edit_post'),
('^sidebar_delete/$', 'sidebar_delete'),
)
| [
[
1,
0,
0.0323,
0.0323,
0,
0.66,
0,
341,
0,
1,
0,
0,
341,
0,
0
],
[
14,
0,
0.5484,
0.9355,
0,
0.66,
1,
990,
3,
20,
0,
0,
75,
10,
1
]
] | [
"from django.conf.urls.defaults import *",
"urlpatterns = patterns('admin.views',\n ('^$', 'index'),\n ('^left/$', 'left'),\n\n ('^post_list/$', 'post_list'),\n ('^add_post/', 'add_post'),\n ('^comment_list/$', 'comment_list'),\n ('^user_list/', 'user_list'),"
] |
#-*- coding: utf-8 -*-
from django.http import HttpResponse, HttpResponseRedirect
from django.shortcuts import render_to_response
from django.template.loader import get_template
from django.template import RequestContext
from google.appengine.ext import db
from common.view_decorator import role_required
from models.main import App_user, Post, Comment, Tag, Sidebar
from smart_pager import Pager
import datetime
from common.time_utils import parse_time
@role_required('admin')
def index(request):
return render_to_response('admin/index.html', {
}, RequestContext(request))
@role_required('admin')
def left(request):
return render_to_response('admin/left.html', {
}, RequestContext(request))
@role_required('admin')
def add_post(request):
return render_to_response('admin/add_post.html', {
}, RequestContext(request))
@role_required('admin')
def add_post_post(request):
title = request.POST['title'].strip()
content = request.POST['content'].strip()
sep = '<hr class="more">'
sep_index = content.find(sep)
if sep_index != -1:
summary = content[0:sep_index]
else:
summary = None
add_time_date = request.POST.get('add_time_date').strip()
add_time_time = request.POST.get('add_time_time').strip()
if add_time_date and add_time_time:
add_time = parse_time(add_time_date + ' '+ add_time_time)
else:
add_time = None
Post.add(title, content, summary = summary, add_time = add_time)
return HttpResponse('ok')
@role_required('admin')
def post_list(request):
page = int(request.GET.get('page', 1))
if page < 1:
page = 1
page_size = 15
total = Post.count()
post_list = Post.get_list(page, page_size)
pager = Pager(total, page_size, page, page_list_num = 10)
return render_to_response('admin/post_list.html', {
'post_list': post_list,
'pager': pager,
}, RequestContext(request))
@role_required('admin')
def del_post(request):
post_key = request.POST['post_key']
post = db.get(post_key)
post.delete()
return HttpResponse('ok')
@role_required('admin')
def edit_post(request):
post_key = request.GET['post_key']
post = db.get(post_key)
return render_to_response('admin/edit_post.html', {
'post': post,
}, RequestContext(request))
@role_required('admin')
def edit_post_post(request):
post_key = request.GET['post_key']
post = db.get(post_key)
title = request.POST['title'].strip()
content = request.POST['content'].strip()
sep = '<hr class="more">'
sep_index = content.find(sep)
if sep_index != -1:
summary = content[0:sep_index]
else:
summary = None
add_time_date = request.POST.get('add_time_date').strip()
add_time_time = request.POST.get('add_time_time').strip()
if add_time_date and add_time_time:
add_time = parse_time(add_time_date + ' '+ add_time_time)
else:
add_time = None
post.modify(title, content, summary = summary, add_time = add_time)
return HttpResponse('ok')
@role_required('admin')
def comment_list(request):
page = int(request.GET.get('page', 1))
if page < 1:
page = 1
comment_list = Comment.all().order('-add_time').fetch(150)
return render_to_response('admin/comment_list.html', {
'comment_list': comment_list,
}, RequestContext(request))
@role_required('admin')
def del_comment(request):
commnet_key = request.POST['commnet_key']
comment = db.get(commnet_key)
comment.delete()
return HttpResponse('ok')
@role_required('admin')
def user_list(request):
page = int(request.GET.get('page', 1))
if page < 1:
page = 1
user_list = App_user.all().order('-add_time').fetch(150)
return render_to_response('admin/user_list.html', {
'user_list': user_list,
}, RequestContext(request))
@role_required('admin')
def sidebar_list(request):
sidebar_list = Sidebar.all().order('-add_time').fetch(100)
return render_to_response('admin/sidebar_list.html', {
'sidebar_list': sidebar_list,
}, RequestContext(request))
@role_required('admin')
def sidebar_add(request):
return render_to_response('admin/sidebar_add.html', {
}, RequestContext(request))
@role_required('admin')
def sidebar_edit(request):
item_key = request.GET['item_key']
sidebar = db.get(item_key)
return render_to_response('admin/sidebar_edit.html', {
'sidebar': sidebar
}, RequestContext(request))
@role_required('admin')
def sidebar_edit_post(request):
title = request.POST['title']
content = request.POST['content']
order = int(request.POST['order'])
item_key = request.GET['item_key']
sidebar = db.get(item_key)
sidebar.title = title
sidebar.content = content
sidebar.order = order
sidebar.put()
return HttpResponseRedirect('/admin/sidebar_list/')
@role_required('admin')
def sidebar_delete(request):
item_key = request.POST['item_key']
sidebar = db.get(item_key)
sidebar.delete()
return HttpResponse('ok')
@role_required('admin')
def sidebar_add_post(request):
title = request.POST['title']
content = request.POST['content']
order = int(request.POST['order'])
s = Sidebar(title = title, content = content, order = order)
s.put()
return HttpResponseRedirect('/admin/sidebar_list/')
| [
[
1,
0,
0.0099,
0.005,
0,
0.66,
0,
779,
0,
2,
0,
0,
779,
0,
0
],
[
1,
0,
0.0149,
0.005,
0,
0.66,
0.0385,
852,
0,
1,
0,
0,
852,
0,
0
],
[
1,
0,
0.0198,
0.005,
0,
0.6... | [
"from django.http import HttpResponse, HttpResponseRedirect",
"from django.shortcuts import render_to_response",
"from django.template.loader import get_template",
"from django.template import RequestContext",
"from google.appengine.ext import db",
"from common.view_decorator import role_required",
"fro... |
'''
Copyright (c) 2008, appengine-utilities project
All rights reserved.
Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
Neither the name of the appengine-utilities project nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
'''
import os, cgi, __main__
from google.appengine.ext.webapp import template
import wsgiref.handlers
from google.appengine.ext import webapp
from google.appengine.api import memcache
from google.appengine.ext import db
from appengine_utilities import cron
class MainPage(webapp.RequestHandler):
def get(self):
c = cron.Cron()
query = cron._AppEngineUtilities_Cron.all()
results = query.fetch(1000)
template_values = {"cron_entries" : results}
path = os.path.join(os.path.dirname(__file__), 'templates/scheduler_form.html')
self.response.out.write(template.render(path, template_values))
def post(self):
if str(self.request.get('action')) == 'Add':
cron.Cron().add_cron(str(self.request.get('cron_entry')))
elif str(self.request.get('action')) == 'Delete':
entry = db.get(db.Key(str(self.request.get('key'))))
entry.delete()
query = cron._AppEngineUtilities_Cron.all()
results = query.fetch(1000)
template_values = {"cron_entries" : results}
path = os.path.join(os.path.dirname(__file__), 'templates/scheduler_form.html')
self.response.out.write(template.render(path, template_values))
def main():
application = webapp.WSGIApplication(
[('/gaeutilities/', MainPage)],
debug=True)
wsgiref.handlers.CGIHandler().run(application)
if __name__ == "__main__":
main() | [
[
8,
0,
0.12,
0.22,
0,
0.66,
0,
0,
1,
0,
0,
0,
0,
0,
0
],
[
1,
0,
0.26,
0.02,
0,
0.66,
0.1,
688,
0,
3,
0,
0,
688,
0,
0
],
[
1,
0,
0.28,
0.02,
0,
0.66,
0.2,
... | [
"'''\nCopyright (c) 2008, appengine-utilities project\nAll rights reserved.\n\nRedistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:\nRedistributions of source code must retain the above copyright notice, this list of condition... |
"""
Copyright (c) 2008, appengine-utilities project
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
- Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
- Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
- Neither the name of the appengine-utilities project nor the names of its
contributors may be used to endorse or promote products derived from this
software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""
# main python imports
import os
import time
import datetime
import random
import md5
import Cookie
import pickle
import __main__
from time import strftime
# google appengine imports
from google.appengine.ext import db
from google.appengine.api import memcache
#django simplejson import, used for flash
from django.utils import simplejson
from rotmodel import ROTModel
# settings, if you have these set elsewhere, such as your django settings file,
# you'll need to adjust the values to pull from there.
class _AppEngineUtilities_Session(ROTModel):
"""
Model for the sessions in the datastore. This contains the identifier and
validation information for the session.
"""
sid = db.StringListProperty()
ip = db.StringProperty()
ua = db.StringProperty()
last_activity = db.DateTimeProperty(auto_now=True)
class _AppEngineUtilities_SessionData(ROTModel):
"""
Model for the session data in the datastore.
"""
session = db.ReferenceProperty(_AppEngineUtilities_Session)
keyname = db.StringProperty()
content = db.BlobProperty()
class _DatastoreWriter(object):
def put(self, keyname, value, session):
"""
Insert a keyname/value pair into the datastore for the session.
Args:
keyname: The keyname of the mapping.
value: The value of the mapping.
"""
keyname = session._validate_key(keyname)
if value is None:
raise ValueError('You must pass a value to put.')
# datestore write trumps cookie. If there is a cookie value
# with this keyname, delete it so we don't have conflicting
# entries.
if session.cookie_vals.has_key(keyname):
del(session.cookie_vals[keyname])
session.output_cookie[session.cookie_name + '_data'] = \
simplejson.dumps(session.cookie_vals)
print session.output_cookie.output()
sessdata = session._get(keyname=keyname)
if sessdata is None:
sessdata = _AppEngineUtilities_SessionData()
sessdata.session = session.session
sessdata.keyname = keyname
sessdata.content = pickle.dumps(value)
# UNPICKLING CACHE session.cache[keyname] = pickle.dumps(value)
session.cache[keyname] = value
sessdata.put()
session._set_memcache()
class _CookieWriter(object):
def put(self, keyname, value, session):
"""
Insert a keyname/value pair into the datastore for the session.
Args:
keyname: The keyname of the mapping.
value: The value of the mapping.
"""
keyname = session._validate_key(keyname)
if value is None:
raise ValueError('You must pass a value to put.')
# Use simplejson for cookies instead of pickle.
session.cookie_vals[keyname] = value
# update the requests session cache as well.
session.cache[keyname] = value
session.output_cookie[session.cookie_name + '_data'] = \
simplejson.dumps(session.cookie_vals)
print session.output_cookie.output()
class Session(object):
"""
Sessions used to maintain user presence between requests.
Sessions store a unique id as a cookie in the browser and
referenced in a datastore object. This maintains user presence
by validating requests as visits from the same browser.
You can add extra data to the session object by using it
as a dictionary object. Values can be any python object that
can be pickled.
For extra performance, session objects are also store in
memcache and kept consistent with the datastore. This
increases the performance of read requests to session
data.
"""
COOKIE_NAME = 'appengine-utilities-session-sid' # session token
DEFAULT_COOKIE_PATH = '/'
SESSION_EXPIRE_TIME = 7200 # sessions are valid for 7200 seconds (2 hours)
CLEAN_CHECK_PERCENT = 50 # By default, 50% of all requests will clean the database
INTEGRATE_FLASH = True # integrate functionality from flash module?
CHECK_IP = True # validate sessions by IP
CHECK_USER_AGENT = True # validate sessions by user agent
SET_COOKIE_EXPIRES = True # Set to True to add expiration field to cookie
SESSION_TOKEN_TTL = 5 # Number of seconds a session token is valid for.
UPDATE_LAST_ACTIVITY = 60 # Number of seconds that may pass before
# last_activity is updated
WRITER = "datastore" # Use the datastore writer by default. cookie is the
# other option.
def __init__(self, cookie_path=DEFAULT_COOKIE_PATH,
cookie_name=COOKIE_NAME,
session_expire_time=SESSION_EXPIRE_TIME,
clean_check_percent=CLEAN_CHECK_PERCENT,
integrate_flash=INTEGRATE_FLASH, check_ip=CHECK_IP,
check_user_agent=CHECK_USER_AGENT,
set_cookie_expires=SET_COOKIE_EXPIRES,
session_token_ttl=SESSION_TOKEN_TTL,
last_activity_update=UPDATE_LAST_ACTIVITY,
writer=WRITER):
"""
Initializer
Args:
cookie_name: The name for the session cookie stored in the browser.
session_expire_time: The amount of time between requests before the
session expires.
clean_check_percent: The percentage of requests the will fire off a
cleaning routine that deletes stale session data.
integrate_flash: If appengine-utilities flash utility should be
integrated into the session object.
check_ip: If browser IP should be used for session validation
check_user_agent: If the browser user agent should be used for
sessoin validation.
set_cookie_expires: True adds an expires field to the cookie so
it saves even if the browser is closed.
session_token_ttl: Number of sessions a session token is valid
for before it should be regenerated.
"""
self.cookie_path = cookie_path
self.cookie_name = cookie_name
self.session_expire_time = session_expire_time
self.integrate_flash = integrate_flash
self.check_user_agent = check_user_agent
self.check_ip = check_ip
self.set_cookie_expires = set_cookie_expires
self.session_token_ttl = session_token_ttl
self.last_activity_update = last_activity_update
self.writer = writer
# make sure the page is not cached in the browser
self.no_cache_headers()
# Check the cookie and, if necessary, create a new one.
self.cache = {}
string_cookie = os.environ.get('HTTP_COOKIE', '')
self.cookie = Cookie.SimpleCookie()
self.output_cookie = Cookie.SimpleCookie()
self.cookie.load(string_cookie)
try:
self.cookie_vals = \
simplejson.loads(self.cookie[self.cookie_name + '_data'].value)
# sync self.cache and self.cookie_vals which will make those
# values available for all gets immediately.
for k in self.cookie_vals:
self.cache[k] = self.cookie_vals[k]
self.output_cookie[self.cookie_name + '_data'] = self.cookie[self.cookie_name + '_data']
# sync the input cookie with the output cookie
except:
self.cookie_vals = {}
if writer == "cookie":
pass
else:
self.sid = None
new_session = True
# do_put is used to determine if a datastore write should
# happen on this request.
do_put = False
# check for existing cookie
if self.cookie.get(cookie_name):
self.sid = self.cookie[cookie_name].value
self.session = self._get_session() # will return None if
# sid expired
if self.session:
new_session = False
if new_session:
# start a new session
self.session = _AppEngineUtilities_Session()
self.session.put()
self.sid = self.new_sid()
if 'HTTP_USER_AGENT' in os.environ:
self.session.ua = os.environ['HTTP_USER_AGENT']
else:
self.session.ua = None
if 'REMOTE_ADDR' in os.environ:
self.session.ip = os.environ['REMOTE_ADDR']
else:
self.session.ip = None
self.session.sid = [self.sid]
# do put() here to get the session key
key = self.session.put()
else:
# check the age of the token to determine if a new one
# is required
duration = datetime.timedelta(seconds=self.session_token_ttl)
session_age_limit = datetime.datetime.now() - duration
if self.session.last_activity < session_age_limit:
self.sid = self.new_sid()
if len(self.session.sid) > 2:
self.session.sid.remove(self.session.sid[0])
self.session.sid.append(self.sid)
do_put = True
else:
self.sid = self.session.sid[-1]
# check if last_activity needs updated
ula = datetime.timedelta(seconds=self.last_activity_update)
if datetime.datetime.now() > self.session.last_activity + ula:
do_put = True
self.output_cookie[cookie_name] = self.sid
self.output_cookie[cookie_name]['path'] = cookie_path
# UNPICKLING CACHE self.cache['sid'] = pickle.dumps(self.sid)
self.cache['sid'] = self.sid
if do_put:
if self.sid != None or self.sid != "":
self.session.put()
if self.set_cookie_expires:
if not self.output_cookie.has_key(cookie_name + '_data'):
self.output_cookie[cookie_name + '_data'] = ""
self.output_cookie[cookie_name + '_data']['expires'] = \
self.session_expire_time
print self.output_cookie.output()
# fire up a Flash object if integration is enabled
if self.integrate_flash:
import flash
self.flash = flash.Flash(cookie=self.cookie)
# randomly delete old stale sessions in the datastore (see
# CLEAN_CHECK_PERCENT variable)
if random.randint(1, 100) < clean_check_percent:
self._clean_old_sessions()
def new_sid(self):
"""
Create a new session id.
"""
sid = str(self.session.key()) + md5.new(repr(time.time()) + \
str(random.random())).hexdigest()
return sid
def _get_session(self):
"""
Get the user's session from the datastore
"""
query = _AppEngineUtilities_Session.all()
query.filter('sid', self.sid)
if self.check_user_agent:
query.filter('ua', os.environ['HTTP_USER_AGENT'])
if self.check_ip:
query.filter('ip', os.environ['REMOTE_ADDR'])
results = query.fetch(1)
if len(results) is 0:
return None
else:
sessionAge = datetime.datetime.now() - results[0].last_activity
if sessionAge.seconds > self.session_expire_time:
results[0].delete()
return None
return results[0]
def _get(self, keyname=None):
"""
Return all of the SessionData object data from the datastore onlye,
unless keyname is specified, in which case only that instance of
SessionData is returned.
Important: This does not interact with memcache and pulls directly
from the datastore. This also does not get items from the cookie
store.
Args:
keyname: The keyname of the value you are trying to retrieve.
"""
query = _AppEngineUtilities_SessionData.all()
query.filter('session', self.session)
if keyname != None:
query.filter('keyname =', keyname)
results = query.fetch(1000)
if len(results) is 0:
return None
if keyname != None:
return results[0]
return results
def _validate_key(self, keyname):
"""
Validate the keyname, making sure it is set and not a reserved name.
"""
if keyname is None:
raise ValueError('You must pass a keyname for the session' + \
' data content.')
elif keyname in ('sid', 'flash'):
raise ValueError(keyname + ' is a reserved keyname.')
if type(keyname) != type([str, unicode]):
return str(keyname)
return keyname
def _put(self, keyname, value):
"""
Insert a keyname/value pair into the datastore for the session.
Args:
keyname: The keyname of the mapping.
value: The value of the mapping.
"""
if self.writer == "datastore":
writer = _DatastoreWriter()
else:
writer = _CookieWriter()
writer.put(keyname, value, self)
def _delete_session(self):
"""
Delete the session and all session data.
"""
if hasattr(self, "session"):
sessiondata = self._get()
# delete from datastore
if sessiondata is not None:
for sd in sessiondata:
sd.delete()
# delete from memcache
memcache.delete('sid-'+str(self.session.key()))
# delete the session now that all items that reference it are deleted.
self.session.delete()
# unset any cookie values that may exist
self.cookie_vals = {}
self.cache = {}
self.output_cookie[self.cookie_name + '_data'] = \
simplejson.dumps(self.cookie_vals)
print self.output_cookie.output()
# if the event class has been loaded, fire off the sessionDeleted event
if 'AEU_Events' in __main__.__dict__:
__main__.AEU_Events.fire_event('sessionDelete')
def delete(self):
"""
Delete the current session and start a new one.
This is useful for when you need to get rid of all data tied to a
current session, such as when you are logging out a user.
"""
self._delete_session()
@classmethod
def delete_all_sessions(cls):
"""
Deletes all sessions and session data from the data store and memcache:
NOTE: This is not fully developed. It also will not delete any cookie
data as this does not work for each incoming request. Keep this in mind
if you are using the cookie writer.
"""
all_sessions_deleted = False
all_data_deleted = False
while not all_sessions_deleted:
query = _AppEngineUtilities_Session.all()
results = query.fetch(75)
if len(results) is 0:
all_sessions_deleted = True
else:
for result in results:
memcache.delete('sid-' + str(result.key()))
result.delete()
while not all_data_deleted:
query = _AppEngineUtilities_SessionData.all()
results = query.fetch(75)
if len(results) is 0:
all_data_deleted = True
else:
for result in results:
result.delete()
def _clean_old_sessions(self):
"""
Delete expired sessions from the datastore.
This is only called for CLEAN_CHECK_PERCENT percent of requests because
it could be rather intensive.
"""
duration = datetime.timedelta(seconds=self.session_expire_time)
session_age = datetime.datetime.now() - duration
query = _AppEngineUtilities_Session.all()
query.filter('last_activity <', session_age)
results = query.fetch(50)
for result in results:
data_query = _AppEngineUtilities_SessionData.all()
data_query.filter('session', result)
data_results = data_query.fetch(1000)
for data_result in data_results:
data_result.delete()
memcache.delete('sid-'+str(result.key()))
result.delete()
# Implement Python container methods
def __getitem__(self, keyname):
"""
Get item from session data.
keyname: The keyname of the mapping.
"""
# flash messages don't go in the datastore
if self.integrate_flash and (keyname == 'flash'):
return self.flash.msg
if keyname in self.cache:
# UNPICKLING CACHE return pickle.loads(str(self.cache[keyname]))
return self.cache[keyname]
if keyname in self.cookie_vals:
return self.cookie_vals[keyname]
if hasattr(self, "session"):
mc = memcache.get('sid-'+str(self.session.key()))
if mc is not None:
if keyname in mc:
return mc[keyname]
data = self._get(keyname)
if data:
#UNPICKLING CACHE self.cache[keyname] = data.content
self.cache[keyname] = pickle.loads(data.content)
self._set_memcache()
return pickle.loads(data.content)
else:
raise KeyError(str(keyname))
raise KeyError(str(keyname))
def __setitem__(self, keyname, value):
"""
Set item in session data.
Args:
keyname: They keyname of the mapping.
value: The value of mapping.
"""
if self.integrate_flash and (keyname == 'flash'):
self.flash.msg = value
else:
keyname = self._validate_key(keyname)
self.cache[keyname] = value
# self._set_memcache() # commented out because this is done in the datestore put
return self._put(keyname, value)
def __delitem__(self, keyname):
"""
Delete item from session data.
Args:
keyname: The keyname of the object to delete.
"""
bad_key = False
sessdata = self._get(keyname = keyname)
if sessdata is None:
bad_key = True
else:
sessdata.delete()
if keyname in self.cookie_vals:
del self.cookie_vals[keyname]
bad_key = False
self.output_cookie[self.cookie_name + '_data'] = \
simplejson.dumps(self.cookie_vals)
print self.output_cookie.output()
if bad_key:
raise KeyError(str(keyname))
if keyname in self.cache:
del self.cache[keyname]
self._set_memcache()
def __len__(self):
"""
Return size of session.
"""
# check memcache first
if hasattr(self, "session"):
mc = memcache.get('sid-'+str(self.session.key()))
if mc is not None:
return len(mc) + len(self.cookie_vals)
results = self._get()
if results is not None:
return len(results) + len(self.cookie_vals)
else:
return 0
return len(self.cookie_vals)
def __contains__(self, keyname):
"""
Check if an item is in the session data.
Args:
keyname: The keyname being searched.
"""
try:
r = self.__getitem__(keyname)
except KeyError:
return False
return True
def __iter__(self):
"""
Iterate over the keys in the session data.
"""
# try memcache first
if hasattr(self, "session"):
mc = memcache.get('sid-'+str(self.session.key()))
if mc is not None:
for k in mc:
yield k
else:
for k in self._get():
yield k.keyname
for k in self.cookie_vals:
yield k
def __str__(self):
"""
Return string representation.
"""
#if self._get():
return '{' + ', '.join(['"%s" = "%s"' % (k, self[k]) for k in self]) + '}'
#else:
# return []
def _set_memcache(self):
"""
Set a memcache object with all the session data. Optionally you can
add a key and value to the memcache for put operations.
"""
# Pull directly from the datastore in order to ensure that the
# information is as up to date as possible.
if self.writer == "datastore":
data = {}
sessiondata = self._get()
if sessiondata is not None:
for sd in sessiondata:
data[sd.keyname] = pickle.loads(sd.content)
memcache.set('sid-'+str(self.session.key()), data, \
self.session_expire_time)
def cycle_key(self):
"""
Changes the session id.
"""
self.sid = self.new_sid()
if len(self.session.sid) > 2:
self.session.sid.remove(self.session.sid[0])
self.session.sid.append(self.sid)
def flush(self):
"""
Delete's the current session, creating a new one.
"""
self._delete_session()
self.__init__()
def no_cache_headers(self):
"""
Adds headers, avoiding any page caching in the browser. Useful for highly
dynamic sites.
"""
print "Expires: Tue, 03 Jul 2001 06:00:00 GMT"
print strftime("Last-Modified: %a, %d %b %y %H:%M:%S %Z")
print "Cache-Control: no-store, no-cache, must-revalidate, max-age=0"
print "Cache-Control: post-check=0, pre-check=0"
print "Pragma: no-cache"
def clear(self):
"""
Remove all items
"""
sessiondata = self._get()
# delete from datastore
if sessiondata is not None:
for sd in sessiondata:
sd.delete()
# delete from memcache
memcache.delete('sid-'+str(self.session.key()))
self.cache = {}
self.cookie_vals = {}
self.output_cookie[self.cookie_name + '_data'] = \
simplejson.dumps(self.cookie_vals)
print self.output_cookie.output()
def has_key(self, keyname):
"""
Equivalent to k in a, use that form in new code
"""
return self.__contains__(keyname)
def items(self):
"""
A copy of list of (key, value) pairs
"""
op = {}
for k in self:
op[k] = self[k]
return op
def keys(self):
"""
List of keys.
"""
l = []
for k in self:
l.append(k)
return l
def update(*dicts):
"""
Updates with key/value pairs from b, overwriting existing keys, returns None
"""
for dict in dicts:
for k in dict:
self._put(k, dict[k])
return None
def values(self):
"""
A copy list of values.
"""
v = []
for k in self:
v.append(self[k])
return v
def get(self, keyname, default = None):
"""
a[k] if k in a, else x
"""
try:
return self.__getitem__(keyname)
except KeyError:
if default is not None:
return default
return None
def setdefault(self, keyname, default = None):
"""
a[k] if k in a, else x (also setting it)
"""
try:
return self.__getitem__(keyname)
except KeyError:
if default is not None:
self.__setitem__(keyname, default)
return default
return None
@classmethod
def check_token(cls, cookie_name=COOKIE_NAME, delete_invalid=True):
"""
Retrieves the token from a cookie and validates that it is
a valid token for an existing cookie. Cookie validation is based
on the token existing on a session that has not expired.
This is useful for determining if datastore or cookie writer
should be used in hybrid implementations.
Args:
cookie_name: Name of the cookie to check for a token.
delete_invalid: If the token is not valid, delete the session
cookie, to avoid datastore queries on future
requests.
Returns True/False
"""
string_cookie = os.environ.get('HTTP_COOKIE', '')
cookie = Cookie.SimpleCookie()
cookie.load(string_cookie)
if cookie.has_key(cookie_name):
query = _AppEngineUtilities_Session.all()
query.filter('sid', cookie[cookie_name].value)
results = query.fetch(1)
if len(results) > 0:
return True
else:
if delete_invalid:
output_cookie = Cookie.SimpleCookie()
output_cookie[cookie_name] = cookie[cookie_name]
output_cookie[cookie_name]['expires'] = 0
print output_cookie.output()
return False
| [
[
8,
0,
0.0178,
0.0343,
0,
0.66,
0,
0,
1,
0,
0,
0,
0,
0,
0
],
[
1,
0,
0.0382,
0.0013,
0,
0.66,
0.0556,
688,
0,
1,
0,
0,
688,
0,
0
],
[
1,
0,
0.0395,
0.0013,
0,
0.66... | [
"\"\"\"\nCopyright (c) 2008, appengine-utilities project\nAll rights reserved.\n\nRedistribution and use in source and binary forms, with or without\nmodification, are permitted provided that the following conditions are met:\n- Redistributions of source code must retain the above copyright notice, this\n list of ... |
"""
Copyright (c) 2008, appengine-utilities project
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
- Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
- Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
- Neither the name of the appengine-utilities project nor the names of its
contributors may be used to endorse or promote products derived from this
software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""
from google.appengine.ext import db
from cache import Cache
class Paginator(object):
"""
This class is used for maintaining pagination objects.
"""
@classmethod
def get(cls, count=10, q_filters={}, search=None, start=None, model=None, \
order='ASC', order_by='__key__'):
"""
get queries the database on model, starting with key, ordered by
order. It receives count + 1 items, returning count and setting a
next field to the count + 1 item key. It then reverses the sort, and
grabs count objects, returning the last as a the previous.
Arguments:
count: The amount of entries to pull on query
q_filter: The filter value (optional)
search: Search is used for SearchableModel searches
start: The key to start the page from
model: The Model object to query against. This is not a
string, it must be a Model derived object.
order: The order in which to pull the values.
order_by: The attribute to order results by. This defaults to
__key__
Returns a dict:
{
'next': next_key,
'prev': prev_key,
'items': entities_pulled
}
"""
# argument validation
if model == None:
raise ValueError('You must pass a model to query')
# a valid model object will have a gql method.
if callable(model.gql) == False:
raise TypeError('model must be a valid model object.')
# cache check
cache_string = "gae_paginator_"
for q_filter in q_filters:
cache_string = cache_string + q_filter + "_" + q_filters[q_filter] + "_"
cache_string = cache_string + "index"
c = Cache()
if c.has_key(cache_string):
return c[cache_string]
# build query
query = model.all()
if len(q_filters) > 0:
for q_filter in q_filters:
query.filter(q_filter + " = ", q_filters[q_filter])
if start:
if order.lower() == "DESC".lower():
query.filter(order_by + " <", start)
else:
query.filter(order_by + " >", start)
if search:
query.search(search)
if order.lower() == "DESC".lower():
query.order("-" + order_by)
else:
query.order(order_by)
results = query.fetch(count + 1)
if len(results) == count + 1:
next = getattr(results[count - 1], order_by)
# reverse the query to get the value for previous
if start is not None:
rquery = model.all()
for q_filter in q_filters:
rquery.filter(q_filter + " = ", q_filters[q_filter])
if search:
query.search(search)
if order.lower() == "DESC".lower():
rquery.order(order_by)
else:
rquery.order("-" + order_by)
rresults = rquery.fetch(count)
previous = getattr(results[0], order_by)
else:
previous = None
else:
next = None
return {
"results": results,
"next": next,
"previous": previous
}
| [
[
8,
0,
0.1116,
0.2149,
0,
0.66,
0,
0,
1,
0,
0,
0,
0,
0,
0
],
[
1,
0,
0.2314,
0.0083,
0,
0.66,
0.3333,
167,
0,
1,
0,
0,
167,
0,
0
],
[
1,
0,
0.2397,
0.0083,
0,
0.66... | [
"\"\"\"\nCopyright (c) 2008, appengine-utilities project\nAll rights reserved.\n\nRedistribution and use in source and binary forms, with or without\nmodification, are permitted provided that the following conditions are met:\n- Redistributions of source code must retain the above copyright notice, this\n list of ... |
import Cookie
import os
from appengine_utilities import sessions
class SessionMiddleware(object):
TEST_COOKIE_NAME = 'testcookie'
TEST_COOKIE_VALUE = 'worked'
def process_request(self, request):
"""
Check to see if a valid session token exists, if not,
then use a cookie only session. It's up to the application
to convert the session to a datastore session. Once this
has been done, the session will continue to use the datastore
unless the writer is set to "cookie".
Setting the session to use the datastore is as easy as resetting
request.session anywhere if your application.
Example:
from common.appengine_utilities import sessions
request.session = sessions.Session()
"""
self.request = request
if sessions.Session.check_token():
request.session = sessions.Session()
else:
request.session = sessions.Session(writer="cookie")
request.session.set_test_cookie = self.set_test_cookie
request.session.test_cookie_worked = self.test_cookie_worked
request.session.delete_test_cookie = self.delete_test_cookie
request.session.save = self.save
def set_test_cookie(self):
string_cookie = os.environ.get('HTTP_COOKIE', '')
self.cookie = Cookie.SimpleCookie()
self.cookie.load(string_cookie)
self.cookie[self.TEST_COOKIE_NAME] = self.TEST_COOKIE_VALUE
print self.cookie
def test_cookie_worked(self):
string_cookie = os.environ.get('HTTP_COOKIE', '')
self.cookie = Cookie.SimpleCookie()
self.cookie.load(string_cookie)
return self.cookie.get(self.TEST_COOKIE_NAME)
def delete_test_cookie(self):
string_cookie = os.environ.get('HTTP_COOKIE', '')
self.cookie = Cookie.SimpleCookie()
self.cookie.load(string_cookie)
self.cookie[self.TEST_COOKIE_NAME] = ''
self.cookie[self.TEST_COOKIE_NAME]['path'] = '/'
self.cookie[self.TEST_COOKIE_NAME]['expires'] = 0
def save(self):
self.request.session = sessions.Session()
| [
[
1,
0,
0.0161,
0.0161,
0,
0.66,
0,
32,
0,
1,
0,
0,
32,
0,
0
],
[
1,
0,
0.0323,
0.0161,
0,
0.66,
0.3333,
688,
0,
1,
0,
0,
688,
0,
0
],
[
1,
0,
0.0645,
0.0161,
0,
0.... | [
"import Cookie",
"import os",
"from appengine_utilities import sessions",
"class SessionMiddleware(object):\n TEST_COOKIE_NAME = 'testcookie'\n TEST_COOKIE_VALUE = 'worked'\n\n def process_request(self, request):\n \"\"\"\n Check to see if a valid session token exists, if not,\n ... |
"""
Copyright (c) 2008, appengine-utilities project
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
- Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
- Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
- Neither the name of the appengine-utilities project nor the names of its
contributors may be used to endorse or promote products derived from this
software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""
import __main__
class Event(object):
"""
Event is a simple publish/subscribe based event dispatcher
It sets itself to the __main__ function. In order to use it,
you must import it and __main__
"""
def __init__(self):
self.events = []
def subscribe(self, event, callback, args = None):
"""
This method will subscribe a callback function to an event name.
"""
if not {"event": event, "callback": callback, "args": args, } \
in self.events:
self.events.append({"event": event, "callback": callback, \
"args": args, })
def unsubscribe(self, event, callback, args = None):
"""
This method will unsubscribe a callback from an event.
"""
if {"event": event, "callback": callback, "args": args, }\
in self.events:
self.events.remove({"event": event, "callback": callback,\
"args": args, })
def fire_event(self, event = None):
"""
This method is what a method uses to fire an event,
initiating all registered callbacks
"""
for e in self.events:
if e["event"] == event:
if type(e["args"]) == type([]):
e["callback"](*e["args"])
elif type(e["args"]) == type({}):
e["callback"](**e["args"])
elif e["args"] == None:
e["callback"]()
else:
e["callback"](e["args"])
"""
Assign to the event class to __main__
"""
__main__.AEU_Events = Event()
| [
[
8,
0,
0.1776,
0.3421,
0,
0.66,
0,
0,
1,
0,
0,
0,
0,
0,
0
],
[
1,
0,
0.3553,
0.0132,
0,
0.66,
0.25,
593,
0,
1,
0,
0,
593,
0,
0
],
[
3,
0,
0.6711,
0.5658,
0,
0.66,
... | [
"\"\"\"\nCopyright (c) 2008, appengine-utilities project\nAll rights reserved.\n\nRedistribution and use in source and binary forms, with or without\nmodification, are permitted provided that the following conditions are met:\n- Redistributions of source code must retain the above copyright notice, this\n list of ... |
"""
Copyright (c) 2008, appengine-utilities project
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
- Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
- Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
- Neither the name of the appengine-utilities project nor the names of its
contributors may be used to endorse or promote products derived from this
software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""
import os
import sys
import Cookie
import pickle
from time import strftime
from django.utils import simplejson
COOKIE_NAME = 'appengine-utilities-flash'
class Flash(object):
"""
Send messages to the user between pages.
When you instantiate the class, the attribute 'msg' will be set from the
cookie, and the cookie will be deleted. If there is no flash cookie, 'msg'
will default to None.
To set a flash message for the next page, simply set the 'msg' attribute.
Example psuedocode:
if new_entity.put():
flash = Flash()
flash.msg = 'Your new entity has been created!'
return redirect_to_entity_list()
Then in the template on the next page:
{% if flash.msg %}
<div class="flash-msg">{{ flash.msg }}</div>
{% endif %}
"""
def __init__(self, cookie=None):
"""
Load the flash message and clear the cookie.
"""
self.no_cache_headers()
# load cookie
if cookie is None:
browser_cookie = os.environ.get('HTTP_COOKIE', '')
self.cookie = Cookie.SimpleCookie()
self.cookie.load(browser_cookie)
else:
self.cookie = cookie
# check for flash data
if self.cookie.get(COOKIE_NAME):
# set 'msg' attribute
cookie_val = self.cookie[COOKIE_NAME].value
# we don't want to trigger __setattr__(), which creates a cookie
try:
self.__dict__['msg'] = simplejson.loads(cookie_val)
except:
# not able to load the json, so do not set message. This should
# catch for when the browser doesn't delete the cookie in time for
# the next request, and only blanks out the content.
pass
# clear the cookie
self.cookie[COOKIE_NAME] = ''
self.cookie[COOKIE_NAME]['path'] = '/'
self.cookie[COOKIE_NAME]['expires'] = 0
print self.cookie[COOKIE_NAME]
else:
# default 'msg' attribute to None
self.__dict__['msg'] = None
def __setattr__(self, name, value):
"""
Create a cookie when setting the 'msg' attribute.
"""
if name == 'cookie':
self.__dict__['cookie'] = value
elif name == 'msg':
self.__dict__['msg'] = value
self.__dict__['cookie'][COOKIE_NAME] = simplejson.dumps(value)
self.__dict__['cookie'][COOKIE_NAME]['path'] = '/'
print self.cookie
else:
raise ValueError('You can only set the "msg" attribute.')
def no_cache_headers(self):
"""
Adds headers, avoiding any page caching in the browser. Useful for highly
dynamic sites.
"""
print "Expires: Tue, 03 Jul 2001 06:00:00 GMT"
print strftime("Last-Modified: %a, %d %b %y %H:%M:%S %Z")
print "Cache-Control: no-store, no-cache, must-revalidate, max-age=0"
print "Cache-Control: post-check=0, pre-check=0"
print "Pragma: no-cache"
| [
[
8,
0,
0.1134,
0.2185,
0,
0.66,
0,
0,
1,
0,
0,
0,
0,
0,
0
],
[
1,
0,
0.2353,
0.0084,
0,
0.66,
0.125,
688,
0,
1,
0,
0,
688,
0,
0
],
[
1,
0,
0.2437,
0.0084,
0,
0.66,... | [
"\"\"\"\nCopyright (c) 2008, appengine-utilities project\nAll rights reserved.\n\nRedistribution and use in source and binary forms, with or without\nmodification, are permitted provided that the following conditions are met:\n- Redistributions of source code must retain the above copyright notice, this\n list of ... |
"""
Copyright (c) 2008, appengine-utilities project
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
- Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
- Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
- Neither the name of the appengine-utilities project nor the names of its
contributors may be used to endorse or promote products derived from this
software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""
from google.appengine.ext import db
class ROTModel(db.Model):
"""
ROTModel overrides the db.Model put function, having it retry
up to 3 times when it encounters a datastore timeout. This is
to try an maximize the chance the data makes it into the datastore
when attempted. If it fails, it raises the db.Timeout error and the
calling application will need to handle that.
"""
def put(self):
count = 0
while count < 3:
try:
return db.Model.put(self)
except db.Timeout:
count += 1
else:
raise db.Timeout()
| [
[
8,
0,
0.2935,
0.5652,
0,
0.66,
0,
0,
1,
0,
0,
0,
0,
0,
0
],
[
1,
0,
0.6087,
0.0217,
0,
0.66,
0.5,
167,
0,
1,
0,
0,
167,
0,
0
],
[
3,
0,
0.8261,
0.3696,
0,
0.66,
... | [
"\"\"\"\nCopyright (c) 2008, appengine-utilities project\nAll rights reserved.\n\nRedistribution and use in source and binary forms, with or without\nmodification, are permitted provided that the following conditions are met:\n- Redistributions of source code must retain the above copyright notice, this\n list of ... |
"""
Copyright (c) 2008, appengine-utilities project
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
- Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
- Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
- Neither the name of the appengine-utilities project nor the names of its
contributors may be used to endorse or promote products derived from this
software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""
# main python imports
import datetime
import pickle
import random
import __main__
# google appengine import
from google.appengine.ext import db
from google.appengine.api import memcache
# settings
DEFAULT_TIMEOUT = 3600 # cache expires after one hour (3600 sec)
CLEAN_CHECK_PERCENT = 50 # 15% of all requests will clean the database
MAX_HITS_TO_CLEAN = 100 # the maximum number of cache hits to clean on attempt
class _AppEngineUtilities_Cache(db.Model):
# It's up to the application to determine the format of their keys
cachekey = db.StringProperty()
createTime = db.DateTimeProperty(auto_now_add=True)
timeout = db.DateTimeProperty()
value = db.BlobProperty()
class Cache(object):
"""
Cache is used for storing pregenerated output and/or objects in the Big
Table datastore to minimize the amount of queries needed for page
displays. The idea is that complex queries that generate the same
results really should only be run once. Cache can be used to store
pregenerated value made from queries (or other calls such as
urlFetch()), or the query objects themselves.
"""
def __init__(self, clean_check_percent = CLEAN_CHECK_PERCENT,
max_hits_to_clean = MAX_HITS_TO_CLEAN,
default_timeout = DEFAULT_TIMEOUT):
"""
Initializer
Args:
clean_check_percent: how often cache initialization should
run the cache cleanup
max_hits_to_clean: maximum number of stale hits to clean
default_timeout: default length a cache item is good for
"""
self.clean_check_percent = clean_check_percent
self.max_hits_to_clean = max_hits_to_clean
self.default_timeout = default_timeout
if random.randint(1, 100) < self.clean_check_percent:
self._clean_cache()
if 'AEU_Events' in __main__.__dict__:
__main__.AEU_Events.fire_event('cacheInitialized')
def _clean_cache(self):
"""
_clean_cache is a routine that is run to find and delete cache
items that are old. This helps keep the size of your over all
datastore down.
"""
query = _AppEngineUtilities_Cache.all()
query.filter('timeout < ', datetime.datetime.now())
results = query.fetch(self.max_hits_to_clean)
db.delete(results)
#for result in results:
# result.delete()
def _validate_key(self, key):
if key == None:
raise KeyError
def _validate_value(self, value):
if value == None:
raise ValueError
def _validate_timeout(self, timeout):
if timeout == None:
timeout = datetime.datetime.now() +\
datetime.timedelta(seconds=DEFAULT_TIMEOUT)
if type(timeout) == type(1):
timeout = datetime.datetime.now() + \
datetime.timedelta(seconds = timeout)
if type(timeout) != datetime.datetime:
raise TypeError
if timeout < datetime.datetime.now():
raise ValueError
return timeout
def add(self, key = None, value = None, timeout = None):
"""
add adds an entry to the cache, if one does not already
exist.
"""
self._validate_key(key)
self._validate_value(value)
timeout = self._validate_timeout(timeout)
if key in self:
raise KeyError
cacheEntry = _AppEngineUtilities_Cache()
cacheEntry.cachekey = key
cacheEntry.value = pickle.dumps(value)
cacheEntry.timeout = timeout
cacheEntry.put()
memcache_timeout = timeout - datetime.datetime.now()
memcache.set('cache-'+key, value, int(memcache_timeout.seconds))
if 'AEU_Events' in __main__.__dict__:
__main__.AEU_Events.fire_event('cacheAdded')
def set(self, key = None, value = None, timeout = None):
"""
add adds an entry to the cache, overwriting an existing value
if one already exists.
"""
self._validate_key(key)
self._validate_value(value)
timeout = self._validate_timeout(timeout)
cacheEntry = self._read(key)
if not cacheEntry:
cacheEntry = _AppEngineUtilities_Cache()
cacheEntry.cachekey = key
cacheEntry.value = pickle.dumps(value)
cacheEntry.timeout = timeout
cacheEntry.put()
memcache_timeout = timeout - datetime.datetime.now()
memcache.set('cache-'+key, value, int(memcache_timeout.seconds))
if 'AEU_Events' in __main__.__dict__:
__main__.AEU_Events.fire_event('cacheSet')
def _read(self, key = None):
"""
_read returns a cache object determined by the key. It's set
to private because it returns a db.Model object, and also
does not handle the unpickling of objects making it not the
best candidate for use. The special method __getitem__ is the
preferred access method for cache data.
"""
query = _AppEngineUtilities_Cache.all()
query.filter('cachekey', key)
query.filter('timeout > ', datetime.datetime.now())
results = query.fetch(1)
if len(results) is 0:
return None
return results[0]
if 'AEU_Events' in __main__.__dict__:
__main__.AEU_Events.fire_event('cacheReadFromDatastore')
if 'AEU_Events' in __main__.__dict__:
__main__.AEU_Events.fire_event('cacheRead')
def delete(self, key = None):
"""
Deletes a cache object determined by the key.
"""
memcache.delete('cache-'+key)
result = self._read(key)
if result:
if 'AEU_Events' in __main__.__dict__:
__main__.AEU_Events.fire_event('cacheDeleted')
result.delete()
def get(self, key):
"""
get is used to return the cache value associated with the key passed.
"""
mc = memcache.get('cache-'+key)
if mc:
if 'AEU_Events' in __main__.__dict__:
__main__.AEU_Events.fire_event('cacheReadFromMemcache')
if 'AEU_Events' in __main__.__dict__:
__main__.AEU_Events.fire_event('cacheRead')
return mc
result = self._read(key)
if result:
timeout = result.timeout - datetime.datetime.now()
# print timeout.seconds
memcache.set('cache-'+key, pickle.loads(result.value),
int(timeout.seconds))
return pickle.loads(result.value)
else:
raise KeyError
def get_many(self, keys):
"""
Returns a dict mapping each key in keys to its value. If the given
key is missing, it will be missing from the response dict.
"""
dict = {}
for key in keys:
value = self.get(key)
if value is not None:
dict[key] = val
return dict
def __getitem__(self, key):
"""
__getitem__ is necessary for this object to emulate a container.
"""
return self.get(key)
def __setitem__(self, key, value):
"""
__setitem__ is necessary for this object to emulate a container.
"""
return self.set(key, value)
def __delitem__(self, key):
"""
Implement the 'del' keyword
"""
return self.delete(key)
def __contains__(self, key):
"""
Implements "in" operator
"""
try:
r = self.__getitem__(key)
except KeyError:
return False
return True
def has_key(self, keyname):
"""
Equivalent to k in a, use that form in new code
"""
return self.__contains__(keyname)
| [
[
8,
0,
0.0509,
0.0981,
0,
0.66,
0,
0,
1,
0,
0,
0,
0,
0,
0
],
[
1,
0,
0.1094,
0.0038,
0,
0.66,
0.0909,
426,
0,
1,
0,
0,
426,
0,
0
],
[
1,
0,
0.1132,
0.0038,
0,
0.66... | [
"\"\"\"\nCopyright (c) 2008, appengine-utilities project\nAll rights reserved.\n\nRedistribution and use in source and binary forms, with or without\nmodification, are permitted provided that the following conditions are met:\n- Redistributions of source code must retain the above copyright notice, this\n list of ... |
#-*- coding: utf-8 -*-
from google.appengine.api import users
from models.main import App_user
from django.http import HttpResponse, HttpResponseRedirect
import datetime
from logics.main import get_user, set_user
def logout(request):
url = request.GET.get('url', '/')
request.session['email'] = ''
return HttpResponseRedirect(users.create_logout_url(url))
class AuthMiddleware(object):
def process_request(self, request):
#先判断session
user_email = request.session.get('email')
app_user = None
if user_email:
app_user = App_user.all().filter('email', user_email).get()
else:
u = users.get_current_user()
if u:
user_email = u.email()
nickname = u.nickname()
is_admin = users.is_current_user_admin()
if is_admin:
level = 10
else:
level = 1
app_user = App_user.all().filter('email', user_email).get()
if app_user is None:
app_user = App_user(email = user_email, nickname = nickname, \
is_verified = True, add_time = datetime.datetime.utcnow())
app_user.level = level
app_user.put()
request.session['email'] = user_email
request.user = app_user
set_user(app_user) | [
[
1,
0,
0.0465,
0.0233,
0,
0.66,
0,
279,
0,
1,
0,
0,
279,
0,
0
],
[
1,
0,
0.0698,
0.0233,
0,
0.66,
0.1667,
524,
0,
1,
0,
0,
524,
0,
0
],
[
1,
0,
0.093,
0.0233,
0,
0... | [
"from google.appengine.api import users",
"from models.main import App_user",
"from django.http import HttpResponse, HttpResponseRedirect",
"import datetime",
"from logics.main import get_user, set_user",
"def logout(request):\n url = request.GET.get('url', '/')\n request.session['email'] = ''\n ... |
from django.conf import settings
def bind_settings(request):
return {
'settings': settings
} | [
[
1,
0,
0.1667,
0.1667,
0,
0.66,
0,
128,
0,
1,
0,
0,
128,
0,
0
],
[
2,
0,
0.75,
0.6667,
0,
0.66,
1,
846,
0,
1,
1,
0,
0,
0,
0
],
[
13,
1,
0.8333,
0.5,
1,
0.11,
0... | [
"from django.conf import settings",
"def bind_settings(request):\n return {\n 'settings': settings\n }",
" return {\n 'settings': settings\n }"
] |
#-*- coding: utf-8 -*-
from django.http import HttpResponsePermanentRedirect
from django.conf import settings
class Domain_transMiddleware(object):
def process_request(self, request):
base_url = settings.BASE_URL
domain = settings.DOMAIN
if request.META['SERVER_NAME']!='localhost' and request.META['SERVER_NAME']!=domain:
if request.META['QUERY_STRING']:
new_url = base_url + request.path +'?'+ request.META['QUERY_STRING']
else:
new_url = base_url + request.path
return HttpResponsePermanentRedirect(new_url)
| [
[
1,
0,
0.2,
0.0667,
0,
0.66,
0,
779,
0,
1,
0,
0,
779,
0,
0
],
[
1,
0,
0.2667,
0.0667,
0,
0.66,
0.5,
128,
0,
1,
0,
0,
128,
0,
0
],
[
3,
0,
0.7,
0.6667,
0,
0.66,
... | [
"from django.http import HttpResponsePermanentRedirect",
"from django.conf import settings",
"class Domain_transMiddleware(object):\n def process_request(self, request):\n base_url = settings.BASE_URL\n domain = settings.DOMAIN\n if request.META['SERVER_NAME']!='localhost' and request.M... |
#-*- coding: utf-8 -*-
from django import template
from models.main import App_user, Post, Comment, Tag, Sidebar
from django.template import Node, NodeList, Template, Context, Variable
register = template.Library()
class Sidebar_c(template.Node):
def __init__(self, nodelist_loop):
self.nodelist_loop = nodelist_loop
def render(self, context):
nodelist = NodeList()
sidebar_list = Sidebar.all().order('order').fetch(100)
context.push()
for sidebar in sidebar_list:
context.push()
context['sidebar'] = sidebar
for node in self.nodelist_loop:
nodelist.append(node.render(context))
context.pop()
context.pop()
return nodelist.render(context)
@register.tag(name="sidebar_list")
def do_sidebar(parser, token):
"""
{% sidebar_list %}
"""
nodelist_loop = parser.parse(('endsidebar_list',))
parser.delete_first_token()
return Sidebar_c(nodelist_loop)
class Space_line_lessNode(Node):
def __init__(self, nodelist):
self.nodelist = nodelist
def render(self, context):
from django.utils.html import strip_spaces_between_tags
html = strip_spaces_between_tags(self.nodelist.render(context).strip())
html = html.replace('\r\n', '').replace('\n', '')
return html
@register.tag(name="space_line_less")
def space_line_less(parser, token):
nodelist = parser.parse(('endspace_line_less',))
parser.delete_first_token()
return Space_line_lessNode(nodelist)
| [
[
1,
0,
0.0185,
0.0185,
0,
0.66,
0,
294,
0,
1,
0,
0,
294,
0,
0
],
[
1,
0,
0.037,
0.0185,
0,
0.66,
0.1429,
524,
0,
5,
0,
0,
524,
0,
0
],
[
1,
0,
0.0741,
0.0185,
0,
0... | [
"from django import template",
"from models.main import App_user, Post, Comment, Tag, Sidebar",
"from django.template import Node, NodeList, Template, Context, Variable",
"register = template.Library()",
"class Sidebar_c(template.Node):\n def __init__(self, nodelist_loop):\n self.nodelist_loop = n... |
#coding=utf-8
from django import template
from django.template.defaultfilters import stringfilter
import datetime
register = template.Library()
from common import time_utils
from common.utils import process_html
import hashlib
from google.appengine.api import users
def make_list(start, end):
"""
{{ start|make_list:end}}
"""
return range(start, end)
register.filter('make_list', make_list)
def human_time(target_time):
return time_utils.human_time(target_time)
register.filter('human_time', human_time)
def format_time(target_time):
return time_utils.time_str(target_time)
register.filter('format_time', format_time)
def rfc3339_time_str(dt):
return time_utils.rfc3339_time_str(dt)
register.filter('rfc3339_time_str', rfc3339_time_str)
def date_str(target_time):
return time_utils.date_str(target_time)
register.filter('date_str', date_str)
def only_time_str(target_time):
return time_utils.only_time_str(target_time)
register.filter('only_time_str', only_time_str)
def mod(value, divisor = 2):
"""
{{ value|make_list:divisor}}
"""
return (value%divisor == 0)
register.filter('mod', mod)
def logout_url(url):
return '/logout/?url=%s' % url
register.filter('logout_url', logout_url)
def login_url(url):
return users.create_login_url(url)
register.filter('login_url', login_url)
def process_html_show(html):
return process_html(html)
register.filter('process_html_show', process_html_show)
def md5(s):
return hashlib.md5(s).hexdigest()
register.filter('md5', md5)
def cover_space(s, autoescape=None):
return s.replace(' ', ' ')
cover_space.is_safe = True
cover_space.needs_autoescape = True
register.filter('cover_space', cover_space) | [
[
1,
0,
0.0303,
0.0152,
0,
0.66,
0,
294,
0,
1,
0,
0,
294,
0,
0
],
[
1,
0,
0.0455,
0.0152,
0,
0.66,
0.0303,
913,
0,
1,
0,
0,
913,
0,
0
],
[
1,
0,
0.0606,
0.0152,
0,
... | [
"from django import template",
"from django.template.defaultfilters import stringfilter",
"import datetime",
"register = template.Library()",
"from common import time_utils",
"from common.utils import process_html",
"import hashlib",
"from google.appengine.api import users",
"def make_list(start, en... |
#-*- coding: utf-8 -*-
import traceback, sys, cStringIO
import datetime, time, cgi
import pytz
from django.conf import settings
def get_utc_time(dt = None):
if dt is None:
dt = datetime.datetime.utcnow()
if dt.tzinfo is None:
dt = dt.replace(tzinfo = pytz.utc)
if dt.tzname()!='UTC':
dt = dt.astimezone(pytz.utc)
return dt
def get_local_time(dt = None):
utc_dt = get_utc_time(dt)
l_u_dt = utc_dt.astimezone(pytz.timezone(settings.TIME_ZONE))
return l_u_dt
def date_str(dt = None):
if dt is None:
dt = datetime.datetime.utcnow()
if isinstance(dt, datetime.datetime):
dt = get_local_time(dt)
return dt.strftime('%Y-%m-%d')
def time_str(dt = None):
dt = get_local_time(dt)
return dt.strftime('%Y-%m-%d %H:%M:%S %z')
def rfc3339_time_str(dt = None):
dt = get_utc_time(dt)
return dt.strftime('%Y-%m-%dT%H:%M:%SZ')
def only_time_str(dt = None):
dt = get_local_time(dt)
return dt.strftime('%H:%M:%S')
def get_time(dt = None):
utc_dt = get_utc_time(dt)
t = (utc_dt - datetime(1970, 1, 1, 0, 0, tzinfo = pytz.utc))
t = float(t.days*86400+t.seconds+t.microseconds/1000000.0)
return t
def parse_date(d):
return datetime.date(*map(int, d.split('-')))
def parse_time(d):
t = time.strptime(d, '%Y-%m-%d %H:%M:%S')
dt = datetime.datetime(*(t[0:6]), **({'tzinfo':get_local_time().tzinfo}))
dt = get_utc_time(dt)
return dt
def human_time(target_time):
now = datetime.datetime.utcnow()
tdelta = now - target_time
if tdelta.days < 0:
ret_time = u"就在刚才"
else:
if tdelta.days > 0:
ret_time = u"%d天前" % tdelta.days
elif (tdelta.seconds / (60*60)) > 0:
ret_time = u"%d小时前" % (tdelta.seconds / (60*60))
elif (tdelta.seconds / (60)) > 0:
ret_time = u"%d分钟前" % (tdelta.seconds / 60)
else:
ret_time = u"就在刚才"
return ret_time
| [
[
1,
0,
0.0253,
0.0127,
0,
0.66,
0,
423,
0,
3,
0,
0,
423,
0,
0
],
[
1,
0,
0.038,
0.0127,
0,
0.66,
0.0769,
426,
0,
3,
0,
0,
426,
0,
0
],
[
1,
0,
0.0506,
0.0127,
0,
0... | [
"import traceback, sys, cStringIO",
"import datetime, time, cgi",
"import pytz",
"from django.conf import settings",
"def get_utc_time(dt = None):\n if dt is None:\n dt = datetime.datetime.utcnow()\n if dt.tzinfo is None:\n dt = dt.replace(tzinfo = pytz.utc)\n \n if dt.tzname()!='U... |
#-*- coding: utf-8 -*-
import traceback, sys, cStringIO
import datetime, time, cgi
import pytz, re
from django.conf import settings
from BeautifulSoup import BeautifulSoup
def get_err():
f = cStringIO.StringIO( )
traceback.print_exc(file=f)
return f.getvalue( )
def print_err():
sys.stderr.write('err time: '+str(datetime.datetime.utcnow()))
traceback.print_exc(file=sys.stderr)
def gbk2utf8(s):
if type(s) == unicode:
return s.encode('utf8')
return s.decode('gb18030').encode('utf8')
def utf82gbk(s):
if type(s) == unicode:
return s.encode('gb18030')
return s.decode('utf8').encode('gb18030')
def get_mod(module_name):
mod = __import__(module_name)
components = module_name.split('.')
for comp in components[1:]:
mod = getattr(mod, comp)
return mod
def escape_dict(d):
for k in d:
if isinstance(d[k], str):
d[k] = cgi.escape(d[k])
return d
def ipdumps(ip):
if ip:
p = [int(a) for a in ip.split('.')]
return p[3]+ p[2]*256+ p[1]*256*256+ p[0]*256*256*256
else:
return None
def process_html(html):
code_tag = re.compile('\s*<pre name="code" class="([^"]+)">',
re.MULTILINE)
soup = BeautifulSoup(html)
clean_html = ''
for section in soup.contents:
txt = unicode(section)
matchobj = re.match(code_tag, txt)
if matchobj:
clean_html += re.sub(r'<br />', "\n", txt)
else:
clean_html += txt
return clean_html | [
[
1,
0,
0.0312,
0.0156,
0,
0.66,
0,
423,
0,
3,
0,
0,
423,
0,
0
],
[
1,
0,
0.0469,
0.0156,
0,
0.66,
0.0833,
426,
0,
3,
0,
0,
426,
0,
0
],
[
1,
0,
0.0625,
0.0156,
0,
... | [
"import traceback, sys, cStringIO",
"import datetime, time, cgi",
"import pytz, re",
"from django.conf import settings",
"from BeautifulSoup import BeautifulSoup",
"def get_err():\n f = cStringIO.StringIO( )\n traceback.print_exc(file=f)\n return f.getvalue( )",
" f = cStringIO.StringIO( )",... |
from google.appengine.api import users
from logics.main import get_user, set_user
from django.http import HttpResponse, HttpResponseRedirect, HttpResponseForbidden
def role_required(role):
def wrapper(handler_method):
def check_login(request, *args, **kwargs):
user = get_user()
if not user:
if request.method != 'GET':
return HttpResponseForbidden()
else:
return HttpResponseRedirect(users.create_login_url(request.path))
elif role == "user" or (role == "admin" and user.level == 10):
return handler_method(request, *args, **kwargs)
else:
if request.method == 'GET':
return HttpResponseForbidden()
else:
return HttpResponseForbidden() # User didn't meet role.
return check_login
return wrapper | [
[
1,
0,
0.0455,
0.0455,
0,
0.66,
0,
279,
0,
1,
0,
0,
279,
0,
0
],
[
1,
0,
0.0909,
0.0455,
0,
0.66,
0.3333,
751,
0,
2,
0,
0,
751,
0,
0
],
[
1,
0,
0.1364,
0.0455,
0,
... | [
"from google.appengine.api import users",
"from logics.main import get_user, set_user",
"from django.http import HttpResponse, HttpResponseRedirect, HttpResponseForbidden",
"def role_required(role):\n def wrapper(handler_method):\n def check_login(request, *args, **kwargs):\n user = get_u... |
#!/usr/bin/python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
An interactive, stateful AJAX shell that runs Python code on the server.
Part of http://code.google.com/p/google-app-engine-samples/.
May be run as a standalone app or in an existing app as an admin-only handler.
Can be used for system administration tasks, as an interactive way to try out
APIs, or as a debugging aid during development.
The logging, os, sys, db, and users modules are imported automatically.
Interpreter state is stored in the datastore so that variables, function
definitions, and other values in the global and local namespaces can be used
across commands.
To use the shell in your app, copy shell.py, static/*, and templates/* into
your app's source directory. Then, copy the URL handlers from app.yaml into
your app.yaml.
TODO: unit tests!
"""
import logging
import new
import os
import pickle
import sys
import traceback
import types
import wsgiref.handlers
from google.appengine.api import users
from google.appengine.ext import db
from google.appengine.ext import webapp
from google.appengine.ext.webapp import template
# Set to True if stack traces should be shown in the browser, etc.
_DEBUG = True
# The entity kind for shell sessions. Feel free to rename to suit your app.
_SESSION_KIND = '_Shell_Session'
# Types that can't be pickled.
UNPICKLABLE_TYPES = (
types.ModuleType,
types.TypeType,
types.ClassType,
types.FunctionType,
)
# Unpicklable statements to seed new sessions with.
INITIAL_UNPICKLABLES = [
'import logging',
'import os',
'import sys',
'from google.appengine.ext import db',
'from google.appengine.api import users',
]
class Session(db.Model):
"""A shell session. Stores the session's globals.
Each session globals is stored in one of two places:
If the global is picklable, it's stored in the parallel globals and
global_names list properties. (They're parallel lists to work around the
unfortunate fact that the datastore can't store dictionaries natively.)
If the global is not picklable (e.g. modules, classes, and functions), or if
it was created by the same statement that created an unpicklable global,
it's not stored directly. Instead, the statement is stored in the
unpicklables list property. On each request, before executing the current
statement, the unpicklable statements are evaluated to recreate the
unpicklable globals.
The unpicklable_names property stores all of the names of globals that were
added by unpicklable statements. When we pickle and store the globals after
executing a statement, we skip the ones in unpicklable_names.
Using Text instead of string is an optimization. We don't query on any of
these properties, so they don't need to be indexed.
"""
global_names = db.ListProperty(db.Text)
globals = db.ListProperty(db.Blob)
unpicklable_names = db.ListProperty(db.Text)
unpicklables = db.ListProperty(db.Text)
def set_global(self, name, value):
"""Adds a global, or updates it if it already exists.
Also removes the global from the list of unpicklable names.
Args:
name: the name of the global to remove
value: any picklable value
"""
blob = db.Blob(pickle.dumps(value))
if name in self.global_names:
index = self.global_names.index(name)
self.globals[index] = blob
else:
self.global_names.append(db.Text(name))
self.globals.append(blob)
self.remove_unpicklable_name(name)
def remove_global(self, name):
"""Removes a global, if it exists.
Args:
name: string, the name of the global to remove
"""
if name in self.global_names:
index = self.global_names.index(name)
del self.global_names[index]
del self.globals[index]
def globals_dict(self):
"""Returns a dictionary view of the globals.
"""
return dict((name, pickle.loads(val))
for name, val in zip(self.global_names, self.globals))
def add_unpicklable(self, statement, names):
"""Adds a statement and list of names to the unpicklables.
Also removes the names from the globals.
Args:
statement: string, the statement that created new unpicklable global(s).
names: list of strings; the names of the globals created by the statement.
"""
self.unpicklables.append(db.Text(statement))
for name in names:
self.remove_global(name)
if name not in self.unpicklable_names:
self.unpicklable_names.append(db.Text(name))
def remove_unpicklable_name(self, name):
"""Removes a name from the list of unpicklable names, if it exists.
Args:
name: string, the name of the unpicklable global to remove
"""
if name in self.unpicklable_names:
self.unpicklable_names.remove(name)
class FrontPageHandler(webapp.RequestHandler):
"""Creates a new session and renders the shell.html template.
"""
def get(self):
# set up the session. TODO: garbage collect old shell sessions
session_key = self.request.get('session')
if session_key:
session = Session.get(session_key)
else:
# create a new session
session = Session()
session.unpicklables = [db.Text(line) for line in INITIAL_UNPICKLABLES]
session_key = session.put()
template_file = os.path.join(os.path.dirname(__file__), '../templates',
'shell.html')
session_url = '/?session=%s' % session_key
vars = { 'server_software': os.environ['SERVER_SOFTWARE'],
'python_version': sys.version,
'session': str(session_key),
'user': users.get_current_user(),
'login_url': users.create_login_url(session_url),
'logout_url': users.create_logout_url(session_url),
}
rendered = webapp.template.render(template_file, vars, debug=_DEBUG)
self.response.out.write(rendered)
class StatementHandler(webapp.RequestHandler):
"""Evaluates a python statement in a given session and returns the result.
"""
def get(self):
self.response.headers['Content-Type'] = 'text/plain'
# extract the statement to be run
statement = self.request.get('statement')
if not statement:
return
# the python compiler doesn't like network line endings
statement = statement.replace('\r\n', '\n')
# add a couple newlines at the end of the statement. this makes
# single-line expressions such as 'class Foo: pass' evaluate happily.
statement += '\n\n'
# log and compile the statement up front
try:
logging.info('Compiling and evaluating:\n%s' % statement)
compiled = compile(statement, '<string>', 'single')
except:
self.response.out.write(traceback.format_exc())
return
# create a dedicated module to be used as this statement's __main__
statement_module = new.module('__main__')
# use this request's __builtin__, since it changes on each request.
# this is needed for import statements, among other things.
import __builtin__
statement_module.__builtins__ = __builtin__
# load the session from the datastore
session = Session.get(self.request.get('session'))
# swap in our custom module for __main__. then unpickle the session
# globals, run the statement, and re-pickle the session globals, all
# inside it.
old_main = sys.modules.get('__main__')
try:
sys.modules['__main__'] = statement_module
statement_module.__name__ = '__main__'
# re-evaluate the unpicklables
for code in session.unpicklables:
exec code in statement_module.__dict__
# re-initialize the globals
for name, val in session.globals_dict().items():
try:
statement_module.__dict__[name] = val
except:
msg = 'Dropping %s since it could not be unpickled.\n' % name
self.response.out.write(msg)
logging.warning(msg + traceback.format_exc())
session.remove_global(name)
# run!
old_globals = dict(statement_module.__dict__)
try:
old_stdout = sys.stdout
old_stderr = sys.stderr
try:
sys.stdout = self.response.out
sys.stderr = self.response.out
exec compiled in statement_module.__dict__
finally:
sys.stdout = old_stdout
sys.stderr = old_stderr
except:
self.response.out.write(traceback.format_exc())
return
# extract the new globals that this statement added
new_globals = {}
for name, val in statement_module.__dict__.items():
if name not in old_globals or val != old_globals[name]:
new_globals[name] = val
if True in [isinstance(val, UNPICKLABLE_TYPES)
for val in new_globals.values()]:
# this statement added an unpicklable global. store the statement and
# the names of all of the globals it added in the unpicklables.
session.add_unpicklable(statement, new_globals.keys())
logging.debug('Storing this statement as an unpicklable.')
else:
# this statement didn't add any unpicklables. pickle and store the
# new globals back into the datastore.
for name, val in new_globals.items():
if not name.startswith('__'):
session.set_global(name, val)
finally:
sys.modules['__main__'] = old_main
session.put()
def main():
libs_path = os.path.abspath('./')
django_path = os.path.abspath('django.zip')
if django_path not in sys.path:
sys.path.insert(0, django_path)
if libs_path not in sys.path:
sys.path.insert(0, libs_path)
application = webapp.WSGIApplication(
[('/shell/', FrontPageHandler),
('/shell/shell/', StatementHandler)], debug=_DEBUG)
wsgiref.handlers.CGIHandler().run(application)
if __name__ == '__main__':
main()
| [
[
8,
0,
0.0857,
0.0667,
0,
0.66,
0,
0,
1,
0,
0,
0,
0,
0,
0
],
[
1,
0,
0.1238,
0.0032,
0,
0.66,
0.0476,
715,
0,
1,
0,
0,
715,
0,
0
],
[
1,
0,
0.127,
0.0032,
0,
0.66,... | [
"\"\"\"\nAn interactive, stateful AJAX shell that runs Python code on the server.\n\nPart of http://code.google.com/p/google-app-engine-samples/.\n\nMay be run as a standalone app or in an existing app as an admin-only handler.\nCan be used for system administration tasks, as an interactive way to try out\nAPIs, or... |
#-*- coding: utf-8 -*-
import socks_proxy as socks
import browser_agent as ai
import urllib2, os, threading, time
def get_one_proxy_opener(p):
#proxy=urllib2.ProxyHandler({'http':p[0]})
cookie=urllib2.HTTPCookieProcessor()
#opener=urllib2.build_opener(proxy, cookie)
opener=urllib2.build_opener(cookie)
return opener
def get_proxy_process(p):
# (
#['23', '124.128.224.2', '1080', 'SOCKS4', '\xc5\xb7\xd6\xde',
#'ProxyCN', '05-13', '20:00
#', '1.003', 'whois'],
#4.812000036239624)
p=p[0]
if p[3]=='SOCKS5':
proxy=ai.ProxyProcessor(p[1], int(p[2]), socks.PROXY_TYPE_SOCKS5)
elif p[3]=='SOCKS4':
proxy=ai.ProxyProcessor(p[1], int(p[2]), socks.PROXY_TYPE_SOCKS4)
elif p[3]=='HTTP':
proxy=ai.ProxyProcessor(p[1], int(p[2]), socks.PROXY_TYPE_HTTP)
return proxy
def test_proxy(s, p, success_proxys, thread_list):
print 'test : ', p
p=p.split()
try:
if p[3]=='SOCKS5':
proxy=ai.ProxyProcessor(p[1], int(p[2]), socks.PROXY_TYPE_SOCKS5)
elif p[3]=='SOCKS4':
proxy=ai.ProxyProcessor(p[1], int(p[2]), socks.PROXY_TYPE_SOCKS4)
elif p[3]=='HTTP':
proxy=ai.ProxyProcessor(p[1], int(p[2]), socks.PROXY_TYPE_HTTP)
opener=urllib2.build_opener(proxy)
st=time.time()
#res=ai.fetch('http://union.rekoo.com/test_proxy.do', fields=[('a','a')], opener=opener)
res=ai.fetch('http://kf.51.com/help/contact.php', opener=opener)
et=time.time()
#print res['data']
#if res['data']=='HTTP_X_FORWARDED_FOR: ':
if res['data'].find(u'联系方式'.encode('gbk'))!=-1:
success_proxys.append((p, et-st))
#return res['data']=='HTTP_X_FORWARDED_FOR: '
except Exception,e:
print e
finally:
s.release()
thread_list.remove(threading.currentThread())
print -1
def get_proxys():
BASE_PATH=os.path.dirname(__file__).replace('\\','/')
if BASE_PATH:
BASE_PATH+='/'
proxys=open(BASE_PATH+'proxy.txt').readlines()
s=threading.Semaphore(50)
success_proxys=[] #成功的代理
thread_list=[]
for p in proxys:
s.acquire(True)
thr=threading.Thread(target=test_proxy, args=(s, p, success_proxys, thread_list))
thr.start()
thread_list.append(thr)
total_wait_time=0
while 1:
print 'test proxy : '+'#'*20, len(thread_list)
if len(thread_list)==0:
break
alive=False
for thr in thread_list:
if thr.isAlive():
alive=True
if alive:
time.sleep(2)
total_wait_time+=2
if total_wait_time>30:
break
else:
break
#success_proxys.sort((lambda a,b:cmp(a[1], b[1])))
#print success_proxys
return success_proxys
if __name__=='__main__':
get_proxys() | [
[
1,
0,
0.0286,
0.0095,
0,
0.66,
0,
525,
0,
1,
0,
0,
525,
0,
0
],
[
1,
0,
0.0381,
0.0095,
0,
0.66,
0.1429,
33,
0,
1,
0,
0,
33,
0,
0
],
[
1,
0,
0.0476,
0.0095,
0,
0.... | [
"import socks_proxy as socks",
"import browser_agent as ai",
"import urllib2, os, threading, time",
"def get_one_proxy_opener(p):\n #proxy=urllib2.ProxyHandler({'http':p[0]})\n cookie=urllib2.HTTPCookieProcessor()\n\n #opener=urllib2.build_opener(proxy, cookie)\n opener=urllib2.build_opener(cookie... |
from browser_agent import * | [
[
1,
0,
1,
1,
0,
0.66,
0,
33,
0,
1,
0,
0,
33,
0,
0
]
] | [
"from browser_agent import *"
] |
#!/usr/bin/env python
'''
$Id: tzfile.py,v 1.8 2004/06/03 00:15:24 zenzen Exp $
'''
from cStringIO import StringIO
from datetime import datetime, timedelta
from struct import unpack, calcsize
from pytz.tzinfo import StaticTzInfo, DstTzInfo, memorized_ttinfo
from pytz.tzinfo import memorized_datetime, memorized_timedelta
def build_tzinfo(zone, fp):
head_fmt = '>4s 16x 6l'
head_size = calcsize(head_fmt)
(magic,ttisgmtcnt,ttisstdcnt,leapcnt,
timecnt,typecnt,charcnt) = unpack(head_fmt, fp.read(head_size))
# Make sure it is a tzinfo(5) file
assert magic == 'TZif'
# Read out the transition times, localtime indices and ttinfo structures.
data_fmt = '>%(timecnt)dl %(timecnt)dB %(ttinfo)s %(charcnt)ds' % dict(
timecnt=timecnt, ttinfo='lBB'*typecnt, charcnt=charcnt)
data_size = calcsize(data_fmt)
data = unpack(data_fmt, fp.read(data_size))
# make sure we unpacked the right number of values
assert len(data) == 2 * timecnt + 3 * typecnt + 1
transitions = [memorized_datetime(trans)
for trans in data[:timecnt]]
lindexes = list(data[timecnt:2 * timecnt])
ttinfo_raw = data[2 * timecnt:-1]
tznames_raw = data[-1]
del data
# Process ttinfo into separate structs
ttinfo = []
tznames = {}
i = 0
while i < len(ttinfo_raw):
# have we looked up this timezone name yet?
tzname_offset = ttinfo_raw[i+2]
if tzname_offset not in tznames:
nul = tznames_raw.find('\0', tzname_offset)
if nul < 0:
nul = len(tznames_raw)
tznames[tzname_offset] = tznames_raw[tzname_offset:nul]
ttinfo.append((ttinfo_raw[i],
bool(ttinfo_raw[i+1]),
tznames[tzname_offset]))
i += 3
# Now build the timezone object
if len(transitions) == 0:
ttinfo[0][0], ttinfo[0][2]
cls = type(zone, (StaticTzInfo,), dict(
zone=zone,
_utcoffset=memorized_timedelta(ttinfo[0][0]),
_tzname=ttinfo[0][2]))
else:
# Early dates use the first standard time ttinfo
i = 0
while ttinfo[i][1]:
i += 1
if ttinfo[i] == ttinfo[lindexes[0]]:
transitions[0] = datetime.min
else:
transitions.insert(0, datetime.min)
lindexes.insert(0, i)
# calculate transition info
transition_info = []
for i in range(len(transitions)):
inf = ttinfo[lindexes[i]]
utcoffset = inf[0]
if not inf[1]:
dst = 0
else:
for j in range(i-1, -1, -1):
prev_inf = ttinfo[lindexes[j]]
if not prev_inf[1]:
break
dst = inf[0] - prev_inf[0] # dst offset
tzname = inf[2]
# Round utcoffset and dst to the nearest minute or the
# datetime library will complain. Conversions to these timezones
# might be up to plus or minus 30 seconds out, but it is
# the best we can do.
utcoffset = int((utcoffset + 30) / 60) * 60
dst = int((dst + 30) / 60) * 60
transition_info.append(memorized_ttinfo(utcoffset, dst, tzname))
cls = type(zone, (DstTzInfo,), dict(
zone=zone,
_utc_transition_times=transitions,
_transition_info=transition_info))
return cls()
if __name__ == '__main__':
import os.path
from pprint import pprint
base = os.path.join(os.path.dirname(__file__), 'zoneinfo')
tz = build_tzinfo('Australia/Melbourne',
open(os.path.join(base,'Australia','Melbourne'), 'rb'))
tz = build_tzinfo('US/Eastern',
open(os.path.join(base,'US','Eastern'), 'rb'))
pprint(tz._utc_transition_times)
#print tz.asPython(4)
#print tz.transitions_mapping
| [
[
8,
0,
0.0265,
0.0265,
0,
0.66,
0,
0,
1,
0,
0,
0,
0,
0,
0
],
[
1,
0,
0.0531,
0.0088,
0,
0.66,
0.1429,
764,
0,
1,
0,
0,
764,
0,
0
],
[
1,
0,
0.0619,
0.0088,
0,
0.66... | [
"'''\n$Id: tzfile.py,v 1.8 2004/06/03 00:15:24 zenzen Exp $\n'''",
"from cStringIO import StringIO",
"from datetime import datetime, timedelta",
"from struct import unpack, calcsize",
"from pytz.tzinfo import StaticTzInfo, DstTzInfo, memorized_ttinfo",
"from pytz.tzinfo import memorized_datetime, memorize... |
#!/usr/bin/env python
'''
$Id: tzfile.py,v 1.8 2004/06/03 00:15:24 zenzen Exp $
'''
from cStringIO import StringIO
from datetime import datetime, timedelta
from struct import unpack, calcsize
from pytz.tzinfo import StaticTzInfo, DstTzInfo, memorized_ttinfo
from pytz.tzinfo import memorized_datetime, memorized_timedelta
def build_tzinfo(zone, fp):
head_fmt = '>4s 16x 6l'
head_size = calcsize(head_fmt)
(magic,ttisgmtcnt,ttisstdcnt,leapcnt,
timecnt,typecnt,charcnt) = unpack(head_fmt, fp.read(head_size))
# Make sure it is a tzinfo(5) file
assert magic == 'TZif'
# Read out the transition times, localtime indices and ttinfo structures.
data_fmt = '>%(timecnt)dl %(timecnt)dB %(ttinfo)s %(charcnt)ds' % dict(
timecnt=timecnt, ttinfo='lBB'*typecnt, charcnt=charcnt)
data_size = calcsize(data_fmt)
data = unpack(data_fmt, fp.read(data_size))
# make sure we unpacked the right number of values
assert len(data) == 2 * timecnt + 3 * typecnt + 1
transitions = [memorized_datetime(trans)
for trans in data[:timecnt]]
lindexes = list(data[timecnt:2 * timecnt])
ttinfo_raw = data[2 * timecnt:-1]
tznames_raw = data[-1]
del data
# Process ttinfo into separate structs
ttinfo = []
tznames = {}
i = 0
while i < len(ttinfo_raw):
# have we looked up this timezone name yet?
tzname_offset = ttinfo_raw[i+2]
if tzname_offset not in tznames:
nul = tznames_raw.find('\0', tzname_offset)
if nul < 0:
nul = len(tznames_raw)
tznames[tzname_offset] = tznames_raw[tzname_offset:nul]
ttinfo.append((ttinfo_raw[i],
bool(ttinfo_raw[i+1]),
tznames[tzname_offset]))
i += 3
# Now build the timezone object
if len(transitions) == 0:
ttinfo[0][0], ttinfo[0][2]
cls = type(zone, (StaticTzInfo,), dict(
zone=zone,
_utcoffset=memorized_timedelta(ttinfo[0][0]),
_tzname=ttinfo[0][2]))
else:
# Early dates use the first standard time ttinfo
i = 0
while ttinfo[i][1]:
i += 1
if ttinfo[i] == ttinfo[lindexes[0]]:
transitions[0] = datetime.min
else:
transitions.insert(0, datetime.min)
lindexes.insert(0, i)
# calculate transition info
transition_info = []
for i in range(len(transitions)):
inf = ttinfo[lindexes[i]]
utcoffset = inf[0]
if not inf[1]:
dst = 0
else:
for j in range(i-1, -1, -1):
prev_inf = ttinfo[lindexes[j]]
if not prev_inf[1]:
break
dst = inf[0] - prev_inf[0] # dst offset
tzname = inf[2]
# Round utcoffset and dst to the nearest minute or the
# datetime library will complain. Conversions to these timezones
# might be up to plus or minus 30 seconds out, but it is
# the best we can do.
utcoffset = int((utcoffset + 30) / 60) * 60
dst = int((dst + 30) / 60) * 60
transition_info.append(memorized_ttinfo(utcoffset, dst, tzname))
cls = type(zone, (DstTzInfo,), dict(
zone=zone,
_utc_transition_times=transitions,
_transition_info=transition_info))
return cls()
if __name__ == '__main__':
import os.path
from pprint import pprint
base = os.path.join(os.path.dirname(__file__), 'zoneinfo')
tz = build_tzinfo('Australia/Melbourne',
open(os.path.join(base,'Australia','Melbourne'), 'rb'))
tz = build_tzinfo('US/Eastern',
open(os.path.join(base,'US','Eastern'), 'rb'))
pprint(tz._utc_transition_times)
#print tz.asPython(4)
#print tz.transitions_mapping
| [
[
8,
0,
0.0265,
0.0265,
0,
0.66,
0,
0,
1,
0,
0,
0,
0,
0,
0
],
[
1,
0,
0.0531,
0.0088,
0,
0.66,
0.1429,
764,
0,
1,
0,
0,
764,
0,
0
],
[
1,
0,
0.0619,
0.0088,
0,
0.66... | [
"'''\n$Id: tzfile.py,v 1.8 2004/06/03 00:15:24 zenzen Exp $\n'''",
"from cStringIO import StringIO",
"from datetime import datetime, timedelta",
"from struct import unpack, calcsize",
"from pytz.tzinfo import StaticTzInfo, DstTzInfo, memorized_ttinfo",
"from pytz.tzinfo import memorized_datetime, memorize... |
'''
Reference tzinfo implementations from the Python docs.
Used for testing against as they are only correct for the years
1987 to 2006. Do not use these for real code.
'''
from datetime import tzinfo, timedelta, datetime
from pytz import utc, UTC, HOUR, ZERO
# A class building tzinfo objects for fixed-offset time zones.
# Note that FixedOffset(0, "UTC") is a different way to build a
# UTC tzinfo object.
class FixedOffset(tzinfo):
"""Fixed offset in minutes east from UTC."""
def __init__(self, offset, name):
self.__offset = timedelta(minutes = offset)
self.__name = name
def utcoffset(self, dt):
return self.__offset
def tzname(self, dt):
return self.__name
def dst(self, dt):
return ZERO
# A class capturing the platform's idea of local time.
import time as _time
STDOFFSET = timedelta(seconds = -_time.timezone)
if _time.daylight:
DSTOFFSET = timedelta(seconds = -_time.altzone)
else:
DSTOFFSET = STDOFFSET
DSTDIFF = DSTOFFSET - STDOFFSET
class LocalTimezone(tzinfo):
def utcoffset(self, dt):
if self._isdst(dt):
return DSTOFFSET
else:
return STDOFFSET
def dst(self, dt):
if self._isdst(dt):
return DSTDIFF
else:
return ZERO
def tzname(self, dt):
return _time.tzname[self._isdst(dt)]
def _isdst(self, dt):
tt = (dt.year, dt.month, dt.day,
dt.hour, dt.minute, dt.second,
dt.weekday(), 0, -1)
stamp = _time.mktime(tt)
tt = _time.localtime(stamp)
return tt.tm_isdst > 0
Local = LocalTimezone()
# A complete implementation of current DST rules for major US time zones.
def first_sunday_on_or_after(dt):
days_to_go = 6 - dt.weekday()
if days_to_go:
dt += timedelta(days_to_go)
return dt
# In the US, DST starts at 2am (standard time) on the first Sunday in April.
DSTSTART = datetime(1, 4, 1, 2)
# and ends at 2am (DST time; 1am standard time) on the last Sunday of Oct.
# which is the first Sunday on or after Oct 25.
DSTEND = datetime(1, 10, 25, 1)
class USTimeZone(tzinfo):
def __init__(self, hours, reprname, stdname, dstname):
self.stdoffset = timedelta(hours=hours)
self.reprname = reprname
self.stdname = stdname
self.dstname = dstname
def __repr__(self):
return self.reprname
def tzname(self, dt):
if self.dst(dt):
return self.dstname
else:
return self.stdname
def utcoffset(self, dt):
return self.stdoffset + self.dst(dt)
def dst(self, dt):
if dt is None or dt.tzinfo is None:
# An exception may be sensible here, in one or both cases.
# It depends on how you want to treat them. The default
# fromutc() implementation (called by the default astimezone()
# implementation) passes a datetime with dt.tzinfo is self.
return ZERO
assert dt.tzinfo is self
# Find first Sunday in April & the last in October.
start = first_sunday_on_or_after(DSTSTART.replace(year=dt.year))
end = first_sunday_on_or_after(DSTEND.replace(year=dt.year))
# Can't compare naive to aware objects, so strip the timezone from
# dt first.
if start <= dt.replace(tzinfo=None) < end:
return HOUR
else:
return ZERO
Eastern = USTimeZone(-5, "Eastern", "EST", "EDT")
Central = USTimeZone(-6, "Central", "CST", "CDT")
Mountain = USTimeZone(-7, "Mountain", "MST", "MDT")
Pacific = USTimeZone(-8, "Pacific", "PST", "PDT")
| [
[
8,
0,
0.0236,
0.0394,
0,
0.66,
0,
0,
1,
0,
0,
0,
0,
0,
0
],
[
1,
0,
0.0551,
0.0079,
0,
0.66,
0.0588,
426,
0,
3,
0,
0,
426,
0,
0
],
[
1,
0,
0.063,
0.0079,
0,
0.66,... | [
"'''\nReference tzinfo implementations from the Python docs.\nUsed for testing against as they are only correct for the years\n1987 to 2006. Do not use these for real code.\n'''",
"from datetime import tzinfo, timedelta, datetime",
"from pytz import utc, UTC, HOUR, ZERO",
"class FixedOffset(tzinfo):\n \"\"... |
#!/usr/bin/python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
An interactive, stateful AJAX shell that runs Python code on the server.
Part of http://code.google.com/p/google-app-engine-samples/.
May be run as a standalone app or in an existing app as an admin-only handler.
Can be used for system administration tasks, as an interactive way to try out
APIs, or as a debugging aid during development.
The logging, os, sys, db, and users modules are imported automatically.
Interpreter state is stored in the datastore so that variables, function
definitions, and other values in the global and local namespaces can be used
across commands.
To use the shell in your app, copy shell.py, static/*, and templates/* into
your app's source directory. Then, copy the URL handlers from app.yaml into
your app.yaml.
TODO: unit tests!
"""
import logging
import new
import os
import pickle
import sys
import traceback
import types
import wsgiref.handlers
from google.appengine.api import users
from google.appengine.ext import db
from google.appengine.ext import webapp
from google.appengine.ext.webapp import template
# Set to True if stack traces should be shown in the browser, etc.
_DEBUG = True
# The entity kind for shell sessions. Feel free to rename to suit your app.
_SESSION_KIND = '_Shell_Session'
# Types that can't be pickled.
UNPICKLABLE_TYPES = (
types.ModuleType,
types.TypeType,
types.ClassType,
types.FunctionType,
)
# Unpicklable statements to seed new sessions with.
INITIAL_UNPICKLABLES = [
'import logging',
'import os',
'import sys',
'from google.appengine.ext import db',
'from google.appengine.api import users',
]
class Session(db.Model):
"""A shell session. Stores the session's globals.
Each session globals is stored in one of two places:
If the global is picklable, it's stored in the parallel globals and
global_names list properties. (They're parallel lists to work around the
unfortunate fact that the datastore can't store dictionaries natively.)
If the global is not picklable (e.g. modules, classes, and functions), or if
it was created by the same statement that created an unpicklable global,
it's not stored directly. Instead, the statement is stored in the
unpicklables list property. On each request, before executing the current
statement, the unpicklable statements are evaluated to recreate the
unpicklable globals.
The unpicklable_names property stores all of the names of globals that were
added by unpicklable statements. When we pickle and store the globals after
executing a statement, we skip the ones in unpicklable_names.
Using Text instead of string is an optimization. We don't query on any of
these properties, so they don't need to be indexed.
"""
global_names = db.ListProperty(db.Text)
globals = db.ListProperty(db.Blob)
unpicklable_names = db.ListProperty(db.Text)
unpicklables = db.ListProperty(db.Text)
def set_global(self, name, value):
"""Adds a global, or updates it if it already exists.
Also removes the global from the list of unpicklable names.
Args:
name: the name of the global to remove
value: any picklable value
"""
blob = db.Blob(pickle.dumps(value))
if name in self.global_names:
index = self.global_names.index(name)
self.globals[index] = blob
else:
self.global_names.append(db.Text(name))
self.globals.append(blob)
self.remove_unpicklable_name(name)
def remove_global(self, name):
"""Removes a global, if it exists.
Args:
name: string, the name of the global to remove
"""
if name in self.global_names:
index = self.global_names.index(name)
del self.global_names[index]
del self.globals[index]
def globals_dict(self):
"""Returns a dictionary view of the globals.
"""
return dict((name, pickle.loads(val))
for name, val in zip(self.global_names, self.globals))
def add_unpicklable(self, statement, names):
"""Adds a statement and list of names to the unpicklables.
Also removes the names from the globals.
Args:
statement: string, the statement that created new unpicklable global(s).
names: list of strings; the names of the globals created by the statement.
"""
self.unpicklables.append(db.Text(statement))
for name in names:
self.remove_global(name)
if name not in self.unpicklable_names:
self.unpicklable_names.append(db.Text(name))
def remove_unpicklable_name(self, name):
"""Removes a name from the list of unpicklable names, if it exists.
Args:
name: string, the name of the unpicklable global to remove
"""
if name in self.unpicklable_names:
self.unpicklable_names.remove(name)
class FrontPageHandler(webapp.RequestHandler):
  """Creates a new session and renders the shell.html template."""

  def get(self):
    # Reuse the session named in the query string, or start a fresh one.
    # TODO: garbage collect old shell sessions
    session_key = self.request.get('session')
    if not session_key:
      session = Session()
      session.unpicklables = [db.Text(line) for line in INITIAL_UNPICKLABLES]
      session_key = session.put()
    else:
      session = Session.get(session_key)

    session_url = '/?session=%s' % session_key
    template_vars = {
        'server_software': os.environ['SERVER_SOFTWARE'],
        'python_version': sys.version,
        'session': str(session_key),
        'user': users.get_current_user(),
        'login_url': users.create_login_url(session_url),
        'logout_url': users.create_logout_url(session_url),
        }
    template_file = os.path.join(os.path.dirname(__file__), '../templates',
                                 'shell.html')
    self.response.out.write(
        webapp.template.render(template_file, template_vars, debug=_DEBUG))
class StatementHandler(webapp.RequestHandler):
  """Evaluates a python statement in a given session and returns the result.

  NOTE(review): this exec()s arbitrary user-supplied code on the server; it
  must only be reachable by trusted (admin) users.
  """
  def get(self):
    # All output (results, tracebacks, captured prints) goes back as text.
    self.response.headers['Content-Type'] = 'text/plain'
    # extract the statement to be run
    statement = self.request.get('statement')
    if not statement:
      return
    # the python compiler doesn't like network line endings
    statement = statement.replace('\r\n', '\n')
    # add a couple newlines at the end of the statement. this makes
    # single-line expressions such as 'class Foo: pass' evaluate happily.
    statement += '\n\n'
    # log and compile the statement up front
    try:
      logging.info('Compiling and evaluating:\n%s' % statement)
      # 'single' mode prints the repr of bare expressions, like the REPL.
      compiled = compile(statement, '<string>', 'single')
    except:
      # compile errors (e.g. SyntaxError) are reported back to the client
      self.response.out.write(traceback.format_exc())
      return
    # create a dedicated module to be used as this statement's __main__
    statement_module = new.module('__main__')
    # use this request's __builtin__, since it changes on each request.
    # this is needed for import statements, among other things.
    import __builtin__
    statement_module.__builtins__ = __builtin__
    # load the session from the datastore
    session = Session.get(self.request.get('session'))
    # swap in our custom module for __main__. then unpickle the session
    # globals, run the statement, and re-pickle the session globals, all
    # inside it.
    old_main = sys.modules.get('__main__')
    try:
      sys.modules['__main__'] = statement_module
      statement_module.__name__ = '__main__'
      # re-evaluate the unpicklables
      for code in session.unpicklables:
        exec code in statement_module.__dict__
      # re-initialize the globals
      for name, val in session.globals_dict().items():
        try:
          statement_module.__dict__[name] = val
        except:
          msg = 'Dropping %s since it could not be unpickled.\n' % name
          self.response.out.write(msg)
          logging.warning(msg + traceback.format_exc())
          session.remove_global(name)
      # run!
      old_globals = dict(statement_module.__dict__)
      try:
        old_stdout = sys.stdout
        old_stderr = sys.stderr
        try:
          # capture stdout/stderr so print output goes to the HTTP response
          sys.stdout = self.response.out
          sys.stderr = self.response.out
          exec compiled in statement_module.__dict__
        finally:
          sys.stdout = old_stdout
          sys.stderr = old_stderr
      except:
        # runtime errors: report the traceback; note session.put() below
        # is skipped on this path
        self.response.out.write(traceback.format_exc())
        return
      # extract the new globals that this statement added
      new_globals = {}
      for name, val in statement_module.__dict__.items():
        if name not in old_globals or val != old_globals[name]:
          new_globals[name] = val
      if True in [isinstance(val, UNPICKLABLE_TYPES)
                  for val in new_globals.values()]:
        # this statement added an unpicklable global. store the statement and
        # the names of all of the globals it added in the unpicklables.
        session.add_unpicklable(statement, new_globals.keys())
        logging.debug('Storing this statement as an unpicklable.')
      else:
        # this statement didn't add any unpicklables. pickle and store the
        # new globals back into the datastore.
        for name, val in new_globals.items():
          if not name.startswith('__'):
            session.set_global(name, val)
    finally:
      # always restore the real __main__, even if the statement raised
      sys.modules['__main__'] = old_main
    session.put()
def main():
  """Set up sys.path and run the shell WSGI application as a CGI script."""
  libs_path = os.path.abspath('./')
  django_path = os.path.abspath('django.zip')
  # bundled django.zip is inserted first, then the app directory in front
  # of it, so the final order is: app dir, django.zip, everything else.
  if django_path not in sys.path:
    sys.path.insert(0, django_path)
  if libs_path not in sys.path:
    sys.path.insert(0, libs_path)
  routes = [('/shell/', FrontPageHandler),
            ('/shell/shell/', StatementHandler)]
  app = webapp.WSGIApplication(routes, debug=_DEBUG)
  wsgiref.handlers.CGIHandler().run(app)

if __name__ == '__main__':
  main()
| [
[
8,
0,
0.0857,
0.0667,
0,
0.66,
0,
0,
1,
0,
0,
0,
0,
0,
0
],
[
1,
0,
0.1238,
0.0032,
0,
0.66,
0.0476,
715,
0,
1,
0,
0,
715,
0,
0
],
[
1,
0,
0.127,
0.0032,
0,
0.66,... | [
"\"\"\"\nAn interactive, stateful AJAX shell that runs Python code on the server.\n\nPart of http://code.google.com/p/google-app-engine-samples/.\n\nMay be run as a standalone app or in an existing app as an admin-only handler.\nCan be used for system administration tasks, as an interactive way to try out\nAPIs, or... |
#-*- coding: utf-8 -*-
import math
class Pager(object):
    """Computes pagination state and renders it as HTML.

    Given a total item count, a page size and the requested page, exposes
    the clamped current page, the visible page-number window and the
    show_first/show_pre/show_next/show_end flags.

    NOTE(review): for pages near the end of a long listing the window math
    can yield to_page < from_page (an empty page_list); that behavior is
    preserved here as-is.
    """

    def __init__(self, total, pageSize, page, page_list_num = 10):
        # total number of pages, rounding up
        page_count = int(math.ceil((total + 0.0) / pageSize))
        # clamp the requested page into [1, page_count]
        if page < 1:
            page = 1
        if page > page_count:
            page = page_count
        offset = 2
        if page_list_num >= page_count:
            # everything fits in one window
            window_start = 1
            window_end = page_count
        else:
            window_start = page - offset
            window_end = window_start + page_list_num - 1
            if window_start < 1:
                window_end = page + 1 - window_start
                window_start = 1
            if window_end - window_start < page_list_num:
                window_end = page_list_num
            elif window_end > page_count:
                window_start = page_count - page_list_num + 1
                window_end = page_count
        self.show_first = page - offset > 1 and page_count > page_list_num
        self.show_pre = page > 1
        self.show_next = page < page_count
        self.show_end = window_end < page_count
        self.total = total
        self.pageSize = pageSize
        self.page = page
        self.pageCount = page_count
        self.from_page = window_start
        self.to_page = window_end
        self.page_list = range(window_start, window_end + 1)

    def render(self, link=''):
        """Render the pager as an HTML fragment; page params append to link."""
        parts = ['<span>共%s条</span>' % self.total]
        # normalize the base link so 'page=N' can always be appended
        if not link:
            link = '?'
        elif '?' not in link:
            link += '?'
        else:
            link += '&'
        if self.show_pre:
            parts.append('<a href="%spage=%s">上一页</a>' % (link, self.page - 1))
        if self.show_first:
            parts.append('<a href="%spage=1">1</a><span>...</span>' % link)
        for num in self.page_list:
            if num == self.page:
                parts.append('<span class="current">%s</span>' % self.page)
            else:
                parts.append('<a href="%spage=%s">%s</a>' % (link, num, num))
        if self.show_end:
            parts.append('<span>...</span><a href="%spage=%s">%s</a>'
                         % (link, self.pageCount, self.pageCount))
        if self.show_next:
            parts.append('<a href="%spage=%s">下一页</a>' % (link, self.page + 1))
        return ''.join(parts)

    def render_tpl(self):
        """Return the equivalent Django template snippet (expects 'pager')."""
        return """
        共{{ pager.total }}条
        {% if pager.show_pre %}
        <a href="?page={{ pager.page|add:"-1" }}">上一页</a>
        {% else %}
        <span class="disabled">上一页</span>
        {% endif %}
        {% if pager.show_first %}
        <a href="?page=1">1</a>...
        {% endif %}
        {% for p in pager.page_list %}
        {% ifequal p pager.page %}
        <span class="current">{{ pager.page }}</span>
        {% else %}
        <a href="?page={{ p }}">{{ p }}</a>
        {% endifequal %}
        {% endfor %}
        {% if pager.show_end %}
        ...<a href="?page={{ pager.pageCount }}">{{ pager.pageCount }}</a>
        {% endif %}
        {% if pager.show_next %}
        <a href="?page={{ pager.page|add:"1" }}">下一页</a>
        {% else %}
        <span class="disabled">下一页</span>
        {% endif %}
        """
| [
[
1,
0,
0.0154,
0.0077,
0,
0.66,
0,
526,
0,
1,
0,
0,
526,
0,
0
],
[
3,
0,
0.5154,
0.9769,
0,
0.66,
1,
264,
0,
3,
0,
0,
186,
0,
12
],
[
2,
1,
0.25,
0.4308,
1,
0.51,
... | [
"import math",
"class Pager(object):\n def __init__(self, total, pageSize, page, page_list_num = 10):\n pageCount = int(math.ceil((total+0.0)/pageSize))\n if page < 1:\n page = 1\n if page > pageCount:\n page = pageCount",
" def __init__(self, total, pageSize, pa... |
# Copyright 2008 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from google.appengine.api import memcache
from google.appengine.ext import db
import random
class GeneralCounterShardConfig(db.Model):
  """Tracks the number of shards for each named counter."""
  # Counter name; also used as this entity's key_name via get_or_insert.
  name = db.StringProperty(required=True)
  # How many shard entities back this counter.
  num_shards = db.IntegerProperty(required=True, default=20)
class GeneralCounterShard(db.Model):
  """Shards for each named counter.

  Each shard holds a partial count; the counter's value is the sum over
  all shards with the same name (see get_count).
  """
  # Name of the counter this shard belongs to.
  name = db.StringProperty(required=True)
  # This shard's partial count.
  count = db.IntegerProperty(required=True, default=0)
def get_count(name):
  """Retrieve the value for a given sharded counter.

  The total is served from memcache when available; on a miss it is
  recomputed by summing every shard and cached for 60 seconds.

  Parameters:
    name - The name of the counter
  """
  cached = memcache.get(name)
  if cached is not None:
    return int(cached)
  shards = GeneralCounterShard.all().filter('name = ', name)
  total = sum(shard.count for shard in shards)
  memcache.add(name, str(total), 60)
  return total
def increment(name):
  """Increment the value for a given sharded counter.

  A randomly chosen shard is updated inside a datastore transaction, and
  the cached total in memcache is bumped to match.

  Parameters:
    name - The name of the counter
  """
  config = GeneralCounterShardConfig.get_or_insert(name, name=name)

  def bump():
    shard_id = random.randint(0, config.num_shards - 1)
    key = name + str(shard_id)
    shard = GeneralCounterShard.get_by_key_name(key)
    if shard is None:
      shard = GeneralCounterShard(key_name=key, name=name)
    shard.count += 1
    shard.put()
  db.run_in_transaction(bump)
  memcache.incr(name)
def decrement(name):
  """Decrement the value for a given sharded counter.

  Parameters:
    name - The name of the counter
  """
  config = GeneralCounterShardConfig.get_or_insert(name, name=name)
  # pick a random shard and decrement its partial count in a transaction
  def txn():
    index = random.randint(0, config.num_shards - 1)
    shard_name = name + str(index)
    counter = GeneralCounterShard.get_by_key_name(shard_name)
    if counter is None:
      counter = GeneralCounterShard(key_name=shard_name, name=name)
    counter.count -= 1
    counter.put()
  db.run_in_transaction(txn)
  # keep the cached total in sync with the datastore shards
  memcache.decr(name)
def increase_shards(name, num):
  """Increase the number of shards for a given sharded counter.

  Will never decrease the number of shards.

  Parameters:
    name - The name of the counter
    num - How many shards to use
  """
  config = GeneralCounterShardConfig.get_or_insert(name, name=name)

  def grow():
    # only ever grow, never shrink
    if config.num_shards < num:
      config.num_shards = num
      config.put()
  db.run_in_transaction(grow)
| [
[
1,
0,
0.16,
0.01,
0,
0.66,
0,
279,
0,
1,
0,
0,
279,
0,
0
],
[
1,
0,
0.17,
0.01,
0,
0.66,
0.125,
167,
0,
1,
0,
0,
167,
0,
0
],
[
1,
0,
0.18,
0.01,
0,
0.66,
0.2... | [
"from google.appengine.api import memcache",
"from google.appengine.ext import db",
"import random",
"class GeneralCounterShardConfig(db.Model):\n \"\"\"Tracks the number of shards for each named counter.\"\"\"\n name = db.StringProperty(required=True)\n num_shards = db.IntegerProperty(required=True, defau... |
#!/usr/bin/env python
# Copyright (C) 2009 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the 'License');
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an 'AS IS' BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import re
import string
import sys
###############################################################################
# match "#00 pc 0003f52e /system/lib/libdvm.so" for example
###############################################################################
# groups: (log tag)(#frame level) (pc) (8-hex-digit address) (library path)
trace_line = re.compile("(.*)(\#[0-9]+) (..) ([0-9a-f]{8}) ([^\r\n \t]*)")
# returns a list containing the function name and the file/lineno
def CallAddr2Line(lib, addr):
  """Resolve addr (hex string) inside lib via addr2line, demangled by c++filt.

  Returns [function_name, file:line], or ["(unknown)", "(unknown)"] when
  lib is empty or addr2line produced no output.
  """
  global symbols_dir
  global addr2line_cmd
  global cppfilt_cmd
  if lib != "":
    cmd = addr2line_cmd + \
      " -f -e " + symbols_dir + lib + " 0x" + addr
    stream = os.popen(cmd)
    lines = stream.readlines()
    list = map(string.strip, lines)
  else:
    list = []
  if list != []:
    # Name like "move_forward_type<JavaVMOption>" causes troubles when
    # passed to the shell, so escape the angle brackets first.
    mangled_name = re.sub('<', '\<', list[0]);
    mangled_name = re.sub('>', '\>', mangled_name);
    cmd = cppfilt_cmd + " " + mangled_name
    stream = os.popen(cmd)
    # replace the mangled name with c++filt's demangled version
    list[0] = stream.readline()
    stream.close()
    list = map(string.strip, list)
  else:
    list = [ "(unknown)", "(unknown)" ]
  return list
###############################################################################
# similar to CallAddr2Line, but using objdump to find out the name of the
# containing function of the specified address
###############################################################################
def CallObjdump(lib, addr):
  """Return the name of the function containing addr in lib, or "(unknown)".

  Disassembles a single instruction at addr with objdump and parses the
  "<func+0xNN>:" header above it.
  """
  global objdump_cmd
  global symbols_dir
  unknown = "(unknown)"
  # map the host OS name onto the prebuilt toolchain directory naming
  uname = os.uname()[0]
  if uname == "Darwin":
    proc = os.uname()[-1]
    if proc == "i386":
      uname = "darwin-x86"
    else:
      uname = "darwin-ppc"
  elif uname == "Linux":
    uname = "linux-x86"
  # NOTE(review): 'uname' computed above is never used in this function —
  # looks like leftover code copied from SetupToolsPath.
  if lib != "":
    # stop address is one past addr so exactly one instruction is dumped
    next_addr = string.atoi(addr, 16) + 1
    cmd = objdump_cmd \
          + " -C -d --start-address=0x" + addr + " --stop-address=" \
          + str(next_addr) \
          + " " + symbols_dir + lib
    stream = os.popen(cmd)
    lines = stream.readlines()
    # NOTE(review): the result of this map() is discarded, so the lines
    # keep their trailing newlines; the regex below still matches because
    # re.match's '$' tolerates a trailing newline.
    map(string.strip, lines)
    stream.close()
  else:
    return unknown
  # output looks like
  #
  # file format elf32-littlearm
  #
  # Disassembly of section .text:
  #
  # 0000833c <func+0x4>:
  #     833c:       701a            strb    r2, [r3, #0]
  #
  # we want to extract the "func" part
  num_lines = len(lines)
  if num_lines < 2:
    return unknown
  func_name = lines[num_lines-2]
  func_regexp = re.compile("(^.*\<)(.*)(\+.*\>:$)")
  components = func_regexp.match(func_name)
  if components is None:
    return unknown
  return components.group(2)
###############################################################################
# determine the symbols directory in the local build
###############################################################################
def FindSymbolsDir():
  """Locate the unstripped-symbols directory and store it in symbols_dir.

  Prefers $ANDROID_PRODUCT_OUT/symbols; otherwise asks the make-based build
  system for TARGET_OUT_UNSTRIPPED.  Exits if the directory is missing.
  """
  global symbols_dir
  try:
    path = os.environ['ANDROID_PRODUCT_OUT'] + "/symbols"
  except:
    # ANDROID_PRODUCT_OUT not set: query the build system instead
    cmd = "CALLED_FROM_SETUP=true BUILD_SYSTEM=build/core " \
          + "SRC_TARGET_DIR=build/target make -f build/core/config.mk " \
          + "dumpvar-abs-TARGET_OUT_UNSTRIPPED"
    stream = os.popen(cmd)
    # NOTE(review): local 'str' shadows the builtin for the next two lines
    str = stream.read()
    stream.close()
    path = str.strip()
  if (not os.path.exists(path)):
    print path + " not found!"
    sys.exit(1)
  symbols_dir = path
###############################################################################
# determine the path of binutils
###############################################################################
def SetupToolsPath():
  """Find the addr2line/objdump/c++filt prebuilts and store their paths.

  Tries the relative prebuilts path first, then $ANDROID_BUILD_TOP; exits
  if addr2line cannot be found.
  """
  global addr2line_cmd
  global objdump_cmd
  global cppfilt_cmd
  global symbols_dir
  # map the host OS name onto the prebuilt toolchain directory naming
  uname = os.uname()[0]
  if uname == "Darwin":
    uname = "darwin-x86"
  elif uname == "Linux":
    uname = "linux-x86"
  gcc_version = os.environ["TARGET_GCC_VERSION"]
  prefix = "./prebuilts/gcc/" + uname + "/arm/arm-linux-androideabi-" + \
           gcc_version + "/bin/"
  addr2line_cmd = prefix + "arm-linux-androideabi-addr2line"
  if (not os.path.exists(addr2line_cmd)):
    # fall back to an absolute path under the build top, if available
    try:
      prefix = os.environ['ANDROID_BUILD_TOP'] + "/prebuilts/gcc/" + \
               uname + "/arm/arm-linux-androideabi-" + gcc_version + "/bin/"
    except:
      prefix = "";
    addr2line_cmd = prefix + "arm-linux-androideabi-addr2line"
    if (not os.path.exists(addr2line_cmd)):
      print addr2line_cmd + " not found!"
      sys.exit(1)
  objdump_cmd = prefix + "arm-linux-androideabi-objdump"
  cppfilt_cmd = prefix + "arm-linux-androideabi-c++filt"
###############################################################################
# look up the function and file/line number for a raw stack trace line
# groups[0]: log tag
# groups[1]: stack level
# groups[2]: "pc"
# groups[3]: code address
# groups[4]: library name
###############################################################################
def SymbolTranslation(groups):
  """Print one symbolized stack-trace line built from a trace_line match."""
  lib_name = groups[4]
  code_addr = groups[3]
  caller = CallObjdump(lib_name, code_addr)
  func_line_pair = CallAddr2Line(lib_name, code_addr)
  # If a callee is inlined to the caller, objdump will see the caller's
  # address but addr2line will report the callee's address. So the printed
  # format is designed to be "caller<-callee file:line"
  if (func_line_pair[0] != caller):
    print groups[0] + groups[1] + " " + caller + "<-" + \
          ' '.join(func_line_pair[:]) + " "
  else:
    print groups[0] + groups[1] + " " + ' '.join(func_line_pair[:]) + " "
###############################################################################
if __name__ == '__main__':
  # pass the options to adb
  adb_cmd = "adb " + ' '.join(sys.argv[1:])
  # setup addr2line_cmd and objdump_cmd
  SetupToolsPath()
  # setup the symbols directory
  FindSymbolsDir()
  # invoke the adb command and filter its output
  stream = os.popen(adb_cmd)
  while (True):
    line = stream.readline()
    # EOF reached
    if (line == ''):
      break
    # remove the trailing \n
    line = line.strip()
    # see if this is a stack trace line
    match = trace_line.match(line)
    if (match):
      groups = match.groups()
      # translate raw address into symbols
      SymbolTranslation(groups)
    else:
      # not a stack frame: echo the line through unchanged
      print line
      sys.stdout.flush()
  # adb itself aborts
  stream.close()
| [
[
1,
0,
0.0773,
0.0045,
0,
0.66,
0,
688,
0,
1,
0,
0,
688,
0,
0
],
[
1,
0,
0.0818,
0.0045,
0,
0.66,
0.1,
540,
0,
1,
0,
0,
540,
0,
0
],
[
1,
0,
0.0864,
0.0045,
0,
0.6... | [
"import os",
"import re",
"import string",
"import sys",
"trace_line = re.compile(\"(.*)(\\#[0-9]+) (..) ([0-9a-f]{8}) ([^\\r\\n \\t]*)\")",
"def CallAddr2Line(lib, addr):\n global symbols_dir\n global addr2line_cmd\n global cppfilt_cmd\n\n if lib != \"\":\n cmd = addr2line_cmd + \\\n \" -... |
#!/usr/bin/env python
import os
import re
import sys
def fail_with_usage():
  """Print usage and the DEPENDENCY_FILE format to stderr, then exit(1)."""
  sys.stderr.write("usage: java-layers.py DEPENDENCY_FILE SOURCE_DIRECTORIES...\n")
  sys.stderr.write("\n")
  sys.stderr.write("Enforces layering between java packages. Scans\n")
  sys.stderr.write("DIRECTORY and prints errors when the packages violate\n")
  sys.stderr.write("the rules defined in the DEPENDENCY_FILE.\n")
  sys.stderr.write("\n")
  sys.stderr.write("Prints a warning when an unknown package is encountered\n")
  sys.stderr.write("on the assumption that it should fit somewhere into the\n")
  sys.stderr.write("layering.\n")
  sys.stderr.write("\n")
  sys.stderr.write("DEPENDENCY_FILE format\n")
  sys.stderr.write("  - # starts comment\n")
  sys.stderr.write("  - Lines consisting of two java package names: The\n")
  sys.stderr.write("    first package listed must not contain any references\n")
  sys.stderr.write("    to any classes present in the second package, or any\n")
  sys.stderr.write("    of its dependencies.\n")
  sys.stderr.write("  - Lines consisting of one java package name: The\n")
  sys.stderr.write("    package is assumed to be a high level package and\n")
  sys.stderr.write("    nothing may depend on it.\n")
  # BUG FIX: '+' is a plus and '-' is a dash; the two descriptions below had
  # the words "dash" and "plus" swapped (the parser treats '+' as low level
  # and '-' as legacy).  Also fixed the "packge" typo.
  sys.stderr.write("  - Lines consisting of a plus (+) followed by one java\n")
  sys.stderr.write("    package name: The package is considered a low level\n")
  sys.stderr.write("    package and may not import any of the other packages\n")
  sys.stderr.write("    listed in the dependency file.\n")
  sys.stderr.write("  - Lines consisting of a dash (-) followed by one java\n")
  sys.stderr.write("    package name: The package is considered \'legacy\'\n")
  sys.stderr.write("    and excluded from errors.\n")
  sys.stderr.write("\n")
  sys.exit(1)
class Dependency:
  """A single package rule parsed from the dependency file."""

  def __init__(self, filename, lineno, lower, top, lowlevel, legacy):
    # where the rule was declared, for error messages
    self.filename = filename
    self.lineno = lineno
    # the package this rule is about, plus its classification flags
    self.lower = lower
    self.top = top
    self.lowlevel = lowlevel
    self.legacy = legacy
    # direct dependencies and the computed transitive prefix set
    self.uppers = []
    self.transitive = set()

  def matches(self, imp):
    """Return True if the import string falls under any transitive entry."""
    return any(imp.startswith(prefix) for prefix in self.transitive)
class Dependencies:
  """Holds all dependency rules and answers package-lookup queries.

  Builds the transitive closure of each rule's dependencies and applies
  the 'top' and 'lowlevel' restrictions up front.
  """
  def __init__(self, deps):
    # depth-first walk accumulating dep's uppers into obj.transitive;
    # 'visited' detects cycles in the dependency file
    def recurse(obj, dep, visited):
      global err
      if dep in visited:
        sys.stderr.write("%s:%d: Circular dependency found:\n"
            % (dep.filename, dep.lineno))
        for v in visited:
          sys.stderr.write("%s:%d: Dependency: %s\n"
              % (v.filename, v.lineno, v.lower))
        err = True
        return
      visited.append(dep)
      for upper in dep.uppers:
        obj.transitive.add(upper)
        if upper in deps:
          recurse(obj, deps[upper], visited)
    self.deps = deps
    # (package-name-parts, rule) pairs used for longest-prefix lookup
    self.parts = [(dep.lower.split('.'),dep) for dep in deps.itervalues()]
    # transitive closure of dependencies
    for dep in deps.itervalues():
      recurse(dep, dep, [])
    # disallow everything from the low level components
    for dep in deps.itervalues():
      if dep.lowlevel:
        for d in deps.itervalues():
          if dep != d and not d.legacy:
            dep.transitive.add(d.lower)
    # disallow the 'top' components everywhere but in their own package
    for dep in deps.itervalues():
      if dep.top and not dep.legacy:
        for d in deps.itervalues():
          if dep != d and not d.legacy:
            d.transitive.add(dep.lower)
    # store prefixes ("pkg.") so Dependency.matches can use startswith
    for dep in deps.itervalues():
      dep.transitive = set([x+"." for x in dep.transitive])
    # debug dump of the computed closure (disabled)
    if False:
      for dep in deps.itervalues():
        print "-->", dep.lower, "-->", dep.transitive
  # Lookup the dep object for the given package. If pkg is a subpackage
  # of one with a rule, that one will be returned. If no matches are found,
  # None is returned.
  def lookup(self, pkg):
    # Returns the number of parts that match
    def compare_parts(parts, pkg):
      if len(parts) > len(pkg):
        return 0
      n = 0
      for i in range(0, len(parts)):
        if parts[i] != pkg[i]:
          return 0
        n = n + 1
      return n
    pkg = pkg.split(".")
    matched = 0
    result = None
    # keep the rule with the longest matching package prefix
    for (parts,dep) in self.parts:
      x = compare_parts(parts, pkg)
      if x > matched:
        matched = x
        result = dep
    return result
def parse_dependency_file(filename):
  """Parse the dependency file into a Dependencies object.

  Sets the global err flag on duplicate packages, malformed lines, or
  dependencies added to top-level packages.
  """
  global err
  f = file(filename)
  lines = f.readlines()
  f.close()
  # attach 1-based line numbers to each line for error reporting
  def lineno(s, i):
    i[0] = i[0] + 1
    return (i[0],s)
  n = [0]
  lines = [lineno(x,n) for x in lines]
  # strip '#' comments, drop blank lines, and split into words
  lines = [(n,s.split("#")[0].strip()) for (n,s) in lines]
  lines = [(n,s) for (n,s) in lines if len(s) > 0]
  lines = [(n,s.split()) for (n,s) in lines]
  deps = {}
  for n,words in lines:
    if len(words) == 1:
      # one word: a package declaration, optionally prefixed with
      # '+' (low level) or '-' (legacy)
      lower = words[0]
      top = True
      legacy = False
      lowlevel = False
      if lower[0] == '+':
        lower = lower[1:]
        top = False
        lowlevel = True
      elif lower[0] == '-':
        lower = lower[1:]
        legacy = True
      if lower in deps:
        sys.stderr.write(("%s:%d: Package '%s' already defined on"
            + " line %d.\n") % (filename, n, lower, deps[lower].lineno))
        err = True
      else:
        deps[lower] = Dependency(filename, n, lower, top, lowlevel, legacy)
    elif len(words) == 2:
      # two words "lower upper": lower may not be imported by upper's layer
      lower = words[0]
      upper = words[1]
      if lower in deps:
        dep = deps[lower]
        if dep.top:
          sys.stderr.write(("%s:%d: Can't add dependency to top level package "
              + "'%s'\n") % (filename, n, lower))
          err = True
      else:
        dep = Dependency(filename, n, lower, False, False, False)
        deps[lower] = dep
      dep.uppers.append(upper)
    else:
      sys.stderr.write("%s:%d: Too many words on line starting at \'%s\'\n" % (
        filename, n, words[2]))
      err = True
  return Dependencies(deps)
def find_java_files(srcs):
  """Expand srcs into a flat list of .java file paths.

  Each entry is either a directory to walk recursively, or '@listfile'
  naming a file that contains one path per line.
  """
  found = []
  for src in srcs:
    if src[0] == '@':
      # '@file': every non-empty line of the named file is a path
      listing = file(src[1:])
      for raw in listing.readlines():
        stripped = raw.strip()
        if len(stripped) != 0:
          found.append(stripped)
      listing.close()
    else:
      # directory: pick up every *.java underneath it
      for root, dirs, files in os.walk(src):
        for entry in files:
          if entry.lower().endswith(".java"):
            found.append(os.sep.join((root, entry)))
  return found
# Crude Java lexing helpers: strip // and /* */ comments, and recognize
# package / import declarations in a source file's header.
COMMENTS = re.compile("//.*?\n|/\*.*?\*/", re.S)
PACKAGE = re.compile("package\s+(.*)")
IMPORT = re.compile("import\s+(.*)")
def examine_java_file(deps, filename):
  """Check one java file's imports against the dependency rules.

  Sets the global err flag (and reports to stderr) on parse failures,
  unknown packages, or illegal imports.
  """
  global err
  # Yes, this is a crappy java parser. Write a better one if you want to.
  f = file(filename)
  text = f.read()
  f.close()
  # strip comments, then only consider the text before the first '{'
  # (i.e. the package/import header)
  text = COMMENTS.sub("", text)
  index = text.find("{")
  if index < 0:
    sys.stderr.write(("%s: Error: Unable to parse java. Can't find class "
        + "declaration.\n") % filename)
    err = True
    return
  text = text[0:index]
  statements = [s.strip() for s in text.split(";")]
  # First comes the package declaration. Then iterate while we see import
  # statements. Anything else is either bad syntax that we don't care about
  # because the compiler will fail, or the beginning of the class declaration.
  m = PACKAGE.match(statements[0])
  if not m:
    sys.stderr.write(("%s: Error: Unable to parse java. Missing package "
        + "statement.\n") % filename)
    err = True
    return
  pkg = m.group(1)
  imports = []
  for statement in statements[1:]:
    m = IMPORT.match(statement)
    if not m:
      break
    imports.append(m.group(1))
  # Do the checking
  # (debug dump, disabled)
  if False:
    print filename
    print "'%s' --> %s" % (pkg, imports)
  dep = deps.lookup(pkg)
  if not dep:
    sys.stderr.write(("%s: Error: Package does not appear in dependency file: "
        + "%s\n") % (filename, pkg))
    err = True
    return
  for imp in imports:
    if dep.matches(imp):
      sys.stderr.write("%s: Illegal import in package '%s' of '%s'\n"
          % (filename, pkg, imp))
      err = True
# Global error flag, set by the parsing/checking helpers above.
err = False
def main(argv):
  """Entry point: validate args, parse the rules, then check each java file."""
  if len(argv) < 3:
    fail_with_usage()

  deps = parse_dependency_file(argv[1])
  if err:
    sys.exit(1)

  for java_file in find_java_files(argv[2:]):
    examine_java_file(deps, java_file)
  if err:
    sys.stderr.write("%s: Using this file as dependency file.\n" % argv[1])
    sys.exit(1)
  sys.exit(0)

if __name__ == "__main__":
  main(sys.argv)
| [
[
1,
0,
0.0117,
0.0039,
0,
0.66,
0,
688,
0,
1,
0,
0,
688,
0,
0
],
[
1,
0,
0.0156,
0.0039,
0,
0.66,
0.0714,
540,
0,
1,
0,
0,
540,
0,
0
],
[
1,
0,
0.0195,
0.0039,
0,
... | [
"import os",
"import re",
"import sys",
"def fail_with_usage():\n sys.stderr.write(\"usage: java-layers.py DEPENDENCY_FILE SOURCE_DIRECTORIES...\\n\")\n sys.stderr.write(\"\\n\")\n sys.stderr.write(\"Enforces layering between java packages. Scans\\n\")\n sys.stderr.write(\"DIRECTORY and prints errors whe... |
#!/usr/bin/env python
# This file uses the following encoding: utf-8
import sys
import re
# require exactly one argument: the build log to scan
if len(sys.argv) == 1:
  print 'usage: ' + sys.argv[0] + ' <build.log>'
  sys.exit()
# if you add another level, don't forget to give it a color below
class severity:
  """Symbolic severity levels used to classify build warnings."""
  UNKNOWN = 0
  FIXMENOW = 1
  HIGH = 2
  MEDIUM = 3
  LOW = 4
  HARMLESS = 5
  SKIP = 100
def colorforseverity(sev):
    """Return the HTML color name used to render tables of severity *sev*."""
    palette = {
        severity.FIXMENOW: 'fuchsia',
        severity.HIGH: 'red',
        severity.MEDIUM: 'orange',
        severity.LOW: 'yellow',
        severity.HARMLESS: 'limegreen',
        severity.UNKNOWN: 'blue',
    }
    # Anything unlisted (e.g. SKIP) falls back to grey, as before.
    return palette.get(sev, 'grey')
warnpatterns = [
{ 'category':'make', 'severity':severity.MEDIUM, 'members':[], 'option':'',
'description':'make: overriding commands/ignoring old commands',
'patterns':[r".*: warning: overriding commands for target .+",
r".*: warning: ignoring old commands for target .+"] },
{ 'category':'C/C++', 'severity':severity.HIGH, 'members':[], 'option':'-Wimplicit-function-declaration',
'description':'Implicit function declaration',
'patterns':[r".*: warning: implicit declaration of function .+"] },
{ 'category':'C/C++', 'severity':severity.SKIP, 'members':[], 'option':'',
'description':'',
'patterns':[r".*: warning: conflicting types for '.+'"] },
{ 'category':'C/C++', 'severity':severity.HIGH, 'members':[], 'option':'-Wtype-limits',
'description':'Expression always evaluates to true or false',
'patterns':[r".*: warning: comparison is always false due to limited range of data type",
r".*: warning: comparison of unsigned expression >= 0 is always true",
r".*: warning: comparison of unsigned expression < 0 is always false"] },
{ 'category':'C/C++', 'severity':severity.MEDIUM, 'members':[], 'option':'',
'description':'Incompatible pointer types',
'patterns':[r".*: warning: assignment from incompatible pointer type",
r".*: warning: return from incompatible pointer type",
r".*: warning: passing argument [0-9]+ of '.*' from incompatible pointer type",
r".*: warning: initialization from incompatible pointer type"] },
{ 'category':'C/C++', 'severity':severity.HIGH, 'members':[], 'option':'-fno-builtin',
'description':'Incompatible declaration of built in function',
'patterns':[r".*: warning: incompatible implicit declaration of built-in function .+"] },
{ 'category':'C/C++', 'severity':severity.MEDIUM, 'members':[], 'option':'-Wunused-parameter',
'description':'Unused parameter',
'patterns':[r".*: warning: unused parameter '.*'"] },
{ 'category':'C/C++', 'severity':severity.MEDIUM, 'members':[], 'option':'-Wunused',
'description':'Unused function, variable or label',
'patterns':[r".*: warning: '.+' defined but not used",
r".*: warning: unused variable '.+'"] },
{ 'category':'C/C++', 'severity':severity.MEDIUM, 'members':[], 'option':'-Wunused-value',
'description':'Statement with no effect',
'patterns':[r".*: warning: statement with no effect"] },
{ 'category':'C/C++', 'severity':severity.MEDIUM, 'members':[], 'option':'-Wmissing-field-initializers',
'description':'Missing initializer',
'patterns':[r".*: warning: missing initializer"] },
{ 'category':'cont.', 'severity':severity.SKIP, 'members':[], 'option':'',
'description':'',
'patterns':[r".*: warning: \(near initialization for '.+'\)"] },
{ 'category':'C/C++', 'severity':severity.MEDIUM, 'members':[], 'option':'-Wformat',
'description':'Format string does not match arguments',
'patterns':[r".*: warning: format '.+' expects type '.+', but argument [0-9]+ has type '.+'"] },
{ 'category':'C/C++', 'severity':severity.MEDIUM, 'members':[], 'option':'-Wformat-extra-args',
'description':'Too many arguments for format string',
'patterns':[r".*: warning: too many arguments for format"] },
{ 'category':'C/C++', 'severity':severity.MEDIUM, 'members':[], 'option':'-Wsign-compare',
'description':'Comparison between signed and unsigned',
'patterns':[r".*: warning: comparison between signed and unsigned",
r".*: warning: comparison of promoted \~unsigned with unsigned",
r".*: warning: signed and unsigned type in conditional expression"] },
{ 'category':'C/C++', 'severity':severity.MEDIUM, 'members':[], 'option':'',
'description':'Comparison between enum and non-enum',
'patterns':[r".*: warning: enumeral and non-enumeral type in conditional expression"] },
{ 'category':'libpng', 'severity':severity.MEDIUM, 'members':[], 'option':'',
'description':'libpng: zero area',
'patterns':[r".*libpng warning: Ignoring attempt to set cHRM RGB triangle with zero area"] },
{ 'category':'aapt', 'severity':severity.MEDIUM, 'members':[], 'option':'',
'description':'aapt: no comment for public symbol',
'patterns':[r".*: warning: No comment for public symbol .+"] },
{ 'category':'C/C++', 'severity':severity.MEDIUM, 'members':[], 'option':'-Wmissing-braces',
'description':'Missing braces around initializer',
'patterns':[r".*: warning: missing braces around initializer.*"] },
{ 'category':'C/C++', 'severity':severity.HARMLESS, 'members':[], 'option':'',
'description':'No newline at end of file',
'patterns':[r".*: warning: no newline at end of file"] },
{ 'category':'C/C++', 'severity':severity.MEDIUM, 'members':[], 'option':'-Wcast-qual',
'description':'Qualifier discarded',
'patterns':[r".*: warning: passing argument [0-9]+ of '.+' discards qualifiers from pointer target type",
r".*: warning: assignment discards qualifiers from pointer target type",
r".*: warning: return discards qualifiers from pointer target type"] },
{ 'category':'C/C++', 'severity':severity.MEDIUM, 'members':[], 'option':'-Wattributes',
'description':'Attribute ignored',
'patterns':[r".*: warning: '_*packed_*' attribute ignored"] },
{ 'category':'C/C++', 'severity':severity.MEDIUM, 'members':[], 'option':'-Wattributes',
'description':'Visibility mismatch',
'patterns':[r".*: warning: '.+' declared with greater visibility than the type of its field '.+'"] },
{ 'category':'C/C++', 'severity':severity.MEDIUM, 'members':[], 'option':'',
'description':'Shift count greater than width of type',
'patterns':[r".*: warning: (left|right) shift count >= width of type"] },
{ 'category':'C/C++', 'severity':severity.MEDIUM, 'members':[], 'option':'',
'description':'extern <foo> is initialized',
'patterns':[r".*: warning: '.+' initialized and declared 'extern'"] },
{ 'category':'C/C++', 'severity':severity.MEDIUM, 'members':[], 'option':'-Wold-style-declaration',
'description':'Old style declaration',
'patterns':[r".*: warning: 'static' is not at beginning of declaration"] },
{ 'category':'C/C++', 'severity':severity.MEDIUM, 'members':[], 'option':'-Wuninitialized',
'description':'Variable may be used uninitialized',
'patterns':[r".*: warning: '.+' may be used uninitialized in this function"] },
{ 'category':'C/C++', 'severity':severity.HIGH, 'members':[], 'option':'-Wuninitialized',
'description':'Variable is used uninitialized',
'patterns':[r".*: warning: '.+' is used uninitialized in this function"] },
{ 'category':'ld', 'severity':severity.MEDIUM, 'members':[], 'option':'-fshort-enums',
'description':'ld: possible enum size mismatch',
'patterns':[r".*: warning: .* uses variable-size enums yet the output is to use 32-bit enums; use of enum values across objects may fail"] },
{ 'category':'C/C++', 'severity':severity.MEDIUM, 'members':[], 'option':'-Wpointer-sign',
'description':'Pointer targets differ in signedness',
'patterns':[r".*: warning: pointer targets in initialization differ in signedness",
r".*: warning: pointer targets in assignment differ in signedness",
r".*: warning: pointer targets in return differ in signedness",
r".*: warning: pointer targets in passing argument [0-9]+ of '.+' differ in signedness"] },
{ 'category':'C/C++', 'severity':severity.MEDIUM, 'members':[], 'option':'-Wstrict-overflow',
'description':'Assuming overflow does not occur',
'patterns':[r".*: warning: assuming signed overflow does not occur when assuming that .* is always (true|false)"] },
{ 'category':'C/C++', 'severity':severity.MEDIUM, 'members':[], 'option':'-Wempty-body',
'description':'Suggest adding braces around empty body',
'patterns':[r".*: warning: suggest braces around empty body in an 'if' statement",
r".*: warning: empty body in an if-statement",
r".*: warning: suggest braces around empty body in an 'else' statement",
r".*: warning: empty body in an else-statement"] },
{ 'category':'C/C++', 'severity':severity.MEDIUM, 'members':[], 'option':'-Wparentheses',
'description':'Suggest adding parentheses',
'patterns':[r".*: warning: suggest explicit braces to avoid ambiguous 'else'",
r".*: warning: suggest parentheses around arithmetic in operand of '.+'",
r".*: warning: suggest parentheses around comparison in operand of '.+'",
r".*: warning: suggest parentheses around '.+?' .+ '.+?'",
r".*: warning: suggest parentheses around assignment used as truth value"] },
{ 'category':'C/C++', 'severity':severity.MEDIUM, 'members':[], 'option':'',
'description':'Static variable used in non-static inline function',
'patterns':[r".*: warning: '.+' is static but used in inline function '.+' which is not static"] },
{ 'category':'C/C++', 'severity':severity.MEDIUM, 'members':[], 'option':'-Wimplicit int',
'description':'No type or storage class (will default to int)',
'patterns':[r".*: warning: data definition has no type or storage class"] },
{ 'category':'cont.', 'severity':severity.SKIP, 'members':[], 'option':'',
'description':'',
'patterns':[r".*: warning: type defaults to 'int' in declaration of '.+'"] },
{ 'category':'cont.', 'severity':severity.SKIP, 'members':[], 'option':'',
'description':'',
'patterns':[r".*: warning: parameter names \(without types\) in function declaration"] },
{ 'category':'C/C++', 'severity':severity.MEDIUM, 'members':[], 'option':'-Wstrict-aliasing',
'description':'Dereferencing <foo> breaks strict aliasing rules',
'patterns':[r".*: warning: dereferencing .* break strict-aliasing rules"] },
{ 'category':'C/C++', 'severity':severity.MEDIUM, 'members':[], 'option':'-Wpointer-to-int-cast',
'description':'Cast from pointer to integer of different size',
'patterns':[r".*: warning: cast from pointer to integer of different size"] },
{ 'category':'C/C++', 'severity':severity.MEDIUM, 'members':[], 'option':'-Wint-to-pointer-cast',
'description':'Cast to pointer from integer of different size',
'patterns':[r".*: warning: cast to pointer from integer of different size"] },
{ 'category':'C/C++', 'severity':severity.MEDIUM, 'members':[], 'option':'',
'description':'Symbol redefined',
'patterns':[r".*: warning: "".+"" redefined"] },
{ 'category':'cont.', 'severity':severity.SKIP, 'members':[], 'option':'',
'description':'',
'patterns':[r".*: warning: this is the location of the previous definition"] },
{ 'category':'ld', 'severity':severity.MEDIUM, 'members':[], 'option':'',
'description':'ld: type and size of dynamic symbol are not defined',
'patterns':[r".*: warning: type and size of dynamic symbol `.+' are not defined"] },
{ 'category':'C/C++', 'severity':severity.MEDIUM, 'members':[], 'option':'',
'description':'Pointer from integer without cast',
'patterns':[r".*: warning: assignment makes pointer from integer without a cast"] },
{ 'category':'C/C++', 'severity':severity.MEDIUM, 'members':[], 'option':'',
'description':'Pointer from integer without cast',
'patterns':[r".*: warning: passing argument [0-9]+ of '.+' makes pointer from integer without a cast"] },
{ 'category':'C/C++', 'severity':severity.MEDIUM, 'members':[], 'option':'',
'description':'Integer from pointer without cast',
'patterns':[r".*: warning: assignment makes integer from pointer without a cast"] },
{ 'category':'C/C++', 'severity':severity.MEDIUM, 'members':[], 'option':'',
'description':'Integer from pointer without cast',
'patterns':[r".*: warning: passing argument [0-9]+ of '.+' makes integer from pointer without a cast"] },
{ 'category':'C/C++', 'severity':severity.MEDIUM, 'members':[], 'option':'',
'description':'Integer from pointer without cast',
'patterns':[r".*: warning: return makes integer from pointer without a cast"] },
{ 'category':'C/C++', 'severity':severity.MEDIUM, 'members':[], 'option':'-Wunknown-pragmas',
'description':'Ignoring pragma',
'patterns':[r".*: warning: ignoring #pragma .+"] },
{ 'category':'C/C++', 'severity':severity.MEDIUM, 'members':[], 'option':'-Wclobbered',
'description':'Variable might be clobbered by longjmp or vfork',
'patterns':[r".*: warning: variable '.+' might be clobbered by 'longjmp' or 'vfork'"] },
{ 'category':'C/C++', 'severity':severity.MEDIUM, 'members':[], 'option':'-Wclobbered',
'description':'Argument might be clobbered by longjmp or vfork',
'patterns':[r".*: warning: argument '.+' might be clobbered by 'longjmp' or 'vfork'"] },
{ 'category':'C/C++', 'severity':severity.MEDIUM, 'members':[], 'option':'-Wredundant-decls',
'description':'Redundant declaration',
'patterns':[r".*: warning: redundant redeclaration of '.+'"] },
{ 'category':'cont.', 'severity':severity.SKIP, 'members':[], 'option':'',
'description':'',
'patterns':[r".*: warning: previous declaration of '.+' was here"] },
{ 'category':'C/C++', 'severity':severity.MEDIUM, 'members':[], 'option':'-Wswitch-enum',
'description':'Enum value not handled in switch',
'patterns':[r".*: warning: enumeration value '.+' not handled in switch"] },
{ 'category':'java', 'severity':severity.MEDIUM, 'members':[], 'option':'-encoding',
'description':'Java: Non-ascii characters used, but ascii encoding specified',
'patterns':[r".*: warning: unmappable character for encoding ascii"] },
{ 'category':'java', 'severity':severity.MEDIUM, 'members':[], 'option':'',
'description':'Java: Non-varargs call of varargs method with inexact argument type for last parameter',
'patterns':[r".*: warning: non-varargs call of varargs method with inexact argument type for last parameter"] },
{ 'category':'aapt', 'severity':severity.MEDIUM, 'members':[], 'option':'',
'description':'aapt: No default translation',
'patterns':[r".*: warning: string '.+' has no default translation in .*"] },
{ 'category':'aapt', 'severity':severity.MEDIUM, 'members':[], 'option':'',
'description':'aapt: Missing default or required localization',
'patterns':[r".*: warning: \*\*\*\* string '.+' has no default or required localization for '.+' in .+"] },
{ 'category':'aapt', 'severity':severity.MEDIUM, 'members':[], 'option':'',
'description':'aapt: String marked untranslatable, but translation exists',
'patterns':[r".*: warning: string '.+' in .* marked untranslatable but exists in locale '??_??'"] },
{ 'category':'aapt', 'severity':severity.MEDIUM, 'members':[], 'option':'',
'description':'aapt: empty span in string',
'patterns':[r".*: warning: empty '.+' span found in text '.+"] },
{ 'category':'C/C++', 'severity':severity.MEDIUM, 'members':[], 'option':'',
'description':'Taking address of temporary',
'patterns':[r".*: warning: taking address of temporary"] },
{ 'category':'C/C++', 'severity':severity.MEDIUM, 'members':[], 'option':'',
'description':'Possible broken line continuation',
'patterns':[r".*: warning: backslash and newline separated by space"] },
{ 'category':'C/C++', 'severity':severity.MEDIUM, 'members':[], 'option':'-Warray-bounds',
'description':'Array subscript out of bounds',
'patterns':[r".*: warning: array subscript is above array bounds",
r".*: warning: array subscript is below array bounds"] },
{ 'category':'C/C++', 'severity':severity.MEDIUM, 'members':[], 'option':'',
'description':'Decimal constant is unsigned only in ISO C90',
'patterns':[r".*: warning: this decimal constant is unsigned only in ISO C90"] },
{ 'category':'C/C++', 'severity':severity.MEDIUM, 'members':[], 'option':'-Wmain',
'description':'main is usually a function',
'patterns':[r".*: warning: 'main' is usually a function"] },
{ 'category':'C/C++', 'severity':severity.MEDIUM, 'members':[], 'option':'',
'description':'Typedef ignored',
'patterns':[r".*: warning: 'typedef' was ignored in this declaration"] },
{ 'category':'C/C++', 'severity':severity.HIGH, 'members':[], 'option':'-Waddress',
'description':'Address always evaluates to true',
'patterns':[r".*: warning: the address of '.+' will always evaluate as 'true'"] },
{ 'category':'C/C++', 'severity':severity.FIXMENOW, 'members':[], 'option':'',
'description':'Freeing a non-heap object',
'patterns':[r".*: warning: attempt to free a non-heap object '.+'"] },
{ 'category':'C/C++', 'severity':severity.MEDIUM, 'members':[], 'option':'-Wchar-subscripts',
'description':'Array subscript has type char',
'patterns':[r".*: warning: array subscript has type 'char'"] },
{ 'category':'C/C++', 'severity':severity.MEDIUM, 'members':[], 'option':'',
'description':'Constant too large for type',
'patterns':[r".*: warning: integer constant is too large for '.+' type"] },
{ 'category':'C/C++', 'severity':severity.MEDIUM, 'members':[], 'option':'-Woverflow',
'description':'Constant too large for type, truncated',
'patterns':[r".*: warning: large integer implicitly truncated to unsigned type"] },
{ 'category':'C/C++', 'severity':severity.MEDIUM, 'members':[], 'option':'-Woverflow',
'description':'Overflow in implicit constant conversion',
'patterns':[r".*: warning: overflow in implicit constant conversion"] },
{ 'category':'C/C++', 'severity':severity.MEDIUM, 'members':[], 'option':'',
'description':'Declaration does not declare anything',
'patterns':[r".*: warning: declaration 'class .+' does not declare anything"] },
{ 'category':'C/C++', 'severity':severity.MEDIUM, 'members':[], 'option':'-Wreorder',
'description':'Initialization order will be different',
'patterns':[r".*: warning: '.+' will be initialized after"] },
{ 'category':'cont.', 'severity':severity.SKIP, 'members':[], 'option':'',
'description':'',
'patterns':[r".*: warning: '.+'"] },
{ 'category':'cont.', 'severity':severity.SKIP, 'members':[], 'option':'',
'description':'',
'patterns':[r".*: warning: base '.+'"] },
{ 'category':'cont.', 'severity':severity.SKIP, 'members':[], 'option':'',
'description':'',
'patterns':[r".*: warning: when initialized here"] },
{ 'category':'C/C++', 'severity':severity.MEDIUM, 'members':[], 'option':'-Wmissing-parameter-type',
'description':'Parameter type not specified',
'patterns':[r".*: warning: type of '.+' defaults to 'int'"] },
{ 'category':'gcc', 'severity':severity.MEDIUM, 'members':[], 'option':'',
'description':'Invalid option for C file',
'patterns':[r".*: warning: command line option "".+"" is valid for C\+\+\/ObjC\+\+ but not for C"] },
{ 'category':'C/C++', 'severity':severity.MEDIUM, 'members':[], 'option':'',
'description':'User warning',
'patterns':[r".*: warning: #warning "".+"""] },
{ 'category':'C/C++', 'severity':severity.MEDIUM, 'members':[], 'option':'-Wextra',
'description':'Dereferencing void*',
'patterns':[r".*: warning: dereferencing 'void \*' pointer"] },
{ 'category':'C/C++', 'severity':severity.MEDIUM, 'members':[], 'option':'-Wextra',
'description':'Comparison of pointer to zero',
'patterns':[r".*: warning: ordered comparison of pointer with integer zero"] },
{ 'category':'C/C++', 'severity':severity.MEDIUM, 'members':[], 'option':'-Wwrite-strings',
'description':'Conversion of string constant to non-const char*',
'patterns':[r".*: warning: deprecated conversion from string constant to '.+'"] },
{ 'category':'C/C++', 'severity':severity.MEDIUM, 'members':[], 'option':'-Wstrict-prototypes',
'description':'Function declaration isn''t a prototype',
'patterns':[r".*: warning: function declaration isn't a prototype"] },
{ 'category':'C/C++', 'severity':severity.MEDIUM, 'members':[], 'option':'-Wignored-qualifiers',
'description':'Type qualifiers ignored on function return value',
'patterns':[r".*: warning: type qualifiers ignored on function return type"] },
{ 'category':'C/C++', 'severity':severity.MEDIUM, 'members':[], 'option':'',
'description':'<foo> declared inside parameter list, scope limited to this definition',
'patterns':[r".*: warning: '.+' declared inside parameter list"] },
{ 'category':'cont.', 'severity':severity.SKIP, 'members':[], 'option':'',
'description':'',
'patterns':[r".*: warning: its scope is only this definition or declaration, which is probably not what you want"] },
{ 'category':'C/C++', 'severity':severity.LOW, 'members':[], 'option':'-Wcomment',
'description':'Line continuation inside comment',
'patterns':[r".*: warning: multi-line comment"] },
{ 'category':'C/C++', 'severity':severity.LOW, 'members':[], 'option':'-Wcomment',
'description':'Comment inside comment',
'patterns':[r".*: warning: "".+"" within comment"] },
{ 'category':'C/C++', 'severity':severity.HARMLESS, 'members':[], 'option':'',
'description':'Extra tokens after #endif',
'patterns':[r".*: warning: extra tokens at end of #endif directive"] },
{ 'category':'C/C++', 'severity':severity.MEDIUM, 'members':[], 'option':'-Wenum-compare',
'description':'Comparison between different enums',
'patterns':[r".*: warning: comparison between 'enum .+' and 'enum .+'"] },
{ 'category':'C/C++', 'severity':severity.MEDIUM, 'members':[], 'option':'-Wconversion',
'description':'Implicit conversion of negative number to unsigned type',
'patterns':[r".*: warning: converting negative value '.+' to '.+'"] },
{ 'category':'C/C++', 'severity':severity.MEDIUM, 'members':[], 'option':'',
'description':'Passing NULL as non-pointer argument',
'patterns':[r".*: warning: passing NULL to non-pointer argument [0-9]+ of '.+'"] },
{ 'category':'C/C++', 'severity':severity.MEDIUM, 'members':[], 'option':'-Wctor-dtor-privacy',
'description':'Class seems unusable because of private ctor/dtor' ,
'patterns':[r".*: warning: all member functions in class '.+' are private"] },
# skip this next one, because it only points out some RefBase-based classes where having a private destructor is perfectly fine
{ 'category':'C/C++', 'severity':severity.SKIP, 'members':[], 'option':'-Wctor-dtor-privacy',
'description':'Class seems unusable because of private ctor/dtor' ,
'patterns':[r".*: warning: 'class .+' only defines a private destructor and has no friends"] },
{ 'category':'C/C++', 'severity':severity.MEDIUM, 'members':[], 'option':'-Wctor-dtor-privacy',
'description':'Class seems unusable because of private ctor/dtor' ,
'patterns':[r".*: warning: 'class .+' only defines private constructors and has no friends"] },
{ 'category':'C/C++', 'severity':severity.MEDIUM, 'members':[], 'option':'-Wpointer-arith',
'description':'void* used in arithmetic' ,
'patterns':[r".*: warning: pointer of type 'void \*' used in (arithmetic|subtraction)",
r".*: warning: wrong type argument to increment"] },
{ 'category':'C/C++', 'severity':severity.MEDIUM, 'members':[], 'option':'-Wsign-promo',
'description':'Overload resolution chose to promote from unsigned or enum to signed type' ,
'patterns':[r".*: warning: passing '.+' chooses 'int' over '.* int'"] },
{ 'category':'cont.', 'severity':severity.SKIP, 'members':[], 'option':'',
'description':'',
'patterns':[r".*: warning: in call to '.+'"] },
{ 'category':'C/C++', 'severity':severity.HIGH, 'members':[], 'option':'-Wextra',
'description':'Base should be explicitly initialized in copy constructor',
'patterns':[r".*: warning: base class '.+' should be explicitly initialized in the copy constructor"] },
{ 'category':'C/C++', 'severity':severity.MEDIUM, 'members':[], 'option':'',
'description':'Converting from <type> to <other type>',
'patterns':[r".*: warning: converting to '.+' from '.+'"] },
{ 'category':'C/C++', 'severity':severity.MEDIUM, 'members':[], 'option':'',
'description':'Return value from void function',
'patterns':[r".*: warning: 'return' with a value, in function returning void"] },
{ 'category':'C/C++', 'severity':severity.LOW, 'members':[], 'option':'',
'description':'Useless specifier',
'patterns':[r".*: warning: useless storage class specifier in empty declaration"] },
{ 'category':'logtags', 'severity':severity.LOW, 'members':[], 'option':'',
'description':'Duplicate logtag',
'patterns':[r".*: warning: tag "".+"" \(None\) duplicated in .+"] },
{ 'category':'C/C++', 'severity':severity.MEDIUM, 'members':[], 'option':'',
'description':'Operator new returns NULL',
'patterns':[r".*: warning: 'operator new' must not return NULL unless it is declared 'throw\(\)' .+"] },
{ 'category':'C/C++', 'severity':severity.MEDIUM, 'members':[], 'option':'',
'description':'NULL used in arithmetic',
'patterns':[r".*: warning: NULL used in arithmetic"] },
{ 'category':'C/C++', 'severity':severity.MEDIUM, 'members':[], 'option':'',
'description':'Use of deprecated method',
'patterns':[r".*: warning: '.+' is deprecated .+"] },
# these next ones are to deal with formatting problems resulting from the log being mixed up by 'make -j'
{ 'category':'C/C++', 'severity':severity.SKIP, 'members':[], 'option':'',
'description':'',
'patterns':[r".*: warning: ,$"] },
{ 'category':'C/C++', 'severity':severity.SKIP, 'members':[], 'option':'',
'description':'',
'patterns':[r".*: warning: $"] },
{ 'category':'C/C++', 'severity':severity.SKIP, 'members':[], 'option':'',
'description':'',
'patterns':[r".*: warning: In file included from .+,"] },
# catch-all for warnings this script doesn't know about yet
{ 'category':'C/C++', 'severity':severity.UNKNOWN, 'members':[], 'option':'',
'description':'Unclassified/unrecognized warnings',
'patterns':[r".*: warning: .+"] },
]
# Running anchor id; each table links to anchor-1 / anchor+1 for prev/next.
anchor = 0
# Index into row_colors, toggled per emitted row for alternating stripes.
cur_row_color = 0
row_colors = [ 'e0e0e0', 'd0d0d0' ]
def output(text):
"""Write *text* to stdout; the trailing comma suppresses the newline (py2)."""
print text,
def htmlbig(param):
    """Wrap *param* in a <font> tag two sizes larger than the default."""
    return '<font size="+2">%s</font>' % param
def dumphtmlprologue(title):
    """Emit the HTML document head plus an enlarged page heading."""
    fragments = ('<html>\n<head>\n<title>' + title + '</title>\n<body>\n',
                 htmlbig(title),
                 '<p>\n')
    for fragment in fragments:
        output(fragment)
def tablerow(text):
    """Emit one full-width table row, alternating the background color."""
    global cur_row_color
    shade = row_colors[cur_row_color]
    cur_row_color ^= 1  # flip 0 <-> 1 for the next row
    output('<tr bgcolor="' + shade + '"><td colspan="2">')
    output(text)
    output('</td></tr>')
def begintable(text, backgroundcolor):
    """Open a new anchored table.

    text[0] becomes the enlarged heading, any remaining entries are extra
    heading lines; a side cell links to the previous/next table anchors.
    """
    global anchor
    opening = ('<table border="1" rules="cols" frame="box" width="100%" bgcolor="black">'
               '<tr bgcolor="' + backgroundcolor + '"><a name="anchor' + str(anchor) + '"><td>')
    output(opening)
    heading_lines = [htmlbig(text[0])] + list(text[1:])
    for heading in heading_lines:
        output(heading + '<br>')
    output('</td>')
    nav = ('<td width="100" bgcolor="grey"><a align="right" href="#anchor' + str(anchor - 1) +
           '">previous</a><br><a align="right" href="#anchor' + str(anchor + 1) + '">next</a>')
    output(nav)
    output('</td></a></tr>')
    anchor += 1
def endtable():
"""Close the table opened by begintable()."""
output('</table><p>')
# dump some stats about total number of warnings and such
def dumpstats():
    """Print totals of classified vs. unclassified warnings as HTML."""
    known = unknown = 0
    for category in warnpatterns:
        count = len(category['members'])
        if category['severity'] == severity.UNKNOWN:
            unknown += count
        elif category['severity'] != severity.SKIP:
            known += count
    output('Number of classified warnings: <b>' + str(known) + '</b><br>' )
    output('Number of unclassified warnings: <b>' + str(unknown) + '</b><br>')
    total = unknown + known
    output('Total number of warnings: <b>' + str(total) + '</b>')
    # A suspiciously small total usually means the log came from an
    # incremental rather than a full build.
    if total < 1000:
        output('(low count may indicate incremental build)')
    output('<p>')
def allpatterns(cat):
    """Join every regex of *cat* with ' / ' (including a trailing separator)."""
    return ''.join(pattern + ' / ' for pattern in cat['patterns'])
def descriptionfor(cat):
    """Return *cat*'s description, falling back to its joined patterns."""
    desc = cat['description']
    if desc == '':
        return allpatterns(cat)
    return desc
# show which warnings no longer occur
def dumpfixed():
    """List warning categories with zero occurrences in this log.

    These warnings no longer fire; the table suggests promoting them to
    errors before they creep back into the build. The table is only
    opened if at least one such category exists.
    """
    tablestarted = False
    for i in warnpatterns:
        if len(i['members']) == 0 and i['severity'] != severity.SKIP:
            if not tablestarted:
                tablestarted = True
                # Fixed typos in the emitted text: "occurences" -> "occurrences",
                # "in to" -> "into".
                begintable(['Fixed warnings', 'No more occurrences. Please consider turning these into errors if possible, before they are reintroduced into the build'], 'blue')
            tablerow(i['description'] + ' (' + allpatterns(i) + ') ' + i['option'])
    if tablestarted:
        endtable()
# dump a category, provided it is not marked as 'SKIP' and has more than 0 occurrences
def dumpcategory(cat):
    """Emit one table for *cat* with all its matched warning lines.

    Categories marked SKIP and categories with no members are suppressed.
    """
    if cat['severity'] != severity.SKIP and len(cat['members']) != 0:
        # Typo fix in emitted text: "occurences" -> "occurrences".
        header = [descriptionfor(cat), str(len(cat['members'])) + ' occurrences:']
        if cat['option'] != '':
            # Splice the related compiler option in between the two header lines.
            header[1:1] = [' (related option: ' + cat['option'] + ')']
        begintable(header, colorforseverity(cat['severity']))
        for i in cat['members']:
            tablerow(i)
        endtable()
# dump everything for a given severity
def dumpseverity(sev):
    """Emit a table for every warning category of severity *sev*."""
    for category in warnpatterns:
        if category['severity'] == sev:
            dumpcategory(category)
def classifywarning(line):
"""File *line* under the first category whose compiled regex matches it.

Lines matching no category fall through silently (see comment below).
"""
for i in warnpatterns:
for cpat in i['compiledpatterns']:
if cpat.match(line):
i['members'].append(line)
return
else:
# If we end up here, there was a problem parsing the log
# probably caused by 'make -j' mixing the output from
# 2 or more concurrent compiles
pass
# precompiling every pattern speeds up parsing by about 30x
def compilepatterns():
    """Precompile each category's regex list into 'compiledpatterns'.

    Per the original author, precompiling speeds up parsing by about 30x.
    """
    for category in warnpatterns:
        category['compiledpatterns'] = [re.compile(p) for p in category['patterns']]
# Main driver: scan the build log, classify warnings, then dump the report.
infile = open(sys.argv[1], 'r')
# NOTE(review): 'warnings' is never used below in this script — looks dead.
warnings = []
# Build metadata scraped from the first lines of the log, if present.
platformversion = 'unknown'
targetproduct = 'unknown'
targetvariant = 'unknown'
linecounter = 0
warningpattern = re.compile('.* warning:.*')
compilepatterns()
# read the log file and classify all the warnings
lastmatchedline = ''
for line in infile:
# replace fancy quotes with plain ol' quotes
line = line.replace("‘", "'");
line = line.replace("’", "'");
if warningpattern.match(line):
# Skip immediate duplicates (the same warning repeated back-to-back).
if line != lastmatchedline:
classifywarning(line)
lastmatchedline = line
else:
# save a little bit of time by only doing this for the first few lines
if linecounter < 50:
linecounter +=1
m = re.search('(?<=^PLATFORM_VERSION=).*', line)
if m != None:
platformversion = m.group(0)
m = re.search('(?<=^TARGET_PRODUCT=).*', line)
if m != None:
targetproduct = m.group(0)
m = re.search('(?<=^TARGET_BUILD_VARIANT=).*', line)
if m != None:
targetvariant = m.group(0)
# dump the html output to stdout
dumphtmlprologue('Warnings for ' + platformversion + ' - ' + targetproduct + ' - ' + targetvariant)
dumpstats()
# One section per severity, most severe first, then the fixed-warnings list.
dumpseverity(severity.FIXMENOW)
dumpseverity(severity.HIGH)
dumpseverity(severity.MEDIUM)
dumpseverity(severity.LOW)
dumpseverity(severity.HARMLESS)
dumpseverity(severity.UNKNOWN)
dumpfixed()
| [
[
1,
0,
0.0071,
0.0018,
0,
0.66,
0,
509,
0,
1,
0,
0,
509,
0,
0
],
[
1,
0,
0.0089,
0.0018,
0,
0.66,
0.0244,
540,
0,
1,
0,
0,
540,
0,
0
],
[
4,
0,
0.0142,
0.0053,
0,
... | [
"import sys",
"import re",
"if len(sys.argv) == 1:\n print('usage: ' + sys.argv[0] + ' <build.log>')\n sys.exit()",
" print('usage: ' + sys.argv[0] + ' <build.log>')",
" sys.exit()",
"class severity:\n UNKNOWN=0\n SKIP=100\n FIXMENOW=1\n HIGH=2\n MEDIUM=3\n LOW=4\n HARMLES... |
#!/usr/bin/env python
# vim: ts=2 sw=2 nocindent
import re
import sys
def choose_regex(regs, line):
    """Find the first (handler, regex) pair in *regs* whose regex matches *line*.

    Returns (handler, match) on success, (None, None) when nothing matches.
    """
    for handler, pattern in regs:
        match = pattern.match(line)
        if match:
            return (handler, match)
    return (None, None)
def gather(included, deps):
    """One expansion step of the reachability closure.

    Returns the *included* set plus the source node of every edge in
    *deps* whose target is already included.
    """
    expanded = set(included)
    expanded.update(src for src, dst in deps if dst in included)
    return expanded
def main():
"""Read a Graphviz dot graph from stdin, optionally filter it to the
subgraph reaching the targets named in argv, and print it to stdout."""
# Edges as (source, target); node attribute lines as (node, attrs).
deps = []
infos = []
def dependency(m):
deps.append((m.group(1), m.group(2)))
def info(m):
infos.append((m.group(1), m.group(2)))
REGS = [
(dependency, re.compile(r'"(.*)"\s*->\s*"(.*)"')),
(info, re.compile(r'"(.*)"(\s*\[.*\])')),
]
lines = sys.stdin.readlines()
lines = [line.strip() for line in lines]
for line in lines:
func,m = choose_regex(REGS, line)
if func:
func(m)
# filter
sys.stderr.write("argv: " + str(sys.argv) + "\n")
if not (len(sys.argv) == 2 and sys.argv[1] == "--all"):
targets = sys.argv[1:]
included = set(targets)
# Fixed-point iteration: keep calling gather() until the included
# set stops growing, i.e. full transitive closure over the edges.
prevLen = -1
while prevLen != len(included):
prevLen = len(included)
included = gather(included, deps)
deps = [dep for dep in deps if dep[1] in included]
infos = [info for info in infos if info[0] in included]
# Re-emit the (possibly filtered) graph.
print "digraph {"
print "graph [ ratio=.5 ];"
for dep in deps:
print '"%s" -> "%s"' % dep
for info in infos:
print '"%s"%s' % info
print "}"
# Standard script entry point.
if __name__ == "__main__":
main()
| [
[
1,
0,
0.0588,
0.0147,
0,
0.66,
0,
540,
0,
1,
0,
0,
540,
0,
0
],
[
1,
0,
0.0735,
0.0147,
0,
0.66,
0.2,
509,
0,
1,
0,
0,
509,
0,
0
],
[
2,
0,
0.1397,
0.0882,
0,
0.6... | [
"import re",
"import sys",
"def choose_regex(regs, line):\n for func,reg in regs:\n m = reg.match(line)\n if m:\n return (func,m)\n return (None,None)",
" for func,reg in regs:\n m = reg.match(line)\n if m:\n return (func,m)",
" m = reg.match(line)",
" if m:\n return (f... |
#!/usr/bin/env python
#
# Copyright (C) 2009 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
# Put the modifications that you need to make into the /system/build.prop into this
# function. The prop object has get(name) and put(name,value) methods.
def mangle_build_prop(prop):
  """Hook for edits to /system/build.prop.

  `prop` is a PropFile-style object with get(name) and put(name, value)
  methods (see PropFile below).  Intentionally a no-op by default.
  """
  pass
# Put the modifications that you need to make into the /system/build.prop into this
# function. The prop object has get(name) and put(name,value) methods.
def mangle_default_prop(prop):
  """Apply default.prop edits: turn on adb over USB for debuggable
  (userdebug) builds, and guarantee persist.sys.usb.config always has an
  explicit value."""
  # Debuggable builds get "adb" appended to the USB configuration.
  if prop.get("ro.debuggable") == "1":
    current = prop.get("persist.sys.usb.config")
    updated = "adb" if current == "" else current + ",adb"
    prop.put("persist.sys.usb.config", updated)
  # UsbDeviceManager expects a value here; if it sees none it silently
  # defaults to "adb", so it is better to be explicit and write "none".
  if not prop.get("persist.sys.usb.config"):
    prop.put("persist.sys.usb.config", "none")
class PropFile:
  """In-memory editor for an Android .prop file (one name=value per line)."""

  def __init__(self, lines):
    # Each input line is expected to end with a newline; drop that final
    # character so self.lines holds bare "name=value" strings.
    self.lines = [line[:-1] for line in lines]

  def get(self, name):
    """Return the value of the first `name=` line, or "" when absent."""
    prefix = name + "="
    for entry in self.lines:
      if entry.startswith(prefix):
        return entry[len(prefix):]
    return ""

  def put(self, name, value):
    """Overwrite the first `name=` line in place, or append a new one."""
    prefix = name + "="
    for idx, entry in enumerate(self.lines):
      if entry.startswith(prefix):
        self.lines[idx] = prefix + value
        return
    self.lines.append(prefix + value)

  def write(self, f):
    """Serialize all lines (newline-terminated) to the open file `f`."""
    f.write("\n".join(self.lines))
    f.write("\n")
def main(argv):
  """Read the prop file named by argv[1], run the matching mangler over
  it, and write the result back in place.  Exits 1 for any other path."""
  filename = argv[1]
  with open(filename) as infile:
    properties = PropFile(infile.readlines())

  # Pick the mangler by which prop file this is.
  if filename.endswith("/build.prop"):
    mangle_build_prop(properties)
  elif filename.endswith("/default.prop"):
    mangle_default_prop(properties)
  else:
    sys.stderr.write("bad command line: " + str(argv) + "\n")
    sys.exit(1)

  with open(filename, 'w+') as outfile:
    properties.write(outfile)
if __name__ == "__main__":
  main(sys.argv)
| [
[
1,
0,
0.2,
0.0118,
0,
0.66,
0,
509,
0,
1,
0,
0,
509,
0,
0
],
[
2,
0,
0.2529,
0.0235,
0,
0.66,
0.2,
792,
0,
1,
0,
0,
0,
0,
0
],
[
2,
0,
0.3882,
0.1765,
0,
0.66,
... | [
"import sys",
"def mangle_build_prop(prop):\n pass",
"def mangle_default_prop(prop):\n # If ro.debuggable is 1, then enable adb on USB by default\n # (this is for userdebug builds)\n if prop.get(\"ro.debuggable\") == \"1\":\n val = prop.get(\"persist.sys.usb.config\")\n if val == \"\":\n val = \"... |
#!/usr/bin/env python
#
# Copyright (C) 2012 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
import os
# hashlib exists on Python 2.5+; fall back to the old sha module otherwise.
try:
  from hashlib import sha1
except ImportError:
  from sha import sha as sha1
# argv[1] is a board-info-style file with "require <key>=<v1|v2|...>" lines;
# the remaining args are "<key>:<image-file>" pairs to verify.  With nothing
# to check, succeed silently.
if len(sys.argv) < 2:
  sys.exit(0)
# Parse the allowed versions for each required key.
build_info = {}
f = open(sys.argv[1])
for line in f:
  line = line.strip()
  if line.startswith("require"):
    key, value = line.split()[1].split("=", 1)
    build_info[key] = value
f.close()
bad = False
for item in sys.argv[2:]:
  key, fn = item.split(":", 1)
  values = build_info.get(key, None)
  if not values:
    # No requirement declared for this key; nothing to verify.
    continue
  values = values.split("|")
  # SHA-1 of the actual image file on disk.
  f = open(fn, "rb")
  digest = sha1(f.read()).hexdigest()
  f.close()
  # "<fn>.sha1" maps known image hashes to version strings.
  versions = {}
  try:
    f = open(fn + ".sha1")
  except IOError:
    if not bad: print
    print "*** Error opening \"%s.sha1\"; can't verify %s" % (fn, key)
    bad = True
    continue
  for line in f:
    line = line.strip()
    if not line or line.startswith("#"): continue
    h, v = line.split()
    versions[h] = v
  if digest not in versions:
    if not bad: print
    print "*** SHA-1 hash of \"%s\" doesn't appear in \"%s.sha1\"" % (fn, fn)
    bad = True
    continue
  if versions[digest] not in values:
    if not bad: print
    print "*** \"%s\" is version %s; not any %s allowed by \"%s\"." % (
        fn, versions[digest], key, sys.argv[1])
    bad = True
# Any failure above exits nonzero (after a blank separator line).
if bad:
  print
  sys.exit(1)
| [
[
1,
0,
0.2237,
0.0132,
0,
0.66,
0,
509,
0,
1,
0,
0,
509,
0,
0
],
[
1,
0,
0.2368,
0.0132,
0,
0.66,
0.1,
688,
0,
1,
0,
0,
688,
0,
0
],
[
7,
0,
0.2829,
0.0526,
0,
0.6... | [
"import sys",
"import os",
"try:\n from hashlib import sha1\nexcept ImportError:\n from sha import sha as sha1",
" from hashlib import sha1",
" from sha import sha as sha1",
"if len(sys.argv) < 2:\n sys.exit(0)",
" sys.exit(0)",
"build_info = {}",
"f = open(sys.argv[1])",
"for line in f:\n ... |
#!/usr/bin/env python
#
# Copyright (C) 2008 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Signs all the APK files in a target-files zipfile, producing a new
target-files zip.
Usage: sign_target_files_apks [flags] input_target_files output_target_files
-e (--extra_apks) <name,name,...=key>
Add extra APK name/key pairs as though they appeared in
apkcerts.txt (so mappings specified by -k and -d are applied).
Keys specified in -e override any value for that app contained
in the apkcerts.txt file. Option may be repeated to give
multiple extra packages.
-k (--key_mapping) <src_key=dest_key>
Add a mapping from the key name as specified in apkcerts.txt (the
src_key) to the real key you wish to sign the package with
(dest_key). Option may be repeated to give multiple key
mappings.
-d (--default_key_mappings) <dir>
Set up the following key mappings:
$devkey/devkey ==> $dir/releasekey
$devkey/testkey ==> $dir/releasekey
$devkey/media ==> $dir/media
$devkey/shared ==> $dir/shared
$devkey/platform ==> $dir/platform
where $devkey is the directory part of the value of
default_system_dev_certificate from the input target-files's
META/misc_info.txt. (Defaulting to "build/target/product/security"
if the value is not present in misc_info.
-d and -k options are added to the set of mappings in the order
in which they appear on the command line.
-o (--replace_ota_keys)
Replace the certificate (public key) used by OTA package
verification with the one specified in the input target_files
zip (in the META/otakeys.txt file). Key remapping (-k and -d)
is performed on this key.
-t (--tag_changes) <+tag>,<-tag>,...
Comma-separated list of changes to make to the set of tags (in
the last component of the build fingerprint). Prefix each with
'+' or '-' to indicate whether that tag should be added or
removed. Changes are processed in the order they appear.
Default value is "-test-keys,-dev-keys,+release-keys".
"""
import sys
if sys.hexversion < 0x02040000:
print >> sys.stderr, "Python 2.4 or newer is required."
sys.exit(1)
import cStringIO
import copy
import os
import re
import subprocess
import tempfile
import zipfile
import common
OPTIONS = common.OPTIONS
OPTIONS.extra_apks = {}
OPTIONS.key_map = {}
OPTIONS.replace_ota_keys = False
OPTIONS.tag_changes = ("-test-keys", "-dev-keys", "+release-keys")
def GetApkCerts(tf_zip):
  """Return a dict mapping APK name -> signing cert for this target-files
  zip.

  Starts from common.ReadApkCerts, applies the -k/-d key remapping, then
  overlays any -e command-line entries; an empty -e value means
  "PRESIGNED" (leave the APK untouched).
  """
  certmap = common.ReadApkCerts(tf_zip)
  # apply the key remapping to the contents of the file
  for apk, cert in certmap.iteritems():
    certmap[apk] = OPTIONS.key_map.get(cert, cert)
  # apply all the -e options, overriding anything in the file
  for apk, cert in OPTIONS.extra_apks.iteritems():
    if not cert:
      cert = "PRESIGNED"
    certmap[apk] = OPTIONS.key_map.get(cert, cert)
  return certmap
def CheckAllApksSigned(input_tf_zip, apk_key_map):
  """Check that all the APKs we want to sign have keys specified, and
  error out if they don't."""
  unknown_apks = []
  for info in input_tf_zip.infolist():
    if info.filename.endswith(".apk"):
      name = os.path.basename(info.filename)
      if name not in apk_key_map:
        unknown_apks.append(name)
  if unknown_apks:
    # Refuse to continue rather than produce a partially signed image.
    print "ERROR: no key specified for:\n\n ",
    print "\n ".join(unknown_apks)
    print "\nUse '-e <apkname>=' to specify a key (which may be an"
    print "empty string to not sign this apk)."
    sys.exit(1)
def SignApk(data, keyname, pw):
  """Sign the APK bytes in `data` with key `keyname` (password `pw`) and
  return the signed bytes.  Delegates to common.SignFile with 4-byte
  zipalign."""
  unsigned_file = tempfile.NamedTemporaryFile()
  unsigned_file.write(data)
  unsigned_file.flush()
  signed_file = tempfile.NamedTemporaryFile()
  # SignFile writes its output to the path signed_file.name; read the
  # result back through our still-open handle.
  common.SignFile(unsigned_file.name, signed_file.name, keyname, pw, align=4)
  result = signed_file.read()
  unsigned_file.close()
  signed_file.close()
  return result
def SignApks(input_tf_zip, output_tf_zip, apk_key_map, key_passwords):
  """Copy input_tf_zip to output_tf_zip, re-signing APKs on the way.

  *.apk entries are signed with their key from apk_key_map (unless the
  key is one of common.SPECIAL_CERT_STRINGS); SYSTEM/build.prop and
  RECOVERY/RAMDISK/default.prop are run through RewriteProps; all other
  entries are copied verbatim.
  """
  # Width of the longest APK basename; used only to align progress output.
  maxsize = max([len(os.path.basename(i.filename))
                 for i in input_tf_zip.infolist()
                 if i.filename.endswith('.apk')])
  for info in input_tf_zip.infolist():
    data = input_tf_zip.read(info.filename)
    out_info = copy.copy(info)
    if info.filename.endswith(".apk"):
      name = os.path.basename(info.filename)
      key = apk_key_map[name]
      if key not in common.SPECIAL_CERT_STRINGS:
        print " signing: %-*s (%s)" % (maxsize, name, key)
        signed_data = SignApk(data, key, key_passwords[key])
        output_tf_zip.writestr(out_info, signed_data)
      else:
        # an APK we're not supposed to sign.
        print "NOT signing: %s" % (name,)
        output_tf_zip.writestr(out_info, data)
    elif info.filename in ("SYSTEM/build.prop",
                           "RECOVERY/RAMDISK/default.prop"):
      print "rewriting %s:" % (info.filename,)
      new_data = RewriteProps(data)
      output_tf_zip.writestr(out_info, new_data)
    else:
      # a non-APK file; copy it verbatim
      output_tf_zip.writestr(out_info, data)
def EditTags(tags):
  """Given a string containing comma-separated tags, apply the edits
  specified in OPTIONS.tag_changes and return the updated string."""
  tag_set = set(tags.split(","))
  for change in OPTIONS.tag_changes:
    op, tag = change[0], change[1:]
    if op == "-":
      tag_set.discard(tag)
    elif op == "+":
      tag_set.add(tag)
  return ",".join(sorted(tag_set))
def RewriteProps(data):
  """Rewrite the text of a build-property file, applying EditTags to the
  tag component of ro.build.fingerprint, ro.build.description and
  ro.build.tags.  Returns the new file contents as a string."""
  output = []
  for line in data.split("\n"):
    line = line.strip()
    original_line = line
    if line and line[0] != '#':
      key, value = line.split("=", 1)
      if key == "ro.build.fingerprint":
        # Tags are the final "/"-separated component of the fingerprint.
        pieces = value.split("/")
        pieces[-1] = EditTags(pieces[-1])
        value = "/".join(pieces)
      elif key == "ro.build.description":
        # Description is five space-separated words, tags last.
        pieces = value.split(" ")
        assert len(pieces) == 5
        pieces[-1] = EditTags(pieces[-1])
        value = " ".join(pieces)
      elif key == "ro.build.tags":
        value = EditTags(value)
      line = key + "=" + value
      if line != original_line:
        print " replace: ", original_line
        print " with: ", line
    output.append(line)
  return "\n".join(output) + "\n"
def ReplaceOtaKeys(input_tf_zip, output_tf_zip, misc_info):
try:
keylist = input_tf_zip.read("META/otakeys.txt").split()
except KeyError:
raise ExternalError("can't read META/otakeys.txt from input")
extra_recovery_keys = misc_info.get("extra_recovery_keys", None)
if extra_recovery_keys:
extra_recovery_keys = [OPTIONS.key_map.get(k, k) + ".x509.pem"
for k in extra_recovery_keys.split()]
if extra_recovery_keys:
print "extra recovery-only key(s): " + ", ".join(extra_recovery_keys)
else:
extra_recovery_keys = []
mapped_keys = []
for k in keylist:
m = re.match(r"^(.*)\.x509\.pem$", k)
if not m:
raise ExternalError("can't parse \"%s\" from META/otakeys.txt" % (k,))
k = m.group(1)
mapped_keys.append(OPTIONS.key_map.get(k, k) + ".x509.pem")
if mapped_keys:
print "using:\n ", "\n ".join(mapped_keys)
print "for OTA package verification"
else:
devkey = misc_info.get("default_system_dev_certificate",
"build/target/product/security/testkey")
mapped_keys.append(
OPTIONS.key_map.get(devkey, devkey) + ".x509.pem")
print "META/otakeys.txt has no keys; using", mapped_keys[0]
# recovery uses a version of the key that has been slightly
# predigested (by DumpPublicKey.java) and put in res/keys.
# extra_recovery_keys are used only in recovery.
p = common.Run(["java", "-jar",
os.path.join(OPTIONS.search_path, "framework", "dumpkey.jar")]
+ mapped_keys + extra_recovery_keys,
stdout=subprocess.PIPE)
data, _ = p.communicate()
if p.returncode != 0:
raise ExternalError("failed to run dumpkeys")
common.ZipWriteStr(output_tf_zip, "RECOVERY/RAMDISK/res/keys", data)
# SystemUpdateActivity uses the x509.pem version of the keys, but
# put into a zipfile system/etc/security/otacerts.zip.
# We DO NOT include the extra_recovery_keys (if any) here.
tempfile = cStringIO.StringIO()
certs_zip = zipfile.ZipFile(tempfile, "w")
for k in mapped_keys:
certs_zip.write(k)
certs_zip.close()
common.ZipWriteStr(output_tf_zip, "SYSTEM/etc/security/otacerts.zip",
tempfile.getvalue())
def BuildKeyMap(misc_info, key_mapping_options):
  """Populate OPTIONS.key_map from the accumulated -k/-d options.

  Each entry is (src, dest); a src of None marks a -d option, which
  expands into the five standard mappings rooted at the directory of the
  device's default_system_dev_certificate.
  """
  for src, dest in key_mapping_options:
    if src is None:  # -d option
      devkey = misc_info.get("default_system_dev_certificate",
                             "build/target/product/security/testkey")
      devkeydir = os.path.dirname(devkey)
      # testkey and devkey both map to releasekey; the rest keep their name.
      for name in ("testkey", "devkey"):
        OPTIONS.key_map[devkeydir + "/" + name] = dest + "/releasekey"
      for name in ("media", "shared", "platform"):
        OPTIONS.key_map[devkeydir + "/" + name] = dest + "/" + name
    else:
      OPTIONS.key_map[src] = dest
def main(argv):
  """Command-line entry point: parse the options documented in the module
  docstring, open the input/output target-files zips, build the key map,
  and sign everything."""
  key_mapping_options = []
  def option_handler(o, a):
    # Handles this script's extra options; returns False for options it
    # doesn't recognize so common.ParseOptions can report them.
    if o in ("-e", "--extra_apks"):
      names, key = a.split("=")
      names = names.split(",")
      for n in names:
        OPTIONS.extra_apks[n] = key
    elif o in ("-d", "--default_key_mappings"):
      # (None, dir) marks a -d option for BuildKeyMap.
      key_mapping_options.append((None, a))
    elif o in ("-k", "--key_mapping"):
      key_mapping_options.append(a.split("=", 1))
    elif o in ("-o", "--replace_ota_keys"):
      OPTIONS.replace_ota_keys = True
    elif o in ("-t", "--tag_changes"):
      # Validate and normalize "+tag"/"-tag" entries.
      new = []
      for i in a.split(","):
        i = i.strip()
        if not i or i[0] not in "-+":
          raise ValueError("Bad tag change '%s'" % (i,))
        new.append(i[0] + i[1:].strip())
      OPTIONS.tag_changes = tuple(new)
    else:
      return False
    return True
  args = common.ParseOptions(argv, __doc__,
                             extra_opts="e:d:k:ot:",
                             extra_long_opts=["extra_apks=",
                                              "default_key_mappings=",
                                              "key_mapping=",
                                              "replace_ota_keys",
                                              "tag_changes="],
                             extra_option_handler=option_handler)
  if len(args) != 2:
    common.Usage(__doc__)
    sys.exit(1)
  input_zip = zipfile.ZipFile(args[0], "r")
  output_zip = zipfile.ZipFile(args[1], "w")
  misc_info = common.LoadInfoDict(input_zip)
  BuildKeyMap(misc_info, key_mapping_options)
  apk_key_map = GetApkCerts(input_zip)
  # Fail fast if any APK lacks a key before touching the output.
  CheckAllApksSigned(input_zip, apk_key_map)
  key_passwords = common.GetKeyPasswords(set(apk_key_map.values()))
  SignApks(input_zip, output_zip, apk_key_map, key_passwords)
  if OPTIONS.replace_ota_keys:
    ReplaceOtaKeys(input_zip, output_zip, misc_info)
  input_zip.close()
  output_zip.close()
  print "done."
if __name__ == '__main__':
  try:
    main(sys.argv[1:])
  except common.ExternalError, e:
    print
    print " ERROR: %s" % (e,)
    print
    sys.exit(1)
| [
[
1,
0,
0.0037,
0.0037,
0,
0.66,
0,
509,
0,
1,
0,
0,
509,
0,
0
],
[
1,
0,
0.0112,
0.0037,
0,
0.66,
0.0588,
764,
0,
1,
0,
0,
764,
0,
0
],
[
1,
0,
0.0149,
0.0037,
0,
... | [
"import sys",
"import cStringIO",
"import copy",
"import os",
"import re",
"import subprocess",
"import tempfile",
"import zipfile",
"import common",
"def GetApkCerts(tf_zip):\n certmap = common.ReadApkCerts(tf_zip)\n\n # apply the key remapping to the contents of the file\n for apk, cert in ce... |
#!/usr/bin/env python
#
# Copyright (C) 2009 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the 'License');
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an 'AS IS' BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import operator, os, sys
def get_file_size(path):
  """Return the size in bytes of `path`, without following symlinks."""
  return os.lstat(path).st_size
def main(argv):
  """List every file under the roots in argv[1:], largest first, as
  "<size> <path-relative-to-the-root's-parent>"."""
  output = []  # (size, relative path) rows
  roots = argv[1:]
  for root in roots:
    # Length of the part of `root` up to (not including) its last
    # separator; used to relativize walked paths.
    base = len(root[:root.rfind(os.path.sep)])
    for dir, dirs, files in os.walk(root):
      relative = dir[base:]
      for f in files:
        try:
          row = (
              get_file_size(os.path.sep.join((dir, f))),
              os.path.sep.join((relative, f)),
            )
          output.append(row)
        except os.error:
          # File vanished or is unreadable; skip it silently.
          pass
  output.sort(key=operator.itemgetter(0), reverse=True)
  for row in output:
    print "%12d %s" % row
if __name__ == '__main__':
  main(sys.argv)
| [
[
1,
0,
0.4,
0.0222,
0,
0.66,
0,
616,
0,
3,
0,
0,
616,
0,
0
],
[
2,
0,
0.4667,
0.0667,
0,
0.66,
0.3333,
852,
0,
1,
1,
0,
0,
0,
1
],
[
14,
1,
0.4667,
0.0222,
1,
0.03... | [
"import operator, os, sys",
"def get_file_size(path):\n st = os.lstat(path)\n return st.st_size;",
" st = os.lstat(path)",
" return st.st_size;",
"def main(argv):\n output = []\n roots = argv[1:]\n for root in roots:\n base = len(root[:root.rfind(os.path.sep)])\n for dir, dirs, files in os.walk... |
#!/usr/bin/env python
#
# Copyright (C) 2009 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
#
# Finds files with the specified name under a particular directory, stopping
# the search in a given subdirectory when the file is found.
#
import os
import sys
def perform_find(mindepth, prune, dirlist, filename):
result = []
pruneleaves = set(map(lambda x: os.path.split(x)[1], prune))
for rootdir in dirlist:
rootdepth = rootdir.count("/")
for root, dirs, files in os.walk(rootdir, followlinks=True):
# prune
check_prune = False
for d in dirs:
if d in pruneleaves:
check_prune = True
break
if check_prune:
i = 0
while i < len(dirs):
if dirs[i] in prune:
del dirs[i]
else:
i += 1
# mindepth
if mindepth > 0:
depth = 1 + root.count("/") - rootdepth
if depth < mindepth:
continue
# match
if filename in files:
result.append(os.path.join(root, filename))
del dirs[:]
return result
def usage():
  """Print the usage message to stderr and exit with status 1."""
  sys.stderr.write("""Usage: %(progName)s [<options>] <dirlist> <filename>
Options:
 --mindepth=<mindepth>
 Both behave in the same way as their find(1) equivalents.
 --prune=<dirname>
 Avoids returning results from inside any directory called <dirname>
 (e.g., "*/out/*"). May be used multiple times.
""" % {
      "progName": os.path.split(sys.argv[0])[1],
    })
  sys.exit(1)
def main(argv):
  """Parse --mindepth/--prune options, then run perform_find over the
  given roots and print the sorted, de-duplicated matches."""
  mindepth = -1
  prune = []
  i=1
  # Consume leading "--opt" arguments; anything malformed prints usage.
  while i<len(argv) and len(argv[i])>2 and argv[i][0:2] == "--":
    arg = argv[i]
    if arg.startswith("--mindepth="):
      try:
        mindepth = int(arg[len("--mindepth="):])
      except ValueError:
        usage()
    elif arg.startswith("--prune="):
      p = arg[len("--prune="):]
      if len(p) == 0:
        usage()
      prune.append(p)
    else:
      usage()
    i += 1
  if len(argv)-i < 2: # need both <dirlist> and <filename>
    usage()
  dirlist = argv[i:-1]
  filename = argv[-1]
  # set() drops duplicate hits (possible with followlinks/overlapping roots).
  results = list(set(perform_find(mindepth, prune, dirlist, filename)))
  results.sort()
  for r in results:
    print r
if __name__ == "__main__":
  main(sys.argv)
| [
[
1,
0,
0.2347,
0.0102,
0,
0.66,
0,
688,
0,
1,
0,
0,
688,
0,
0
],
[
1,
0,
0.2449,
0.0102,
0,
0.66,
0.2,
509,
0,
1,
0,
0,
509,
0,
0
],
[
2,
0,
0.4082,
0.2959,
0,
0.6... | [
"import os",
"import sys",
"def perform_find(mindepth, prune, dirlist, filename):\n result = []\n pruneleaves = set(map(lambda x: os.path.split(x)[1], prune))\n for rootdir in dirlist:\n rootdepth = rootdir.count(\"/\")\n for root, dirs, files in os.walk(rootdir, followlinks=True):\n # prune\n ... |
#!/usr/bin/env python
#
# Copyright (C) 2012 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Prints to stdout the package names that have overlay changes between
current_overlays.txt and previous_overlays.txt.
Usage: diff_package_overlays.py <current_packages.txt> <current_overlays.txt> <previous_overlays.txt>
current_packages.txt contains all package names separated by space in the current build.
This script modfies current_packages.txt if necessary: if there is a package in
previous_overlays.txt but absent from current_packages.txt, we copy that line
from previous_overlays.txt over to current_packages.txt. Usually that means we
just don't care that package in the current build (for example we are switching
from a full build to a partial build with mm/mmm), and we should carry on the
previous overlay config so current_overlays.txt always reflects the current
status of the entire tree.
Format of current_overlays.txt and previous_overlays.txt:
<package_name> <resource_overlay> [resource_overlay ...]
<package_name> <resource_overlay> [resource_overlay ...]
...
"""
import sys
def main(argv):
  """See the module docstring: print packages whose overlay config differs
  between argv[2] (current) and argv[3] (previous); entries for packages
  absent from the current build (argv[1]) are carried over into argv[2]."""
  if len(argv) != 4:
    print >> sys.stderr, __doc__
    sys.exit(1)
  f = open(argv[1])
  all_packages = set(f.read().split())
  f.close()
  def load_overlay_config(filename):
    # Parse "<package> <overlay> [overlay ...]" lines into
    # {package: "overlay [overlay ...]"}; blank and '#' lines ignored.
    f = open(filename)
    result = {}
    for line in f:
      line = line.strip()
      if not line or line.startswith("#"):
        continue
      words = line.split()
      result[words[0]] = " ".join(words[1:])
    f.close()
    return result
  current_overlays = load_overlay_config(argv[2])
  previous_overlays = load_overlay_config(argv[3])
  result = []   # packages with a changed overlay config
  carryon = []  # packages not built now; copy their old config forward
  for p in current_overlays:
    if p not in previous_overlays:
      result.append(p)
    elif current_overlays[p] != previous_overlays[p]:
      result.append(p)
  for p in previous_overlays:
    if p not in current_overlays:
      if p in all_packages:
        # overlay changed
        result.append(p)
      else:
        # we don't build p in the current build.
        carryon.append(p)
  # Add carryon to the current overlay config file.
  if carryon:
    f = open(argv[2], "a")
    for p in carryon:
      f.write(p + " " + previous_overlays[p] + "\n")
    f.close()
  # Print out the package names that have overlay change.
  for r in result:
    print r
if __name__ == "__main__":
  main(sys.argv)
| [
[
8,
0,
0.2889,
0.2111,
0,
0.66,
0,
0,
1,
0,
0,
0,
0,
0,
0
],
[
1,
0,
0.4111,
0.0111,
0,
0.66,
0.3333,
509,
0,
1,
0,
0,
509,
0,
0
],
[
2,
0,
0.7,
0.5444,
0,
0.66,
... | [
"\"\"\"\nPrints to stdout the package names that have overlay changes between\ncurrent_overlays.txt and previous_overlays.txt.\n\nUsage: diff_package_overlays.py <current_packages.txt> <current_overlays.txt> <previous_overlays.txt>\ncurrent_packages.txt contains all package names separated by space in the current b... |
#!/usr/bin/env python
#
# Copyright (C) 2009 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Usage: java-event-log-tags.py [-o output_file] <input_file> <merged_tags_file>
Generate a java class containing constants for each of the event log
tags in the given input file.
-h to display this usage message and exit.
"""
import cStringIO
import getopt
import os
import os.path
import re
import sys
import event_log_tags
output_file = None
# Parse -h (help) and -o (output path) options.
try:
  opts, args = getopt.getopt(sys.argv[1:], "ho:")
except getopt.GetoptError, err:
  print str(err)
  print __doc__
  sys.exit(2)
for o, a in opts:
  if o == "-h":
    print __doc__
    sys.exit(2)
  elif o == "-o":
    output_file = a
  else:
    print >> sys.stderr, "unhandled option %s" % (o,)
    sys.exit(1)
if len(args) != 2:
  print "need exactly two input files, not %d" % (len(args),)
  print __doc__
  sys.exit(1)
fn = args[0]
tagfile = event_log_tags.TagFile(fn)
# Load the merged tag file (which should have numbers assigned for all
# tags. Use the numbers from the merged file to fill in any missing
# numbers from the input file.
merged_fn = args[1]
merged_tagfile = event_log_tags.TagFile(merged_fn)
merged_by_name = dict([(t.tagname, t) for t in merged_tagfile.tags])
for t in tagfile.tags:
  if t.tagnum is None:
    if t.tagname in merged_by_name:
      t.tagnum = merged_by_name[t.tagname].tagnum
    else:
      # We're building something that's not being included in the
      # product, so its tags don't appear in the merged file. Assign
      # them all an arbitrary number so we can emit the java and
      # compile the (unused) package.
      t.tagnum = 999999
if "java_package" not in tagfile.options:
  tagfile.AddError("java_package option not specified", linenum=0)
# The generated class is @hide unless the tag file says otherwise.
hide = True
if "javadoc_hide" in tagfile.options:
  hide = event_log_tags.BooleanFromString(tagfile.options["javadoc_hide"][0])
if tagfile.errors:
  for fn, ln, msg in tagfile.errors:
    print >> sys.stderr, "%s:%d: error: %s" % (fn, ln, msg)
  sys.exit(1)
# Build the generated Java source in memory: header, package, class
# opening, and one "public static final int" constant per tag.
buffer = cStringIO.StringIO()
buffer.write("/* This file is auto-generated. DO NOT MODIFY.\n"
             " * Source file: %s\n"
             " */\n\n" % (fn,))
buffer.write("package %s;\n\n" % (tagfile.options["java_package"][0],))
basename, _ = os.path.splitext(os.path.basename(fn))
if hide:
  buffer.write("/**\n"
               " * @hide\n"
               " */\n")
buffer.write("public class %s {\n" % (basename,))
buffer.write(" private %s() { } // don't instantiate\n" % (basename,))
for t in tagfile.tags:
  if t.description:
    buffer.write("\n /** %d %s %s */\n" % (t.tagnum, t.tagname, t.description))
  else:
    buffer.write("\n /** %d %s */\n" % (t.tagnum, t.tagname))
  buffer.write(" public static final int %s = %d;\n" %
               (t.tagname.upper(), t.tagnum))
# Java reserved words: a generated identifier that collides with one of
# these gets a trailing "_" appended by javaName().
keywords = frozenset("""
    abstract continue for new switch assert default goto package
    synchronized boolean do if private this break double implements
    protected throw byte else import public throws case enum instanceof
    return transient catch extends int short try char final interface
    static void class finally long strictfp volatile const float native
    super while
    """.split())

def javaName(name):
  """Convert an event-tag name to a camelCase Java identifier, stripping
  non-alphanumerics; Java reserved words get a '_' suffix."""
  camel = re.sub(r"[^A-Za-z0-9]", "", name.title())
  out = name[0].lower() + camel[1:]
  return out + "_" if out in keywords else out
# Java parameter type for each numeric type code found in a tag
# description; slot 0 ("ERROR") is a placeholder for an invalid code.
javaTypes = ["ERROR", "int", "long", "String", "Object[]"]
# Emit one write_<tag>() helper per tag, forwarding to EventLog.writeEvent.
for t in tagfile.tags:
  methodName = javaName("write_" + t.tagname)
  if t.description:
    # Description appears to be "(name|typecode),(name|typecode),..."
    args = [arg.strip("() ").split("|") for arg in t.description.split(",")]
  else:
    args = []
  argTypesNames = ", ".join([javaTypes[int(arg[1])] + " " + javaName(arg[0]) for arg in args])
  argNames = "".join([", " + javaName(arg[0]) for arg in args])
  buffer.write("\n public static void %s(%s) {" % (methodName, argTypesNames))
  buffer.write("\n android.util.EventLog.writeEvent(%s%s);" % (t.tagname.upper(), argNames))
  buffer.write("\n }\n")
buffer.write("}\n");
# Create the output directory if needed, then write the generated class.
output_dir = os.path.dirname(output_file)
if not os.path.exists(output_dir):
  os.makedirs(output_dir)
event_log_tags.WriteOutput(output_file, buffer)
| [
[
1,
0,
0.0667,
0.0667,
0,
0.66,
0,
764,
0,
1,
0,
0,
764,
0,
0
],
[
1,
0,
0.1333,
0.0667,
0,
0.66,
0.1429,
588,
0,
1,
0,
0,
588,
0,
0
],
[
1,
0,
0.2,
0.0667,
0,
0.6... | [
"import cStringIO",
"import getopt",
"import os",
"import os.path",
"import re",
"import sys",
"import event_log_tags",
"def javaName(name):\n out = name[0].lower() + re.sub(r\"[^A-Za-z0-9]\", \"\", name.title())[1:]\n if out in keywords:\n out += \"_\"\n return out",
" out = name[0].lower() ... |
#!/usr/bin/python -E
import sys, os, re
# Path regexes for build output and editor cruft that checktree ignores
# unless -a is given.
excludes = [r'.*?/\.obj.*?',
            r'.*?~',
            r'.*?\/.DS_Store',
            r'.*?\/.gdb_history',
            r'.*?\/buildspec.mk',
            r'.*?/\..*?\.swp',
            r'.*?/out/.*?',
            r'.*?/install/.*?']
# NOTE: relies on Python 2 map() returning a list; under Python 3 this
# would be a one-shot iterator and filter_excludes would only work once.
excludes_compiled = map(re.compile, excludes)
def filter_excludes(str):
  """Predicate for filter(): False when `str` matches an exclude pattern.

  (Parameter name kept for interface compatibility even though it
  shadows the builtin.)
  """
  return not any(pattern.match(str) for pattern in excludes_compiled)
def split_perforce_parts(s):
  """Return the last third of the space-separated fields of `s`.

  "p4 where" prints three space-separated path fields per line; this
  skips past the first two thirds of the spaces and returns the rest
  (the local filesystem path).

  BUGFIX: uses floor division (//) so the computation stays integral.
  Under Python 2 this is identical to the old "/"; under Python 3 true
  division would produce a float and, when the space count is not a
  multiple of 3, loop one extra time and drop a field.
  """
  spaces = ((s.count(" ") + 1) // 3) * 2
  pos = 0
  while spaces > 0:
    pos = s.find(" ", pos) + 1
    spaces = spaces - 1
  return s[pos:]
def quotate(s):
  """Wrap `s` in double quotes (for building p4 command lines)."""
  return '"%s"' % s
class PerforceError(Exception):
  """Raised when a p4 subcommand exits with a nonzero status."""

  def __init__(self, value):
    # Keep the failing command / message for the handler.
    self.value = value

  def __str__(self):
    return repr(self.value)
def run(command, regex, filt):
  """Run a shell command; keep output lines matching `filt` and return
  group(1) of `regex` for each (or None when nothing survives).

  Raises PerforceError when the command exits nonzero.
  NOTE: relies on Python 2 semantics (map/filter returning lists).
  """
  def matchit(s):
    m = regex_compiled.match(s)
    if m:
      return m.group(1)
    else:
      return ""
  def filterit(s):
    if filt_compiled.match(s):
      return True
    else:
      return False
  fd = os.popen(command);
  lines = fd.readlines()
  # os.popen: close() returns the exit status, or None on success.
  status = fd.close()
  if status:
    raise PerforceError("error calling " + command)
  regex_compiled = re.compile(regex)
  filt_compiled = re.compile(filt)
  if len(lines) >= 1:
    lines = filter(filterit, lines)
  if len(lines) >= 1:
    return map(matchit, lines)
  return None
try:
  # -a disables the exclude filtering of common build/editor cruft.
  if len(sys.argv) == 1:
    do_exclude = True
  elif len(sys.argv) == 2 and sys.argv[1] == "-a":
    do_exclude = False
  else:
    print "usage: checktree [-a]"
    print " -a don't filter common crud in the tree"
    sys.exit(1)
  # Files p4 already tracks.
  have = run("p4 have ...", r'[^#]+#[0-9]+ - (.*)', r'.*')
  # Everything on disk, made absolute by prefixing the cwd.
  cwd = os.getcwd()
  files = run("find . -not -type d", r'.(.*)', r'.*')
  files = map(lambda s: cwd+s, files)
  # Depot paths of files opened for add, resolved to local paths via
  # "p4 where".
  added_depot_path = run("p4 opened ...", r'([^#]+)#.*', r'.*?#[0-9]+ - add .*');
  added = []
  if added_depot_path:
    added_depot_path = map(quotate, added_depot_path)
    where = "p4 where " + " ".join(added_depot_path)
    added = run(where, r'(.*)', r'.*')
    added = map(split_perforce_parts, added)
  extras = []
  # Python 2.3 -- still default on Mac OS X -- does not have set()
  # Make dict's here to support the "in" operations below
  have = dict().fromkeys(have, 1)
  added = dict().fromkeys(added, 1)
  # Anything on disk that p4 neither has nor is adding is an "extra".
  for file in files:
    if not file in have:
      if not file in added:
        extras.append(file)
  if do_exclude:
    extras = filter(filter_excludes, extras)
  # Print shell-escaped (spaces backslashed) paths.
  for s in extras:
    print s.replace(" ", "\\ ")
except PerforceError, e:
  sys.exit(2)
| [
[
1,
0,
0.0185,
0.0185,
0,
0.66,
0,
509,
0,
3,
0,
0,
509,
0,
0
],
[
2,
0,
0.0926,
0.0926,
0,
0.66,
0.2,
760,
0,
1,
1,
0,
0,
0,
1
],
[
6,
1,
0.0926,
0.0556,
1,
0.53,... | [
"import sys, os, re",
"def filter_excludes(str):\n for e in excludes_compiled:\n if e.match(str):\n return False\n return True",
" for e in excludes_compiled:\n if e.match(str):\n return False",
" if e.match(str):\n return False",
" r... |
#!/usr/bin/env python
# Django manage.py boilerplate (execute_manager style, used by older
# Django releases): delegate to the framework with this project's
# settings module.
from django.core.management import execute_manager
try:
  import settings # Assumed to be in the same directory.
except ImportError:
  import sys
  # Without a settings module nothing can run; explain how to recover.
  sys.stderr.write("Error: Can't find the file 'settings.py' in the directory containing %r. It appears you've customized things.\nYou'll have to run django-admin.py, passing it your settings module.\n(If the file settings.py does indeed exist, it's causing an ImportError somehow.)\n" % __file__)
  sys.exit(1)
if __name__ == "__main__":
  execute_manager(settings)
| [
[
1,
0,
0.1818,
0.0909,
0,
0.66,
0,
879,
0,
1,
0,
0,
879,
0,
0
],
[
7,
0,
0.5,
0.5455,
0,
0.66,
0.5,
0,
0,
1,
0,
0,
0,
0,
2
],
[
1,
1,
0.3636,
0.0909,
1,
0.79,
... | [
"from django.core.management import execute_manager",
"try:\n import settings # Assumed to be in the same directory.\nexcept ImportError:\n import sys\n sys.stderr.write(\"Error: Can't find the file 'settings.py' in the directory containing %r. It appears you've customized things.\\nYou'll have to run dj... |
#!/usr/bin/env python
# Django manage.py boilerplate (execute_manager style, used by older
# Django releases): delegate to the framework with this project's
# settings module.
from django.core.management import execute_manager
try:
  import settings # Assumed to be in the same directory.
except ImportError:
  import sys
  # Without a settings module nothing can run; explain how to recover.
  sys.stderr.write("Error: Can't find the file 'settings.py' in the directory containing %r. It appears you've customized things.\nYou'll have to run django-admin.py, passing it your settings module.\n(If the file settings.py does indeed exist, it's causing an ImportError somehow.)\n" % __file__)
  sys.exit(1)
if __name__ == "__main__":
  execute_manager(settings)
| [
[
1,
0,
0.1818,
0.0909,
0,
0.66,
0,
879,
0,
1,
0,
0,
879,
0,
0
],
[
7,
0,
0.5,
0.5455,
0,
0.66,
0.5,
0,
0,
1,
0,
0,
0,
0,
2
],
[
1,
1,
0.3636,
0.0909,
1,
0.32,
... | [
"from django.core.management import execute_manager",
"try:\n import settings # Assumed to be in the same directory.\nexcept ImportError:\n import sys\n sys.stderr.write(\"Error: Can't find the file 'settings.py' in the directory containing %r. It appears you've customized things.\\nYou'll have to run dj... |
from django.conf.urls.defaults import *
# Uncomment the next two lines to enable the admin:
# from django.contrib import admin
# admin.autodiscover()
# URL routing table; admin routes are present but commented out.
urlpatterns = patterns('',
    # Expose the PyAMF remoting gateway view under /accounts/.
    (r'^accounts/', 'amfgateway.gateway'),
    # Uncomment the next line to enable admin documentation:
    # (r'^admin/doc/', include('django.contrib.admindocs.urls')),
    # Uncomment the next line to enable the admin:
    # (r'^admin/(.*)', admin.site.root),
)
| [
[
1,
0,
0.0667,
0.0667,
0,
0.66,
0,
341,
0,
1,
0,
0,
341,
0,
0
],
[
14,
0,
0.7333,
0.6,
0,
0.66,
1,
990,
3,
2,
0,
0,
75,
10,
1
]
] | [
"from django.conf.urls.defaults import *",
"urlpatterns = patterns('',\n (r'^accounts/', 'amfgateway.gateway'),\n\n # Uncomment the next line to enable admin documentation:\n # (r'^admin/doc/', include('django.contrib.admindocs.urls')),\n\n # Uncomment the next line for to enable the admin:\n # (r'... |
# Django settings for 3rdear project.
import os
# Absolute path of the directory containing this settings file; used to
# build MEDIA_ROOT and TEMPLATE_DIRS so the project is location-independent.
PROJECT_PATH = os.path.abspath(os.path.dirname(__file__))
DEBUG = True
TEMPLATE_DEBUG = DEBUG
ADMINS = (
    ('zengsun', 'zengsun.gm@gmail.com'),
)
MANAGERS = ADMINS
# Legacy (pre-Django 1.2) single-database configuration style.
DATABASE_ENGINE = 'sqlite3' # 'postgresql_psycopg2', 'postgresql', 'mysql', 'sqlite3' or 'oracle'.
DATABASE_NAME = 'data.db' # Or path to database file if using sqlite3.
DATABASE_USER = '' # Not used with sqlite3.
DATABASE_PASSWORD = '' # Not used with sqlite3.
DATABASE_HOST = '' # Set to empty string for localhost. Not used with sqlite3.
DATABASE_PORT = '' # Set to empty string for default. Not used with sqlite3.
# Local time zone for this installation. Choices can be found here:
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
# although not all choices may be available on all operating systems.
# If running in a Windows environment this must be set to the same as your
# system time zone.
TIME_ZONE = 'Asia/Shanghai'
# Language code for this installation. All choices can be found here:
# http://www.i18nguy.com/unicode/language-identifiers.html
LANGUAGE_CODE = 'zh-CN'
SITE_ID = 1
# If you set this to False, Django will make some optimizations so as not
# to load the internationalization machinery.
USE_I18N = True
# Absolute path to the directory that holds media.
# Example: "/home/media/media.lawrence.com/"
MEDIA_ROOT = os.path.join(PROJECT_PATH, '../media')
# URL that handles the media served from MEDIA_ROOT. Make sure to use a
# trailing slash if there is a path component (optional in other cases).
# Examples: "http://media.lawrence.com", "http://example.com/media/"
MEDIA_URL = '/site_media/'
# URL prefix for admin media -- CSS, JavaScript and images. Make sure to use a
# trailing slash.
# Examples: "http://foo.com/media/", "/media/".
ADMIN_MEDIA_PREFIX = '/media/'
# Make this unique, and don't share it with anybody.
# NOTE(review): this key is committed to source control; rotate it before
# any production deployment.
SECRET_KEY = '6zgw=67#s7+21fr-idxscvqwk&p3zikv*d16beb68*09!(6qje'
# List of callables that know how to import templates from various sources.
TEMPLATE_LOADERS = (
    'django.template.loaders.filesystem.load_template_source',
    'django.template.loaders.app_directories.load_template_source',
#     'django.template.loaders.eggs.load_template_source',
)
MIDDLEWARE_CLASSES = (
    'django.middleware.common.CommonMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.middleware.doc.XViewMiddleware',
)
# NOTE(review): module name '3ear' does not match the '3rdear' project name
# in the header comment — confirm which package actually holds urls.py.
ROOT_URLCONF = '3ear.urls'
TEMPLATE_DIRS = (
    # Put strings here, like "/home/html/django_templates" or "C:/www/django/templates".
    # Always use forward slashes, even on Windows.
    # Don't forget to use absolute paths, not relative paths.
    os.path.join(PROJECT_PATH, '../templates'),
)
INSTALLED_APPS = (
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.sites',
    'django.contrib.admin',
    # user applications
    'userprofile',
    'music',
)
# Sessions end when the browser closes (no persistent login cookie).
SESSION_EXPIRE_AT_BROWSER_CLOSE = True
# user.get_profile() resolves to the userprofile.Profile model.
AUTH_PROFILE_MODULE = 'userprofile.Profile'
# Define template context
TEMPLATE_CONTEXT_PROCESSORS = (
    "django.core.context_processors.auth",
    "django.core.context_processors.debug",
    "django.core.context_processors.i18n",
    "django.core.context_processors.media",
    "django.core.context_processors.request",
)
| [
[
1,
0,
0.0294,
0.0098,
0,
0.66,
0,
688,
0,
1,
0,
0,
688,
0,
0
],
[
14,
0,
0.0392,
0.0098,
0,
0.66,
0.037,
660,
3,
1,
0,
0,
142,
10,
2
],
[
14,
0,
0.0588,
0.0098,
0,
... | [
"import os",
"PROJECT_PATH = os.path.abspath(os.path.dirname(__file__))",
"DEBUG = True",
"TEMPLATE_DEBUG = DEBUG",
"ADMINS = (\n ('zengsun', 'zengsun.gm@gmail.com'),\n)",
"MANAGERS = ADMINS",
"DATABASE_ENGINE = 'sqlite3' # 'postgresql_psycopg2', 'postgresql', 'mysql', 'sqlite3' or 'oracle'... |
#!/usr/bin/env python
# -*- coding: UTF-8 -*-
from django import newforms as forms
from django.utils.translation import gettext_lazy as _
from models import Profile
class ProfileForm(forms.ModelForm):
    """Form for a user's Profile.

    'user' and 'report_to' are excluded because the registration view
    assigns them itself after the User row is created.
    """
    class Meta:
        model = Profile
        exclude = ('user', 'report_to',)
class UserForm(forms.Form):
    """Registration form collecting credentials for a new account.

    Validates that the login name is unused and well-formed, that the two
    password fields match, and that the e-mail address is not taken.
    """
    login_name = forms.CharField(max_length=30, min_length=4)
    email = forms.EmailField()
    password = forms.CharField(max_length=12, min_length=6, widget=forms.PasswordInput)
    repassword = forms.CharField(max_length=12, min_length=6,
                                 widget=forms.PasswordInput)
    def clean_login_name(self):
        """
        Verify that the username isn't already registered
        """
        # BUG FIX: `User` was referenced here but never imported anywhere in
        # this module, so validation raised NameError. Import it locally to
        # keep this fix self-contained.
        from django.contrib.auth.models import User
        username = self.cleaned_data.get("login_name")
        if not set(username).issubset("abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789_."):
            raise forms.ValidationError(_("That login name has invalid characters. The valid values are letters, numbers and underscore."))
        # Case-insensitive uniqueness check against existing accounts.
        if User.objects.filter(username__iexact=username).count() == 0:
            return username
        else:
            raise forms.ValidationError(_("The login name is already registered."))
    def clean(self):
        """
        Verify that the 2 passwords fields are equal
        """
        if self.cleaned_data.get("password") == self.cleaned_data.get("repassword"):
            return self.cleaned_data
        else:
            raise forms.ValidationError(_("The passwords inserted are different."))
    def clean_email(self):
        """
        Verify that the email is not already in use
        """
        from django.contrib.auth.models import User  # see clean_login_name
        email = self.cleaned_data.get("email")
        # BUG FIX: the original used get() inside a bare `except:` that
        # swallowed every exception (including MultipleObjectsReturned,
        # which wrongly let duplicate addresses pass). A count over a
        # filter is explicit and mirrors clean_login_name.
        if User.objects.filter(email=email).count() == 0:
            return email
        raise forms.ValidationError(_("That e-mail is already used."))
| [
[
1,
0,
0.0727,
0.0182,
0,
0.66,
0,
294,
0,
1,
0,
0,
294,
0,
0
],
[
1,
0,
0.0909,
0.0182,
0,
0.66,
0.25,
389,
0,
1,
0,
0,
389,
0,
0
],
[
1,
0,
0.1273,
0.0182,
0,
0.... | [
"from django import newforms as forms",
"from django.utils.translation import gettext_lazy as _",
"from models import Profile",
"class ProfileForm(forms.ModelForm):\n class Meta:\n model = Profile\n exclude = ('user', 'report_to',)",
" class Meta:\n model = Profile\n exclud... |
#!/usr/bin/env python
# -*- coding: UTF-8 -*-
from django.db import models
from django.contrib.auth.models import User
from django.utils.translation import gettext_lazy as _
# Create your models here.
# Gender is stored as a single character; labels are lazily translated.
GENDER_CHOICES = (
    ('M', _('Male')),
    ('F', _('Female')),
)
class Profile(models.Model):
    """Employee profile extending the auth User with personal data.
    """
    # User.attribute: username, first_name, last_name, email, password,
    # is_staff, is_active, is_superuser, last_login, date_joined
    # One profile per User; the User's pk is reused as this table's pk.
    user = models.OneToOneField(User, primary_key=True)
    # Optional self-referential supervisor link.
    report_to = models.ForeignKey('self', null=True, blank=True)
    # profile
    gender = models.CharField(max_length=1, choices=GENDER_CHOICES, radio_admin=True)
    # 18 chars — presumably a national ID number; TODO confirm format.
    identification = models.CharField(max_length=18, unique=True)
    birthday = models.DateField()
    tel = models.CharField(max_length=50)
    # Optional photo, uploaded under the 'photo' media subdirectory.
    portrait = models.ImageField(upload_to='photo', blank=True, null=True)
    class Meta:
        pass
    class Admin:
        list_display = ('user', 'gender', 'identification', 'birthday', 'tel', 'portrait')
    def __unicode__(self):
        return self.user.username
    def get_portrait(self):
        # Fall back to a bundled placeholder when no photo was uploaded.
        if self.portrait:
            return self.get_portrait_url()
        else:
            return 'images/default.png'
| [
[
1,
0,
0.0976,
0.0244,
0,
0.66,
0,
40,
0,
1,
0,
0,
40,
0,
0
],
[
1,
0,
0.122,
0.0244,
0,
0.66,
0.25,
808,
0,
1,
0,
0,
808,
0,
0
],
[
1,
0,
0.1463,
0.0244,
0,
0.66,... | [
"from django.db import models",
"from django.contrib.auth.models import User",
"from django.utils.translation import gettext_lazy as _",
"GENDER_CHOICES = (\n ('M', _('Male')),\n ('F', _('Female')),\n)",
"class Profile(models.Model):\n \"\"\"Employee\n\n \"\"\"\n # User.attribute: username, f... |
#!/usr/bin/env python
# -*- coding: UTF-8 -*-
from django.http import HttpResponse, HttpResponseRedirect
from django.shortcuts import render_to_response, get_object_or_404
from django.template import RequestContext
#from django.contrib import auth
from django.contrib.auth.models import User
from models import Profile
from forms import ProfileForm, UserForm
# Create your views here.
def register(request):
    """
    Handle new user registration.

    GET: render empty user and profile forms.
    POST: validate both forms; on success create a deactivated User, save
    the linked Profile, and redirect to the site root. On validation
    failure the bound forms are re-rendered with their errors.
    """
    if request.method == 'POST':
        user_form = UserForm(request.POST)
        profile_form = ProfileForm(request.POST)
        if user_form.is_valid() and profile_form.is_valid():
            username = user_form.cleaned_data.get('login_name')
            password = user_form.cleaned_data['password']
            email = user_form.cleaned_data['email']
            new_user = User.objects.create_user(username=username, email=email, password=password)
            # New accounts start deactivated until explicitly enabled.
            new_user.is_active = False
            # BUG FIX: Model.save() returns None, so the original
            # "if new_user.save():" branch never ran — the profile was
            # silently dropped and no redirect happened. Save both
            # unconditionally.
            new_user.save()
            # save new profile for user.
            new_profile = profile_form.save(commit=False)
            new_profile.user = new_user
            new_profile.save()
            # if success
            return HttpResponseRedirect('/')
    else:
        user_form = UserForm()
        profile_form = ProfileForm()
    return render_to_response('registration/register.html',
            {'user_form': user_form, 'profile_form': profile_form},
            context_instance=RequestContext(request))
| [
[
1,
0,
0.075,
0.025,
0,
0.66,
0,
779,
0,
2,
0,
0,
779,
0,
0
],
[
1,
0,
0.1,
0.025,
0,
0.66,
0.1667,
852,
0,
2,
0,
0,
852,
0,
0
],
[
1,
0,
0.125,
0.025,
0,
0.66,
... | [
"from django.http import HttpResponse, HttpResponseRedirect",
"from django.shortcuts import render_to_response, get_object_or_404",
"from django.template import RequestContext",
"from django.contrib.auth.models import User",
"from models import Profile",
"from forms import ProfileForm, UserForm",
"def r... |
#!/usr/bin/env python
# -*- coding: UTF-8 -*-
from django import newforms as forms
from django.utils.translation import gettext_lazy as _
from models import Profile
class ProfileForm(forms.ModelForm):
    """Form for a user's Profile.

    'user' and 'report_to' are excluded because the registration view
    assigns them itself after the User row is created.
    """
    class Meta:
        model = Profile
        exclude = ('user', 'report_to',)
class UserForm(forms.Form):
    """Registration form collecting credentials for a new account.

    Validates that the login name is unused and well-formed, that the two
    password fields match, and that the e-mail address is not taken.
    """
    login_name = forms.CharField(max_length=30, min_length=4)
    email = forms.EmailField()
    password = forms.CharField(max_length=12, min_length=6, widget=forms.PasswordInput)
    repassword = forms.CharField(max_length=12, min_length=6,
                                 widget=forms.PasswordInput)
    def clean_login_name(self):
        """
        Verify that the username isn't already registered
        """
        # BUG FIX: `User` was referenced here but never imported anywhere in
        # this module, so validation raised NameError. Import it locally to
        # keep this fix self-contained.
        from django.contrib.auth.models import User
        username = self.cleaned_data.get("login_name")
        if not set(username).issubset("abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789_."):
            raise forms.ValidationError(_("That login name has invalid characters. The valid values are letters, numbers and underscore."))
        # Case-insensitive uniqueness check against existing accounts.
        if User.objects.filter(username__iexact=username).count() == 0:
            return username
        else:
            raise forms.ValidationError(_("The login name is already registered."))
    def clean(self):
        """
        Verify that the 2 passwords fields are equal
        """
        if self.cleaned_data.get("password") == self.cleaned_data.get("repassword"):
            return self.cleaned_data
        else:
            raise forms.ValidationError(_("The passwords inserted are different."))
    def clean_email(self):
        """
        Verify that the email is not already in use
        """
        from django.contrib.auth.models import User  # see clean_login_name
        email = self.cleaned_data.get("email")
        # BUG FIX: the original used get() inside a bare `except:` that
        # swallowed every exception (including MultipleObjectsReturned,
        # which wrongly let duplicate addresses pass). A count over a
        # filter is explicit and mirrors clean_login_name.
        if User.objects.filter(email=email).count() == 0:
            return email
        raise forms.ValidationError(_("That e-mail is already used."))
| [
[
1,
0,
0.0727,
0.0182,
0,
0.66,
0,
294,
0,
1,
0,
0,
294,
0,
0
],
[
1,
0,
0.0909,
0.0182,
0,
0.66,
0.25,
389,
0,
1,
0,
0,
389,
0,
0
],
[
1,
0,
0.1273,
0.0182,
0,
0.... | [
"from django import newforms as forms",
"from django.utils.translation import gettext_lazy as _",
"from models import Profile",
"class ProfileForm(forms.ModelForm):\n class Meta:\n model = Profile\n exclude = ('user', 'report_to',)",
" class Meta:\n model = Profile\n exclud... |
#!/usr/bin/env python
# -*- coding: UTF-8 -*-
from django.db import models
from django.contrib.auth.models import User
from django.utils.translation import gettext_lazy as _
# Create your models here.
# Gender is stored as a single character; labels are lazily translated.
GENDER_CHOICES = (
    ('M', _('Male')),
    ('F', _('Female')),
)
class Profile(models.Model):
    """Employee profile extending the auth User with personal data.
    """
    # User.attribute: username, first_name, last_name, email, password,
    # is_staff, is_active, is_superuser, last_login, date_joined
    # One profile per User; the User's pk is reused as this table's pk.
    user = models.OneToOneField(User, primary_key=True)
    # Optional self-referential supervisor link.
    report_to = models.ForeignKey('self', null=True, blank=True)
    # profile
    gender = models.CharField(max_length=1, choices=GENDER_CHOICES, radio_admin=True)
    # 18 chars — presumably a national ID number; TODO confirm format.
    identification = models.CharField(max_length=18, unique=True)
    birthday = models.DateField()
    tel = models.CharField(max_length=50)
    # Optional photo, uploaded under the 'photo' media subdirectory.
    portrait = models.ImageField(upload_to='photo', blank=True, null=True)
    class Meta:
        pass
    class Admin:
        list_display = ('user', 'gender', 'identification', 'birthday', 'tel', 'portrait')
    def __unicode__(self):
        return self.user.username
    def get_portrait(self):
        # Fall back to a bundled placeholder when no photo was uploaded.
        if self.portrait:
            return self.get_portrait_url()
        else:
            return 'images/default.png'
| [
[
1,
0,
0.0976,
0.0244,
0,
0.66,
0,
40,
0,
1,
0,
0,
40,
0,
0
],
[
1,
0,
0.122,
0.0244,
0,
0.66,
0.25,
808,
0,
1,
0,
0,
808,
0,
0
],
[
1,
0,
0.1463,
0.0244,
0,
0.66,... | [
"from django.db import models",
"from django.contrib.auth.models import User",
"from django.utils.translation import gettext_lazy as _",
"GENDER_CHOICES = (\n ('M', _('Male')),\n ('F', _('Female')),\n)",
"class Profile(models.Model):\n \"\"\"Employee\n\n \"\"\"\n # User.attribute: username, f... |
#!/usr/bin/env python
# -*- coding: UTF-8 -*-
from django.http import HttpResponse, HttpResponseRedirect
from django.shortcuts import render_to_response, get_object_or_404
from django.template import RequestContext
#from django.contrib import auth
from django.contrib.auth.models import User
from models import Profile
from forms import ProfileForm, UserForm
# Create your views here.
def register(request):
    """
    Handle new user registration.

    GET: render empty user and profile forms.
    POST: validate both forms; on success create a deactivated User, save
    the linked Profile, and redirect to the site root. On validation
    failure the bound forms are re-rendered with their errors.
    """
    if request.method == 'POST':
        user_form = UserForm(request.POST)
        profile_form = ProfileForm(request.POST)
        if user_form.is_valid() and profile_form.is_valid():
            username = user_form.cleaned_data.get('login_name')
            password = user_form.cleaned_data['password']
            email = user_form.cleaned_data['email']
            new_user = User.objects.create_user(username=username, email=email, password=password)
            # New accounts start deactivated until explicitly enabled.
            new_user.is_active = False
            # BUG FIX: Model.save() returns None, so the original
            # "if new_user.save():" branch never ran — the profile was
            # silently dropped and no redirect happened. Save both
            # unconditionally.
            new_user.save()
            # save new profile for user.
            new_profile = profile_form.save(commit=False)
            new_profile.user = new_user
            new_profile.save()
            # if success
            return HttpResponseRedirect('/')
    else:
        user_form = UserForm()
        profile_form = ProfileForm()
    return render_to_response('registration/register.html',
            {'user_form': user_form, 'profile_form': profile_form},
            context_instance=RequestContext(request))
| [
[
1,
0,
0.075,
0.025,
0,
0.66,
0,
779,
0,
2,
0,
0,
779,
0,
0
],
[
1,
0,
0.1,
0.025,
0,
0.66,
0.1667,
852,
0,
2,
0,
0,
852,
0,
0
],
[
1,
0,
0.125,
0.025,
0,
0.66,
... | [
"from django.http import HttpResponse, HttpResponseRedirect",
"from django.shortcuts import render_to_response, get_object_or_404",
"from django.template import RequestContext",
"from django.contrib.auth.models import User",
"from models import Profile",
"from forms import ProfileForm, UserForm",
"def r... |
from django.contrib.auth.models import User
from pyamf.remoting.gateway.django import DjangoGateway
def user_list(request):
    """AMF service method: return the full set of auth Users."""
    all_users = User.objects.all()
    return all_users

# Gateway mapping AMF service names to their handler callables.
gateway = DjangoGateway({
    'userlist': user_list,
})
| [
[
1,
0,
0.1111,
0.1111,
0,
0.66,
0,
808,
0,
1,
0,
0,
808,
0,
0
],
[
1,
0,
0.2222,
0.1111,
0,
0.66,
0.3333,
653,
0,
1,
0,
0,
653,
0,
0
],
[
2,
0,
0.5,
0.2222,
0,
0.6... | [
"from django.contrib.auth.models import User",
"from pyamf.remoting.gateway.django import DjangoGateway",
"def user_list(request):\n return User.objects.all()",
" return User.objects.all()",
"gateway = DjangoGateway({\n 'userlist': user_list,\n})"
] |
#!/usr/bin/env python
# -*- coding: UTF-8 -*-
from django.db import models
from django.contrib.auth.models import User
# Create your models here.
class Music(models.Model):
    """Music track.
    """
    title = models.CharField(max_length=50)
    artist = models.CharField(max_length=50)
    # Play count; no default, callers must supply it.
    counting = models.IntegerField()
    # Uploaded audio file, stored under song/<year>/<month>.
    song = models.FileField(upload_to="song/%Y/%m")
    # auto_now=True refreshes on every save, not only on creation.
    join_time = models.DateTimeField(auto_now=True)
    class Meta:
        pass
    class Admin:
        pass
    def __unicode__(self):
        return self.title
class Comment(models.Model):
    """User comment on a music track.
    """
    user = models.ForeignKey(User)
    music = models.ForeignKey(Music)
    # Numeric rating given together with the comment.
    rank = models.IntegerField()
    # Optional free-text body.
    content = models.TextField(null=True, blank=True)
    send_time = models.DateTimeField(auto_now=True)
    class Meta:
        pass
    class Admin:
        pass
    def __unicode__(self):
        return "%s-%s" % (self.user.username, self.music.title, )
class UserMusic(models.Model):
    """A user's playlist entry (user <-> music link).
    """
    user = models.ForeignKey(User)
    music = models.ForeignKey(Music)
    # Per-user play count for this track.
    counting = models.IntegerField()
    # Soft-delete flag: inactive entries stay in the table.
    active = models.BooleanField(default=True)
    class Meta:
        pass
    class Admin:
        pass
    def __unicode__(self):
        return "%s: %s" % (self.user.username, self.music.title, )
class Category(models.Model):
    """Music category.
    """
    title = models.CharField(max_length=50)
    description = models.CharField(max_length=500)
    class Meta:
        pass
    class Admin:
        pass
    def __unicode__(self):
        return self.title
class CategoryMusic(models.Model):
    """Membership of a music track in a category.
    """
    category = models.ForeignKey(Category)
    music = models.ForeignKey(Music)
    # Play count accumulated through this category.
    counting = models.IntegerField(default=0)
    class Meta:
        pass
    class Admin:
        pass
    def __unicode__(self):
        return "%s:%s" % (self.category.title, self.music.title, )
| [
[
1,
0,
0.0319,
0.0106,
0,
0.66,
0,
40,
0,
1,
0,
0,
40,
0,
0
],
[
1,
0,
0.0426,
0.0106,
0,
0.66,
0.1667,
808,
0,
1,
0,
0,
808,
0,
0
],
[
3,
0,
0.1596,
0.1809,
0,
0.... | [
"from django.db import models",
"from django.contrib.auth.models import User",
"class Music(models.Model):\n \"\"\"音乐\n \"\"\"\n title = models.CharField(max_length=50)\n artist = models.CharField(max_length=50)\n counting = models.IntegerField()\n song = models.FileField(upload_to=\"song/%Y/%... |
#!/usr/bin/env python
# -*- coding: UTF-8 -*-
from django.db import models
from django.contrib.auth.models import User
# Create your models here.
class Music(models.Model):
    """Music track.
    """
    title = models.CharField(max_length=50)
    artist = models.CharField(max_length=50)
    # Play count; no default, callers must supply it.
    counting = models.IntegerField()
    # Uploaded audio file, stored under song/<year>/<month>.
    song = models.FileField(upload_to="song/%Y/%m")
    # auto_now=True refreshes on every save, not only on creation.
    join_time = models.DateTimeField(auto_now=True)
    class Meta:
        pass
    class Admin:
        pass
    def __unicode__(self):
        return self.title
class Comment(models.Model):
    """User comment on a music track.
    """
    user = models.ForeignKey(User)
    music = models.ForeignKey(Music)
    # Numeric rating given together with the comment.
    rank = models.IntegerField()
    # Optional free-text body.
    content = models.TextField(null=True, blank=True)
    send_time = models.DateTimeField(auto_now=True)
    class Meta:
        pass
    class Admin:
        pass
    def __unicode__(self):
        return "%s-%s" % (self.user.username, self.music.title, )
class UserMusic(models.Model):
    """A user's playlist entry (user <-> music link).
    """
    user = models.ForeignKey(User)
    music = models.ForeignKey(Music)
    # Per-user play count for this track.
    counting = models.IntegerField()
    # Soft-delete flag: inactive entries stay in the table.
    active = models.BooleanField(default=True)
    class Meta:
        pass
    class Admin:
        pass
    def __unicode__(self):
        return "%s: %s" % (self.user.username, self.music.title, )
class Category(models.Model):
    """Music category.
    """
    title = models.CharField(max_length=50)
    description = models.CharField(max_length=500)
    class Meta:
        pass
    class Admin:
        pass
    def __unicode__(self):
        return self.title
class CategoryMusic(models.Model):
    """Membership of a music track in a category.
    """
    category = models.ForeignKey(Category)
    music = models.ForeignKey(Music)
    # Play count accumulated through this category.
    counting = models.IntegerField(default=0)
    class Meta:
        pass
    class Admin:
        pass
    def __unicode__(self):
        return "%s:%s" % (self.category.title, self.music.title, )
| [
[
1,
0,
0.0319,
0.0106,
0,
0.66,
0,
40,
0,
1,
0,
0,
40,
0,
0
],
[
1,
0,
0.0426,
0.0106,
0,
0.66,
0.1667,
808,
0,
1,
0,
0,
808,
0,
0
],
[
3,
0,
0.1596,
0.1809,
0,
0.... | [
"from django.db import models",
"from django.contrib.auth.models import User",
"class Music(models.Model):\n \"\"\"音乐\n \"\"\"\n title = models.CharField(max_length=50)\n artist = models.CharField(max_length=50)\n counting = models.IntegerField()\n song = models.FileField(upload_to=\"song/%Y/%... |
# Create your views here.
| [] | [] |
#
# 3DStudyMaze is a Panda3D application for building and
# running an education course in virtual environment.
#
# It will feature:
# - Simple maze definition language
# - Connectivity between mazes
# - Auto-adjusting room and tunnel sizes
# - Support-ticket instructor locks
# - LaTeX-editable walls
#
# It is based on the idea, published on Halfbakery:
# http://www.halfbakery.com/idea/3D_20Study_20Maze
#
# Panda3D game engine is available at:
# http://www.panda3d.org
#
# Inyuki < Made in The Internet >
# Contact: mindey@gmail.com
#
# The initial commit is based on Panda3D sample program named "Bump-Mapping":
# http://www.panda3d.org/manual/index.php/Sample_Programs:_Normal_Mapping
# Licensed, probably, under http://www.panda3d.org/license.php
#
import direct.directbase.DirectStart
from panda3d.core import WindowProperties
from panda3d.core import TextNode
from panda3d.core import Point2, Point3, Vec3, Vec4
from direct.task.Task import Task
from direct.gui.OnscreenText import OnscreenText
from direct.showbase.DirectObject import DirectObject
from pandac.PandaModules import CardMaker
import sys, os
from pandac.PandaModules import Texture
from PIL import Image, ImageDraw
from pandac.PandaModules import TransparencyAttrib
from copy import copy
from direct.gui.DirectEntry import DirectEntry
from panda3d.core import TextureStage
# Single scene-graph parent under which every wall/poster card is attached.
rootNode = render.attachNewNode( 'rootNode' )
# We need a starting point for drawing either a room or a tunnel.
class Entrance:
    """A rectangular opening in one wall of a room or tunnel.

    Given the wall's top-left corner, a 2D offset along the wall and the
    opening's 2D dimensions, computes the four 3D corner points of the
    opening (stored as ExitLeftTop/ExitRightTop/ExitLeftBottom/
    ExitRightBottom).  With kind='exit' the corners are mirrored and the
    stored direction flipped, so the opening is described as seen from
    the far side of the wall.
    """
    def __init__(self, WallTopLeft=Point3(), offset=Point2(), dim=Point2(0,0), direction='back', kind='entrance' ):
        self.dim = dim
        self.direction = direction
        self.offset = offset
        # Each branch maps the 2D (offset, dim) rectangle onto the wall's
        # plane; which 3D axes receive the x/y components depends on the
        # wall's facing direction.
        # NOTE(review): an unrecognised `direction` leaves the Exit*
        # locals unassigned and raises NameError below.
        if direction == 'left':
            ExitLeftTop = Point3(WallTopLeft + Point3( 0, offset.x, -offset.y) )
            ExitRightTop = Point3(ExitLeftTop + Point3( 0, dim.x , 0) )
            ExitLeftBottom = Point3(ExitLeftTop + Point3( 0, 0 , -dim.y) )
            ExitRightBottom = Point3(ExitLeftTop + Point3( 0, dim.x , -dim.y) )
        if direction == 'right':
            ExitLeftTop = Point3(WallTopLeft + Point3( 0, -offset.x, -offset.y) )
            ExitRightTop = Point3(ExitLeftTop + Point3( 0, -dim.x , 0) )
            ExitLeftBottom = Point3(ExitLeftTop + Point3( 0, 0 , -dim.y) )
            ExitRightBottom = Point3(ExitLeftTop + Point3( 0, -dim.x , -dim.y) )
        if direction == 'back':
            ExitLeftTop = Point3(WallTopLeft + Point3( -offset.x, 0, -offset.y) )
            ExitRightTop = Point3(ExitLeftTop + Point3( -dim.x , 0, 0) )
            ExitLeftBottom = Point3(ExitLeftTop + Point3( 0 , 0, -dim.y) )
            ExitRightBottom = Point3(ExitLeftTop + Point3( -dim.x , 0, -dim.y) )
        if direction == 'front':
            ExitLeftTop = Point3(WallTopLeft + Point3( offset.x, 0, -offset.y) )
            ExitRightTop = Point3(ExitLeftTop + Point3( dim.x , 0, 0) )
            ExitLeftBottom = Point3(ExitLeftTop + Point3( 0 , 0, -dim.y) )
            ExitRightBottom = Point3(ExitLeftTop + Point3( dim.x , 0, -dim.y) )
        if direction == 'bottom':
            ExitLeftTop = Point3(WallTopLeft + Point3( offset.x, -offset.y, 0) )
            ExitRightTop = Point3(ExitLeftTop + Point3( dim.x , 0, 0) )
            ExitLeftBottom = Point3(ExitLeftTop + Point3( 0 , -dim.y, 0) )
            ExitRightBottom = Point3(ExitLeftTop + Point3( dim.x , -dim.y, 0) )
        if direction == 'top':
            ExitLeftTop = Point3(WallTopLeft + Point3( offset.x, offset.y, 0) )
            ExitRightTop = Point3(ExitLeftTop + Point3( dim.x , 0, 0) )
            ExitLeftBottom = Point3(ExitLeftTop + Point3( 0 , dim.y, 0) )
            ExitRightBottom = Point3(ExitLeftTop + Point3( dim.x , dim.y, 0) )
        if kind == 'entrance':
            # Store the corners exactly as computed.
            self.ExitLeftTop = ExitLeftTop
            self.ExitRightTop = ExitRightTop
            self.ExitLeftBottom = ExitLeftBottom
            self.ExitRightBottom = ExitRightBottom
        if kind == 'exit':
            # Mirror left<->right (vertical walls) or top<->bottom
            # (horizontal walls) and flip the facing direction so the
            # opening is described from the opposite side of the wall.
            if direction in ['left', 'right', 'back', 'front']:
                self.ExitLeftTop = ExitRightTop
                self.ExitRightTop = ExitLeftTop
                self.ExitLeftBottom = ExitRightBottom
                self.ExitRightBottom = ExitLeftBottom
            if direction in ['bottom', 'top']:
                self.ExitLeftTop = ExitLeftBottom
                self.ExitRightTop = ExitRightBottom
                self.ExitLeftBottom = ExitLeftTop
                self.ExitRightBottom = ExitRightTop
            if direction == 'left': self.direction = 'right'
            if direction == 'right': self.direction = 'left'
            if direction == 'back': self.direction = 'front'
            if direction == 'front': self.direction = 'back'
            if direction == 'bottom': self.direction = 'top'
            if direction == 'top': self.direction = 'bottom'
class Wall:
    """One quad ("card") of the maze, attached under rootNode.

    Corners are passed to CardMaker.setFrame in its expected order
    (LeftBottom, RightBottom, RightTop, LeftTop).
    """
    def __init__(self, LeftBottom=Point3( -15, 50, -10), RightBottom=Point3( 15, 50, -10), \
            LeftTop=Point3( -15, 50, 10), RightTop=Point3( 15, 50, 10), Tile=None ):
        # BUG FIX: Room and Tunnel construct walls with a Tile=... keyword
        # argument, which this signature did not accept — every Room/Tunnel
        # instantiation raised TypeError.  Accept it (default None keeps
        # existing callers working) and store the tile image name; actual
        # texture loading can be layered on later without changing callers.
        self.tile = Tile
        self.cm = CardMaker('card')
        self.cm.setUvRange( Point2( 0, 0 ), Point2( 1, 1) )
        self.cm.setFrame( LeftBottom, RightBottom, RightTop, LeftTop )
        self.card = rootNode.attachNewNode(self.cm.generate())
class Room:
    """A closed box: six Wall cards built from eight named corner points.

    Each face is given a Tile image name identifying its texture.
    """
    def __init__(self, RightFrontTop, RightFrontBottom, RightBackTop, RightBackBottom, \
            LeftFrontTop, LeftFrontBottom, LeftBackTop, LeftBackBottom):
        # Corner arguments per wall follow Wall's (LB, RB, LT, RT) order.
        self.left_wall = Wall( LeftBackBottom, LeftFrontBottom, LeftBackTop, LeftFrontTop, Tile='left.png')
        self.right_wall = Wall( RightFrontBottom, RightBackBottom, RightFrontTop, RightBackTop, Tile='right.png')
        self.back_wall = Wall( RightBackBottom, LeftBackBottom, RightBackTop, LeftBackTop, Tile='back.png')
        self.front_wall = Wall( LeftFrontBottom, RightFrontBottom, LeftFrontTop, RightFrontTop, Tile='front.png')
        self.bottom_wall = Wall( LeftBackBottom, RightBackBottom, LeftFrontBottom, RightFrontBottom, Tile='bottom.png')
        self.top_wall = Wall( LeftFrontTop, RightFrontTop, LeftBackTop, RightBackTop, Tile='top.png')
class Tunnel:
    """Like Room, but with only four walls (left/right/bottom/top),
    leaving the front and back faces open as passage ends.
    """
    def __init__(self, RightFrontTop, RightFrontBottom, RightBackTop, RightBackBottom, \
            LeftFrontTop, LeftFrontBottom, LeftBackTop, LeftBackBottom):
        self.left_wall = Wall( LeftBackBottom, LeftFrontBottom, LeftBackTop, LeftFrontTop, Tile='left.png')
        self.right_wall = Wall( RightFrontBottom, RightBackBottom, RightFrontTop, RightBackTop, Tile='right.png')
        self.bottom_wall = Wall( LeftBackBottom, RightBackBottom, LeftFrontBottom, RightFrontBottom, Tile='bottom.png')
        self.top_wall = Wall( LeftFrontTop, RightFrontTop, LeftBackTop, RightBackTop, Tile='top.png')
class Poster(Wall):
    """A document (image or video) hung on a maze wall.

    Computes the poster quad's corners from the wall's top-left corner, a
    2D position on the wall and the document's pixel size, builds the card
    via Wall.__init__, applies the document as a texture (synchronized to
    audio for avi/mp4), and records an axis-aligned "activation" box in
    front of the poster (x/y/z_minus..plus) for proximity triggering.
    """
    def __init__(self, WallTopLeft=Point3(), PosterLeftTop=Point2(), directions=('y+','z+'), document='hi-res.png', scale=1.0, aspect='3:4' ):
        # Here, just needs to determine the positions of the 'LeftTop', 'RightTop', 'LeftBottom', 'RightBottom' points.
        # They all will depend on the coordinate of the maze wall's point of reference (e.g., LeftTop corner of the wall),
        # and the position, and the directions, along which to add the position.x and position.y coordinates.
        # Determine dimensions of the document:
        if document.split('.')[-1] in [ 'avi', 'mp4' ]:
            # If the document is video: size from the declared aspect ratio.
            if aspect == '9:16':
                img_h, img_w = 9*200*scale, 16*200*scale
            else:
                img_h, img_w = 3*500*scale, 4*500*scale
        else:
            # If the document is image: size read from the file itself.
            img=Image.open(document,'r')
            img_w, img_h = img.size
        # Convert pixels to world units (100 px per unit), then apply scale.
        document_x, document_y = scale*img_w/100, scale*img_h/100
        # Save filename for later use in activating videos.
        self.document = document
        # Margin from the wall (avoids z-fighting with the wall card).
        m = 0.01
        # Margin for activation area (it could later be a function of img_h, img_w):
        a = 5.
        # The coordinates depend on the orientation of the wall, here specified by 'directions' instead of 'face'.
        # Maybe I should just have rotation matrices for each case, and write it more compactly, but for now:
        if directions == ('y+','z+'): # Poster on left wall
            margin = Point3(m,0,0)
            active = Point3(a,0,0)
            PosterLeftTop = Point3(WallTopLeft + Point3( 0, PosterLeftTop.x, -PosterLeftTop.y) + margin)
            PosterRightTop = Point3(PosterLeftTop + Point3( 0, document_x , 0) )
            PosterLeftBottom = Point3(PosterLeftTop + Point3( 0, 0 , -document_y) )
            PosterRightBottom = Point3(PosterLeftTop + Point3( 0, document_x , -document_y) )
        if directions == ('y-','z+'): # Poster on right wall
            margin = Point3(-m,0,0)
            active = Point3(-a,0,0)
            PosterLeftTop = Point3(WallTopLeft + Point3( 0, -PosterLeftTop.x, -PosterLeftTop.y) + margin)
            PosterRightTop = Point3(PosterLeftTop + Point3( 0, -document_x , 0) )
            PosterLeftBottom = Point3(PosterLeftTop + Point3( 0, 0 , -document_y) )
            PosterRightBottom = Point3(PosterLeftTop + Point3( 0, -document_x , -document_y) )
        if directions == ('x-','z+'): # Poster on back wall
            margin = Point3(0,m,0)
            active = Point3(0,a,0)
            PosterLeftTop = Point3(WallTopLeft + Point3( -PosterLeftTop.x, 0, -PosterLeftTop.y) + margin)
            PosterRightTop = Point3(PosterLeftTop + Point3( -document_x , 0, 0) )
            PosterLeftBottom = Point3(PosterLeftTop + Point3( 0 , 0, -document_y) )
            PosterRightBottom = Point3(PosterLeftTop + Point3( -document_x , 0, -document_y) )
        if directions == ('x+','z+'): # Poster on front wall
            margin = Point3(0,-m,0)
            active = Point3(0,-a,0)
            PosterLeftTop = Point3(WallTopLeft + Point3( PosterLeftTop.x, 0, -PosterLeftTop.y) + margin)
            PosterRightTop = Point3(PosterLeftTop + Point3( document_x , 0, 0) )
            PosterLeftBottom = Point3(PosterLeftTop + Point3( 0 , 0, -document_y) )
            PosterRightBottom = Point3(PosterLeftTop + Point3( document_x , 0, -document_y) )
        if directions == ('x+','y+'): # Poster on bottom wall
            margin = Point3(0,0,m)
            active = Point3(0,0,a)
            PosterLeftTop = Point3(WallTopLeft + Point3( PosterLeftTop.x, -PosterLeftTop.y, 0) + margin)
            PosterRightTop = Point3(PosterLeftTop + Point3( document_x , 0, 0) )
            PosterLeftBottom = Point3(PosterLeftTop + Point3( 0 , -document_y, 0) )
            PosterRightBottom = Point3(PosterLeftTop + Point3( document_x , -document_y, 0) )
        if directions == ('x+','y-'): # Poster on top wall
            margin = Point3(0,0,-m)
            active = Point3(0,0,-a)
            PosterLeftTop = Point3(WallTopLeft + Point3( PosterLeftTop.x, PosterLeftTop.y, 0) + margin)
            PosterRightTop = Point3(PosterLeftTop + Point3( document_x , 0, 0) )
            PosterLeftBottom = Point3(PosterLeftTop + Point3( 0 , document_y, 0) )
            PosterRightBottom = Point3(PosterLeftTop + Point3( document_x , document_y, 0) )
        # Build the card itself from the computed corners.
        Wall.__init__(self, LeftBottom=PosterLeftBottom, RightBottom=PosterRightBottom, \
                LeftTop=PosterLeftTop, RightTop=PosterRightTop )
        self.tex = loader.loadTexture(document)
        self.card.setTexture(self.tex)
        if document.split('.')[-1] in [ 'avi', 'mp4' ]:
            # Video: match UVs to the movie texture and sync it to its audio.
            self.card.setTexScale(TextureStage.getDefault(), self.tex.getTexScale())
            self.media = loader.loadSfx(document)
            self.tex.synchronizeTo(self.media)
        else:
            self.tex.setMinfilter(Texture.FTLinearMipmapLinear)
        # Poster activation area (used for starting the playing of a movie)
        # For activation area
        FurtherLeftTop = Point3(PosterLeftTop + active)
        FurtherRightTop = Point3(PosterRightTop + active)
        FurtherLeftBottom = Point3(PosterLeftBottom + active)
        FurtherRightBottom= Point3(PosterRightBottom + active)
        # Axis-aligned bounding box over the poster and its offset copy.
        PointList = [PosterLeftTop, PosterRightTop, PosterLeftBottom, PosterRightBottom, \
                FurtherLeftTop, FurtherRightTop, FurtherLeftBottom, FurtherRightBottom]
        self.x_minus = min([item.x for item in PointList])
        self.x_plus = max([item.x for item in PointList])
        self.y_minus = min([item.y for item in PointList])
        self.y_plus = max([item.y for item in PointList])
        self.z_minus = min([item.z for item in PointList])
        self.z_plus = max([item.z for item in PointList])
    # BUG FIX: the three placeholder methods below were declared without
    # `self`, so calling any of them on a Poster instance raised TypeError.
    def addPage(self):
        """Placeholder: add a page to a multi-page poster (unimplemented)."""
        pass
    def addFrame(self):
        """Placeholder: add a frame (unimplemented)."""
        pass
    def flipPage(self):
        """Placeholder: flip to the next page (unimplemented)."""
        pass
class MazeWall:
    """A single axis-aligned, textured wall used to build maze rooms and tunnels.

    The wall is placed from an ``origin`` point, a 2D size ``dim``, a named
    ``orientation`` (which room side it represents) and a ``reference``
    (which point of the wall ``origin`` refers to).  Textures are tiled with
    PIL into per-instance files under ``temp/`` so that transparent windows
    (doorways) can later be cut into the alpha map.

    NOTE(review): depends on module-level names defined elsewhere in this
    file/environment (``Wall``, ``Poster``, ``Entrance``, ``MazeTunnel``,
    Panda3D's ``loader``, PIL's ``Image``/``ImageDraw``) -- cannot run in
    isolation.  The ``Point2``/``Point3`` defaults in the signatures are
    shared mutable objects; safe only if callers never mutate them -- TODO
    confirm.
    """
    def __init__(self, origin=Point3(0,50,0), dim=Point2(120,120), orientation='front', reference='center' ):
        # Entrance objects hung on this wall (see hangExit/hangTunnel).
        self.exits = []
        # Possible references of the wall: center and corners
        references = [(0, ( 0., 0.), 'xy', 'center' ),
                      (1, ( 1., 1.), 'x+y+', 'LeftBottom' ),
                      (2, ( 1.,-1.), 'x+y-', 'LeftTop' ),
                      (3, ( -1., 1.), 'x-y+', 'RightBottom'),
                      (4, ( -1, -1.), 'x-y-', 'RightTop' )]
        # Find the index of the reference (any alias in the tuple matches).
        for item in references:
            if reference in item:
                reference = item[0]
        # Possible orientations of the wall: corresponding to the walls they are used to draw for a room.
        orientations = [(0, 'x-', 'left' ),
                        (1, 'x+', 'right' ),
                        (2, 'y-', 'back' ),
                        (3, 'y+', 'front' ),
                        (4, 'z-', 'bottom'),
                        (5, 'z+', 'top' )]
        # Find the index of the orientation (any alias in the tuple matches).
        for item in orientations:
            if orientation in item:
                orientation = item[0]
        # Delta is half the distance from center
        D = Point2( dim.x/2., dim.y/2.)
        d = Point2( references[reference][1] )
        delta2d = Point2(D.x*d.x, D.y*d.y)
        # The four possible corners in 2d, shifted so that the requested
        # reference point lands on the (2d) origin:
        LeftBottom2d = Point2(0,0) + Point2(-dim.x/2., -dim.y/2.) + delta2d
        RightBottom2d = Point2(0,0) + Point2( dim.x/2., -dim.y/2.) + delta2d
        LeftTop2d = Point2(0,0) + Point2(-dim.x/2., dim.y/2.) + delta2d
        RightTop2d = Point2(0,0) + Point2( dim.x/2., dim.y/2.) + delta2d
        corners2d = [LeftBottom2d, RightBottom2d, LeftTop2d, RightTop2d]
        # Which 3D axes the 2d deltas map onto depends on the card's orientation:
        if orientation == 0: x = 'y+'; y = 'z+'
        if orientation == 1: x = 'y-'; y = 'z+'
        if orientation == 2: x = 'x-'; y = 'z+'
        if orientation == 3: x = 'x+'; y = 'z+'
        if orientation == 4: x = 'x+'; y = 'y+'
        if orientation == 5: x = 'x+'; y = 'y-'
        # Saved for use in method hangPoster
        self.x = x; self.y = y
        # Saved for use in method hangExit
        self.orientation = orientations[orientation][2]
        # Saved for use in method cutWindow
        self.dim = dim
        # The four corners in 3d, all starting at the origin:
        corners3d = [Point3(0,0,0)+origin, Point3(0,0,0)+origin, Point3(0,0,0)+origin, Point3(0,0,0)+origin]
        if x == 'x+':
            for i in range(4): corners3d[i] += Point3(corners2d[i].x, 0., 0.)
        if x == 'x-':
            for i in range(4): corners3d[i] -= Point3(corners2d[i].x, 0., 0.)
        if x == 'y+':
            for i in range(4): corners3d[i] += Point3(0., corners2d[i].x, 0.)
        if x == 'y-':
            for i in range(4): corners3d[i] -= Point3(0., corners2d[i].x, 0.)
        if y == 'y+':
            for i in range(4): corners3d[i] += Point3(0., corners2d[i].y, 0.)
        if y == 'y-':
            for i in range(4): corners3d[i] -= Point3(0., corners2d[i].y, 0.)
        if y == 'z+':
            for i in range(4): corners3d[i] += Point3(0., 0., corners2d[i].y)
        self.LeftBottom = Point3(corners3d[0])
        self.RightBottom = Point3(corners3d[1])
        self.LeftTop = Point3(corners3d[2])
        self.RightTop = Point3(corners3d[3])
        # The actual renderable geometry.
        self.wall = Wall( self.LeftBottom, self.RightBottom, self.LeftTop, self.RightTop )
        # Posters hung on this wall, keyed by document filename.
        self.posters = {}
        # Per-instance texture/alpha file paths (id(self) keeps them unique).
        self.texture = os.path.join('temp', str(id(self))+'_tiled.png')
        self.alpha = os.path.join('temp', str(id(self))+'_alpha.png')
        self.addTexture( Tile=orientations[orientation][2]+'.png', repeat=(10,10), bg_dim = (dim.x*10,dim.y*10) )
        self.addAlpha()
        self.updateTexture()
    def addTexture(self, Tile='default.png', repeat=(10,10), bg_dim=(0,0)):
        """Tile the given texture image into this wall's texture file."""
        Tile = os.path.join('textures', Tile)
        # In case the Tile file does not exist, create and use default.png, so that it could be changed later.
        if not os.path.isfile(os.path.join(os.getcwd(),Tile)):
            im = Image.new('RGB', (150,150)); draw = ImageDraw.Draw(im)
            draw.rectangle([(0, 0), (150, 150)], outline='white'); draw.text((55, 70), "Default")
            im.save(os.path.join('textures', 'default.png'), 'PNG'); Tile = os.path.join('textures', 'default.png')
        # Reading the tile
        img=Image.open(Tile,'r')
        img_w, img_h = img.size
        # If the wall's dimensions in pixels aren't specified
        if bg_dim[0] == 0 or bg_dim[1] == 0:
            # We tile by creating the image of the size it would take if you repeat the tiles that many times.
            background = Image.new("RGB", ( img_w*repeat[0], img_h*repeat[1]), (255,255,255))
            for i in range(repeat[0]):
                for j in range(repeat[1]):
                    background.paste(img,(i*img_w, j*img_h))
        else:
            # We tile into the given pixel dimensions:
            background = Image.new("RGB", (int(bg_dim[0]), int(bg_dim[1])), (255,255,255))
            for i in range(int(bg_dim[0]/img_w+1)):
                for j in range(int(bg_dim[1]/img_h+1)):
                    background.paste(img,(i*img_w,j*img_h))
        # We add a one-pixel white outline.
        # NOTE(review): the outline is sized from ``repeat`` even when the
        # bg_dim branch was taken above -- confirm that is intended.
        draw = ImageDraw.Draw(background)
        draw.rectangle([(0, 0), (img_w*repeat[0], img_h*repeat[1])], outline='white')
        background.save(self.texture, 'PNG')
    def addAlpha(self):
        """Create the all-white (fully opaque) transparency map for the wall."""
        im = Image.new("RGB", (int(self.dim.x)*10, int(self.dim.y)*10), (255,255,255)); draw = ImageDraw.Draw(im)
        im.save(self.alpha)
    def cutWindow(self, window=(Point2(25,20),Point2(100,100))):
        """Cut a transparent rectangular window (doorway) into the wall.

        ``window`` is a (top-left, bottom-right) pair in wall units; the
        matching area of the alpha map is painted black (transparent).
        """
        # Reading the alpha tile
        im = Image.open(self.alpha,'r')
        img_w, img_h = im.size
        # How many pixels per 'blender unit'
        ratio_x, ratio_y = img_w/self.dim.x, img_h/self.dim.y
        draw = ImageDraw.Draw(im)
        draw.rectangle([(int(ratio_x*window[0].x), int(ratio_y*window[0].y)), \
                        (int(ratio_x*window[1].x-1), int(ratio_y*window[1].y-1))], fill="#000000")
        im.save(self.alpha, "PNG")
        self.updateTexture()
    def updateTexture(self):
        """(Re)load the wall's texture + alpha map onto the rendered card."""
        # Try to add default textures depending on the orientation of the wall.
        # NOTE(review): the bare except silently swallows *all* load errors,
        # including typos in paths -- consider narrowing.
        try:
            self.wall.tex = loader.loadTexture(self.texture, self.alpha)
            self.wall.tex.reload()
            self.wall.card.setTexture(self.wall.tex)
            self.wall.card.setTransparency(True)
            self.wall.card.setTransparency(TransparencyAttrib.MBinary)
        except:
            pass
    def newTexture(self, Tile='default.png', repeat=(10,10), bg_dim=(0,0)):
        """Copy ``Tile`` into textures/ if needed, then retile and reload.

        NOTE(review): uses ``os.system('cp ...')`` -- not portable and unsafe
        if the path contains shell metacharacters; shutil.copy would be safer.
        """
        if not os.path.isfile('textures/%s' % Tile.split('/')[-1]):
            os.system('cp %s textures/' % Tile)
        else:
            # Only re-copy when the file size changed (cheap staleness check).
            if not os.stat('textures/%s' % Tile.split('/')[-1]).st_size == os.stat(Tile).st_size:
                os.system('cp %s textures/' % Tile)
        Tile = Tile.split('/')[-1]
        self.addTexture(Tile=Tile, repeat=repeat, bg_dim=bg_dim)
        self.updateTexture()
    def hangPoster(self, document='hi-res.png', offset=Point2(1,1), scale=1.0, aspect='3:4'):
        """Hang a single Poster (image or movie) on this wall at ``offset``."""
        self.posters[document] = Poster( self.LeftTop, offset, (self.x, self.y), document, scale, aspect )
    def hangPosters(self, folder='.', starting=1, rows=3, cols=4, scale=1.0, spacing=0.25, offset=Point2(0,0)):
        """Hang a rows x cols grid of numbered page images from ``folder``.

        Pages are files named ``<n>.png`` beginning at ``starting``; missing
        files are skipped but still consume a grid slot.
        """
        # ( With an assumption that all the pages of the book are equal rectangles )
        # Get the pixel size of one page
        img=Image.open(os.path.join(folder,str(starting)+'.png'),'r')
        img_w, img_h = img.size
        # Get the dimensions of a single page in blender units
        doc_x, doc_y = scale*img_w/100, scale*img_h/100
        # Compute the dimensions of the resulting mat, with spacing
        rez_x = doc_x * cols + spacing * (cols + 1)
        rez_y = doc_y * rows + spacing * (rows + 1)
        # Return an informative error if the size exceeds dimensions of the wall
        if self.dim.x < rez_x or self.dim.y < rez_y:
            print 'Either height or width exceed the corresponding dimension of the wall.'
            print 'Dimensions of the wall: x=%s, y=%s' % (self.dim.x, self.dim.y)
            print 'Dimensions of the mat: x=%s, y=%s' % (rez_x, rez_y)
        else:
            # Compute the upper limits for coordinates within which this point can be:
            max_x = self.dim.x - rez_x
            max_y = self.dim.y - rez_y
            # If LeftTop corner of the mat is not outside the limits:
            if 0.0 <= offset.x < max_x and 0.0 <= offset.y < max_y:
                print 'Adding the pages to a wall.'
                I = offset + Point2(spacing, spacing)
                D = Point2(doc_x+spacing, doc_y+spacing)
                i = 0
                for row in range(rows):
                    for col in range(cols):
                        fn = os.path.join(folder,str(starting+i)+'.png')
                        pos = I + Point2(col*D.x,0) + Point2(0,row*D.y)
                        if os.path.isfile(fn):
                            self.hangPoster(fn, pos, scale)
                        i += 1
            else:
                print 'Although dimensions are right, your offset point is out of range.'
                print 'Range allows for offset: 0.0 < x < %s, 0.0 < y < %s' % (max_x, max_y)
                print 'The actual dimensions of the offset: x=%s, y=%s' % (offset.x, offset.y)
    def hangExit(self, offset=Point2(1,1), dim=Point2(5,8)):
        """Cut a doorway into the wall and register/return its Entrance."""
        self.cutWindow((offset, Point2(offset+dim)))
        e = Entrance( self.LeftTop, offset, dim, self.orientation, kind='exit')
        self.exits.append( e )
        return e
    def hangTunnel(self, offset=Point2(1,1), dim=Point2(5,8), questions = [('problem1.png', 'answer1'), \
                                                                           ('problem2.png', 'answer2'), \
                                                                           ('problem3.png', 'answer3'), \
                                                                           ]):
        """Cut an exit and attach a question-gated MazeTunnel behind it.

        NOTE(review): hangExit() already appends the exit to self.exits, so
        the append below stores it a second time -- confirm intended.
        """
        exit = self.hangExit(offset, dim)
        self.exits.append(exit)
        tunnel = MazeTunnel(exit, questions)
        return tunnel
class MazeTunnel:
    """A question-gated corridor connecting an Entrance to a further area.

    The tunnel is a box of four MazeWalls whose walkable length grows as the
    player answers questions: each unanswered question blocks 10 units of
    the corridor.  Axis-aligned bounding limits (``x_minus``..``z_plus``)
    and an "active zone" (where the current question pops up) are kept in
    sync by update_limits().
    """
    def __init__(self, entrance, questions=[('problem1.png', 'answer1')]):
        # Since tunnel is a container of questions, it is appropriate to start estimating the length
        # depending on the questions answered, and only add the margin 'epsilon' at the end of tunnel
        # once all the questions had been answered.
        # At this moment, the questions will have to be passed to constructor function. However, it is preferable to have flexible 'addQuestion' method.
        # The problem is that all the later rooms' coordinates will depend on the tunnel's length, so the part of maze that follows that tunnel would have
        # to be all updated.
        # Here, I added 'invisible question', as it is not actually appearing, after all: in this implementation,
        # we let the user through the tunnel when the current answer is equal to the number of questions.
        # NOTE(review): ``questions`` is a mutable default and ``+=`` mutates
        # it in place, so repeated no-arg construction keeps appending the
        # sentinel -- confirm callers always pass their own list.
        questions += [('invisible', 'question')]
        self.questions = questions
        # 10 units of corridor per question (incl. the invisible sentinel).
        self.length = len(questions)*10
        # Extra walkable slack added at the tunnel ends.
        self.epsilon = 5
        # Unit vector (as a Point3) giving the tunnel's axis and direction.
        self.which = Point3(0,0,0)
        # LEFT RIGHT BOTTOM TOP
        if entrance.direction == 'left': # x+
            self.directions = ['front','back','bottom','top']
            self.left = MazeWall(entrance.ExitRightTop, Point2(self.length, entrance.dim.y), orientation='front', reference='LeftTop')
            self.right = MazeWall(entrance.ExitLeftTop, Point2(self.length, entrance.dim.y), orientation='back', reference='RightTop')
            self.bottom = MazeWall(entrance.ExitRightBottom, Point2(self.length, entrance.dim.x), orientation='bottom', reference='LeftTop')
            self.top = MazeWall(entrance.ExitRightTop, Point2(self.length, entrance.dim.x), orientation='top', reference='LeftBottom')
            self.exit = Entrance(entrance.ExitLeftTop+Point3(self.length,0,0), offset=Point2(), dim=entrance.dim, direction=entrance.direction, kind='entrance')
            self.dim = Point3(self.length, entrance.dim.x, entrance.dim.y)
            self.which.x = 1
        if entrance.direction == 'right': # x-
            self.directions = ['back','front','bottom','top']
            self.left = MazeWall(entrance.ExitRightTop, Point2(self.length,entrance.dim.y), orientation='back', reference='LeftTop')
            self.right = MazeWall(entrance.ExitLeftTop, Point2(self.length,entrance.dim.y), orientation='front', reference='RightTop')
            self.bottom = MazeWall(entrance.ExitRightBottom, Point2(self.length, entrance.dim.x), orientation='bottom', reference='RightBottom')
            self.top = MazeWall(entrance.ExitRightTop, Point2(self.length, entrance.dim.x), orientation='top', reference='RightTop')
            self.exit = Entrance(entrance.ExitLeftTop+Point3(-self.length,0,0), offset=Point2(), dim=entrance.dim, direction=entrance.direction, kind='entrance')
            self.dim = Point3(self.length, entrance.dim.x, entrance.dim.y)
            self.which.x = -1
        if entrance.direction == 'back': # y+
            self.directions = ['left','right','bottom','top']
            self.left = MazeWall(entrance.ExitRightTop, Point2(self.length,entrance.dim.y), orientation='left', reference='LeftTop')
            self.right = MazeWall(entrance.ExitLeftTop, Point2(self.length,entrance.dim.y), orientation='right', reference='RightTop')
            self.bottom = MazeWall(entrance.ExitRightBottom, Point2(entrance.dim.x, self.length), orientation='bottom', reference='LeftBottom')
            self.top = MazeWall(entrance.ExitLeftTop, Point2(entrance.dim.x, self.length), orientation='top', reference='RightTop')
            self.exit = Entrance(entrance.ExitLeftTop+Point3(0,self.length,0), offset=Point2(), dim=entrance.dim, direction=entrance.direction, kind='entrance')
            self.dim = Point3(entrance.dim.x, self.length, entrance.dim.y)
            self.which.y = 1
        if entrance.direction == 'front': # y-
            self.directions = ['right','left','bottom','top']
            self.left = MazeWall(entrance.ExitRightTop, Point2(self.length,entrance.dim.y), orientation='right', reference='LeftTop')
            self.right = MazeWall(entrance.ExitLeftTop, Point2(self.length,entrance.dim.y), orientation='left', reference='RightTop')
            self.bottom = MazeWall(entrance.ExitRightBottom, Point2(entrance.dim.x,self.length), orientation='bottom', reference='RightTop')
            self.top = MazeWall(entrance.ExitLeftTop, Point2(entrance.dim.x,self.length), orientation='top', reference='LeftBottom')
            self.exit = Entrance(entrance.ExitLeftTop+Point3(0,-self.length,0), offset=Point2(), dim=entrance.dim, direction=entrance.direction, kind='entrance')
            self.dim = Point3(entrance.dim.x, self.length, entrance.dim.y)
            self.which.y = -1
        if entrance.direction == 'bottom': # z+
            self.directions = ['left','right','front','back']
            self.left = MazeWall(entrance.ExitLeftBottom, Point2(entrance.dim.y,self.length), orientation='left', reference='LeftBottom')
            self.right = MazeWall(entrance.ExitRightTop, Point2(entrance.dim.y,self.length), orientation='right', reference='LeftBottom')
            self.bottom = MazeWall(entrance.ExitRightTop, Point2(entrance.dim.x,self.length), orientation='front', reference='RightBottom')
            self.top = MazeWall(entrance.ExitRightBottom, Point2(entrance.dim.x, self.length), orientation='back', reference='LeftBottom')
            self.exit = Entrance(entrance.ExitLeftTop+Point3(0,0,self.length), offset=Point2(), dim=entrance.dim, direction=entrance.direction, kind='entrance')
            self.dim = Point3(entrance.dim.x, entrance.dim.y, self.length)
            self.which.z = 1
        if entrance.direction == 'top':
            self.directions = ['left','right','back','front']
            self.left = MazeWall(entrance.ExitLeftTop, Point2(entrance.dim.y,self.length), orientation='left', reference='LeftTop')
            self.right = MazeWall(entrance.ExitRightTop, Point2(entrance.dim.y,self.length), orientation='right', reference='RightTop')
            self.bottom = MazeWall(entrance.ExitLeftTop, Point2(entrance.dim.x,self.length), orientation='back', reference='RightTop')
            self.top = MazeWall(entrance.ExitLeftBottom, Point2(entrance.dim.x, self.length), orientation='front', reference='LeftTop')
            self.exit = Entrance(entrance.ExitLeftTop+Point3(0,0,-self.length), offset=Point2(), dim=entrance.dim, direction=entrance.direction, kind='entrance')
            self.dim = Point3(entrance.dim.x, entrance.dim.y, self.length)
            self.which.z = -1
        # Find the center of the area (midpoint of the two diagonal corners):
        total = entrance.ExitLeftTop + self.exit.ExitRightBottom
        self.center = Point3(total.x/2, total.y/2, total.z/2)
        self.update_limits()
    def update_limits(self, current=1):
        """Recompute walkable limits and the question-activation zone.

        Called once from the constructor (initializing ``current_problem`` to
        ``current``) and again after each correct answer (incrementing it).
        """
        # Update the number of problems solved in this tunnel.
        # NOTE(review): first call takes the except path because the
        # attribute does not exist yet; afterwards ``current`` is ignored.
        try:
            self.current_problem += 1
        except:
            self.current_problem = current
        # The answer the player must type for the current question.
        self.answer = self.questions[self.current_problem-1][1]
        remaining_problems = len(self.questions) - self.current_problem
        # Add epsilon only to the nearer end of the tunnel:
        self.x_minus = self.center.x - self.dim.x/2
        if self.which.x == 1: self.x_minus -= abs(self.which.x)*self.epsilon
        self.x_plus = self.center.x + self.dim.x/2
        if self.which.x ==-1: self.x_plus += abs(self.which.x)*self.epsilon
        self.y_minus = self.center.y - self.dim.y/2
        if self.which.y == 1: self.y_minus -= abs(self.which.y)*self.epsilon
        self.y_plus = self.center.y + self.dim.y/2
        if self.which.y ==-1: self.y_plus += abs(self.which.y)*self.epsilon
        self.z_minus = self.center.z - self.dim.z/2
        if self.which.z == 1: self.z_minus -= abs(self.which.z)*self.epsilon
        self.z_plus = self.center.z + self.dim.z/2
        if self.which.z ==-1: self.z_plus += abs(self.which.z)*self.epsilon
        # Correction depending on questions answered (10 units per problem):
        if self.which.x == 1: self.x_plus -= remaining_problems*10.
        if self.which.x ==-1: self.x_minus += remaining_problems*10.
        if self.which.y == 1: self.y_plus -= remaining_problems*10.
        if self.which.y ==-1: self.y_minus += remaining_problems*10.
        if self.which.z == 1: self.z_plus -= remaining_problems*10.
        if self.which.z ==-1: self.z_minus += remaining_problems*10.
        # If all of the problems had been solved, add epsilon to the further end of the tunnel:
        if self.current_problem == len(self.questions):
            if self.which.x ==-1: self.x_minus -= abs(self.which.x)*self.epsilon
            if self.which.x == 1: self.x_plus += abs(self.which.x)*self.epsilon
            if self.which.y ==-1: self.y_minus -= abs(self.which.y)*self.epsilon
            if self.which.y == 1: self.y_plus += abs(self.which.y)*self.epsilon
            if self.which.z ==-1: self.z_minus -= abs(self.which.z)*self.epsilon
            if self.which.z == 1: self.z_plus += abs(self.which.z)*self.epsilon
        # For active zone (zone in which the question is activated)
        # Start with copying the coordinates of the tunnel zone:
        self.active_x_minus = copy(self.x_minus)
        self.active_x_plus = copy(self.x_plus)
        self.active_y_minus = copy(self.y_minus)
        self.active_y_plus = copy(self.y_plus)
        self.active_z_minus = copy(self.z_minus)
        self.active_z_plus = copy(self.z_plus)
        # If not all problems of the tunnel are solved, then, depending on the direction of the tunnel, modify
        if self.current_problem < len(self.questions):
            # the limits along that axis by either adding or subtracting from either positive or negative tunnel limit.
            if self.which == Point3(-1, 0, 0): # x-
                self.active_x_minus = copy(self.x_minus)
                self.active_x_plus = self.active_x_minus + 10
            if self.which == Point3( 1, 0, 0): # x+
                self.active_x_plus = copy(self.x_plus)
                self.active_x_minus = self.active_x_plus - 10
            if self.which == Point3( 0,-1, 0): # y-
                self.active_y_minus = copy(self.y_minus)
                self.active_y_plus = self.active_y_minus + 10
            if self.which == Point3( 0, 1, 0): # y+
                self.active_y_plus = copy(self.y_plus)
                self.active_y_minus = self.active_y_plus - 10
            if self.which == Point3( 0, 0,-1): # z-
                self.active_z_minus = copy(self.z_minus)
                self.active_z_plus = self.active_z_minus + 10
            if self.which == Point3( 0, 0, 1): # z+
                self.active_z_plus = copy(self.z_plus)
                self.active_z_minus = self.active_z_plus - 10
        else:
            # Make arbitrary non-zero (cause zero very popular starting point), zero-volume point
            self.active_x_minus = self.active_x_plus
            self.active_y_minus = self.active_y_plus
            self.active_z_minus = self.active_z_plus
    def setTextures(self, Tile='default.png', repeat=(10,10), bg_dim=(0,0)):
        """Apply the same texture to all four tunnel walls."""
        self.left.newTexture(Tile=Tile, repeat=repeat, bg_dim=bg_dim)
        self.right.newTexture(Tile=Tile, repeat=repeat, bg_dim=bg_dim)
        self.bottom.newTexture(Tile=Tile, repeat=repeat, bg_dim=bg_dim)
        self.top.newTexture(Tile=Tile, repeat=repeat, bg_dim=bg_dim)
    def __repr__(self):
        # Bounding limits only -- handy when debugging area membership.
        return str((self.x_minus, self.x_plus, self.y_minus, self.y_plus, self.z_minus, self.z_plus))
class MazeRoom:
    """A six-walled box room attached to an Entrance (or a tunnel's exit).

    The room is positioned so that a window cut into the appropriate wall
    coincides with the entrance opening; ``offset`` places that opening on
    the wall, and ``music`` names the background track for the area.
    """
    def __init__(self, entrance, dim=Point3(100,100,100), offset=Point2(2,2), music='basic.ogg'):
        self.music_file = music
        # A tunnel can be passed directly; we then attach to its far exit.
        if isinstance(entrance,MazeTunnel):
            entrance = entrance.exit
        origin = entrance.ExitLeftTop
        # Shift the origin so the entrance opening lands at ``offset`` on the
        # room wall; the reference corner depends on the approach direction.
        if entrance.direction == 'left':
            reference = 'LeftBackTop'
            origin -= Point3(0, offset.x, -offset.y)
        if entrance.direction == 'right':
            reference = 'RightFrontTop'
            origin -= Point3(0, -offset.x, -offset.y)
        if entrance.direction == 'back':
            reference = 'RightBackTop'
            origin -= Point3(-offset.x, 0, -offset.y)
        if entrance.direction == 'front':
            reference = 'LeftFrontTop'
            origin -= Point3(offset.x, 0, -offset.y)
        if entrance.direction == 'bottom':
            reference = 'LeftFrontBottom'
            origin -= Point3(offset.x, -offset.y, 0)
        if entrance.direction == 'top':
            reference = 'LeftBackTop'
            origin -= Point3(offset.x, offset.y, 0)
        # Possible references of the room: center and corners # \/ Diametrically opposite to: \/
        references = [(0, ( 0., 0., 0.), 'xyz', 'center' ),
                      (1, ( 1., 1., 1.), 'x+y+z+', 'LeftBackBottom' ), # 'RightFrontTop'
                      (2, ( 1., 1.,-1.), 'x+y+z-', 'LeftBackTop' ), # 'RightFrontBottom'
                      (3, ( 1.,-1., 1.), 'x+y-z+', 'LeftFrontBottom' ), # 'RightBackTop'
                      (4, ( 1.,-1.,-1.), 'x+y-z-', 'LeftFrontTop' ), # 'RightBackBottom'
                      (5, (-1., 1., 1.), 'x-y+z+', 'RightBackBottom' ), # 'LeftFrontTop'
                      (6, (-1., 1.,-1.), 'x-y+z-', 'RightBackTop' ), # 'LeftFrontBottom'
                      (7, (-1.,-1., 1.), 'x-y-z+', 'RightFrontBottom'), # 'LeftBackTop'
                      (8, (-1.,-1.,-1.), 'x-y-z-', 'RightFrontTop' )] # 'LeftBackBottom'
        # Finding index of the reference
        for item in references:
            if reference in item:
                reference = item[0]
        # if reference == 6: origin += Point3( offset.x, 0, offset.y )
        # Delta is half the distance from center
        D = Point3( dim.x/2., dim.y/2., dim.z/2.)
        d = Point3( references[reference][1] )
        delta = Point3(D.x*d.x, D.y*d.y, D.z*d.z)
        # Room center in world coordinates.
        position = Point3(origin + delta)
        # Drawing six sides:
        self.left = MazeWall(position - Point3(dim.x/2.,0,0), Point2(dim.y,dim.z), 'left')
        self.right = MazeWall(position + Point3(dim.x/2.,0,0), Point2(dim.y,dim.z), 'right')
        self.back = MazeWall(position - Point3(0,dim.y/2.,0), Point2(dim.x,dim.z), 'back')
        self.front = MazeWall(position + Point3(0,dim.y/2.,0), Point2(dim.x,dim.z), 'front')
        self.bottom = MazeWall(position - Point3(0,0,dim.z/2.), Point2(dim.x,dim.y), 'bottom')
        self.top = MazeWall(position + Point3(0,0,dim.z/2.), Point2(dim.x,dim.y), 'top')
        # Define limits for the MazeRoom: since walls are parallel to axes, it's easy:
        self.x_minus = position.x - dim.x/2
        self.x_plus = position.x + dim.x/2
        self.y_minus = position.y - dim.y/2
        self.y_plus = position.y + dim.y/2
        self.z_minus = position.z - dim.z/2.
        self.z_plus = position.z + dim.z/2.
        # Cutting a window in the appropriate wall, to coincide with the entrance.
        if entrance.direction == 'left': self.left.cutWindow((offset, Point2(offset+entrance.dim)))
        if entrance.direction == 'right': self.right.cutWindow((offset, Point2(offset+entrance.dim)))
        if entrance.direction == 'back': self.back.cutWindow((offset, Point2(offset+entrance.dim)))
        if entrance.direction == 'front': self.front.cutWindow((offset, Point2(offset+entrance.dim)))
        if entrance.direction == 'bottom': self.bottom.cutWindow((offset, Point2(offset+entrance.dim)))
        if entrance.direction == 'top': self.top.cutWindow((offset, Point2(offset+entrance.dim)))
    def __repr__(self):
        # Bounding limits only -- handy when debugging area membership.
        return str((self.x_minus, self.x_plus, self.y_minus, self.y_plus, self.z_minus, self.z_plus))
    def setTextures(self, Tile='default.png', repeat=(10,10), bg_dim=(0,0)):
        """Apply the same texture to all six room walls."""
        self.left.newTexture(Tile=Tile, repeat=repeat, bg_dim=bg_dim)
        self.right.newTexture(Tile=Tile, repeat=repeat, bg_dim=bg_dim)
        self.back.newTexture(Tile=Tile, repeat=repeat, bg_dim=bg_dim)
        self.front.newTexture(Tile=Tile, repeat=repeat, bg_dim=bg_dim)
        self.bottom.newTexture(Tile=Tile, repeat=repeat, bg_dim=bg_dim)
        self.top.newTexture(Tile=Tile, repeat=repeat, bg_dim=bg_dim)
class StudyMaze(DirectObject):
    """Top-level Panda3D application object: builds the maze and drives the
    camera, collision limits, question pop-ups, music and movie posters.

    NOTE(review): relies on Panda3D's implicit globals (``base``, ``loader``,
    ``taskMgr``, ``render2d``) and on module-level names defined elsewhere in
    this file (``rootNode``, ``textObject``, ``MazeTunnel``).
    """
    def __init__(self):
        # Cleanup the temporary directory before execution.
        # NOTE(review): shelling out 'rm -rf' on a cwd-derived path is
        # dangerous and non-portable; shutil would be safer.
        os.system('rm -rf %s*' % os.path.join(os.getcwd(), 'temp/') )
        # List of all the areas (MazeRooms, MazeTunnels) in the maze.
        self.mazeAreas = []
        self.margin = 2.0 # (Minimum distance to a wall.)
        # Saving the current state
        self.question_displayed = False
        # Load the 'empty room' model.
        self.room = loader.loadModel("models/emptyroom")
        self.room.reparentTo(rootNode)
        # Background music / active movie bookkeeping ('' means none yet).
        self.music_file = ''
        self.active_video = ''
        # ========================================================
        # >>>>>>>>>>>>>>>>> MAZE WALLS IMAGES >>>>>>>>>>>>>>>>>>>>
        # NOTE(review): execfile runs arbitrary code from the mazes/ tree in
        # this scope (Python 2 only).
        execfile('mazes/root.py')
        # <<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<
        # ========================================================
        # Show framerate
        base.setFrameRateMeter(True)
        # Make the mouse invisible, turn off normal mouse controls
        base.disableMouse()
        props = WindowProperties()
        props.setCursorHidden(True)
        base.win.requestProperties(props)
        # Set the current viewing target: facing y+, from (0,0,0)
        self.focus = Vec3(0,0,0)
        self.heading = 0
        self.pitch = 0
        self.mousex = 0
        self.mousey = 0
        self.last = 0
        self.mousebtn = [0,0,0]
        # Start the camera control task:
        taskMgr.add(self.controlCamera, "camera-task")
        self.accept("escape", sys.exit, [0])
        self.accept("mouse1", self.setMouseBtn, [0, 1])
        self.accept("mouse1-up", self.setMouseBtn, [0, 0])
        self.accept("mouse2", self.setMouseBtn, [1, 1])
        self.accept("mouse2-up", self.setMouseBtn, [1, 0])
        self.accept("mouse3", self.setMouseBtn, [2, 1])
        self.accept("mouse3-up", self.setMouseBtn, [2, 0])
    def setMouseBtn(self, btn, value):
        """Record the pressed/released state (1/0) of mouse button ``btn``."""
        self.mousebtn[btn] = value
    def whichAreas(self, point3):
        """Return every maze area whose bounding box contains ``point3``."""
        results = []
        for area in self.mazeAreas:
            if area.x_minus < point3.x < area.x_plus and \
               area.y_minus < point3.y < area.y_plus and \
               area.z_minus < point3.z < area.z_plus:
                results.append(area)
        return results
    def display_question(self, source=('problem1.png', 'answer1')):
        """Show a question image on a 2D overlay card; remember its answer."""
        cmi = CardMaker("Problem")
        cmi.setFrame(-0.9,0.2,-0.9,0.9)
        self.card = render2d.attachNewNode(cmi.generate())
        tex = loader.loadTexture( source[0] )
        self.answer = source[1]
        self.card.setTexture(tex)
    def hide_question(self):
        """Remove the question overlay and entry widget, if present."""
        # NOTE(review): bare except doubles as the "nothing displayed yet"
        # check (self.card / self.q may not exist) -- consider hasattr.
        try:
            if self.card:
                self.card.removeNode()
                self.question_displayed = False
            if self.q:
                self.q.destroy()
        except:
            pass
    def controlCamera(self, task):
        """Per-frame task: mouse-look, movement, area clamping, questions,
        music and movie-poster activation.  Returns Task.cont to reschedule.
        """
        # figure out how much the mouse has moved (in pixels)
        md = base.win.getPointer(0)
        x = md.getX()
        y = md.getY()
        # Re-center the pointer; apply the delta to heading/pitch.
        if base.win.movePointer(0, 100, 100):
            self.heading = self.heading - (x - 100) * 0.2
            self.pitch = self.pitch - (y - 100) * 0.2
        # limits up/down movement of the camera
        if (self.pitch < -60): self.pitch = -60
        if (self.pitch > 60): self.pitch = 60
        # Orient the camera, and get its forward vector for movement.
        base.camera.setHpr(self.heading,self.pitch,0)
        dir = base.camera.getMat().getRow3(1)
        # Frame-rate independent forward/back movement.
        elapsed = task.time - self.last
        if (self.last == 0): elapsed = 0
        if (self.mousebtn[0]):
            self.focus = self.focus + dir * elapsed*30
        if (self.mousebtn[1]) or (self.mousebtn[2]):
            self.focus = self.focus - dir * elapsed*30
        base.camera.setPos(self.focus - (dir*5))
        # >>>>>>>>>>>>>>>>> MAZE AREAS' LIMITS >>>>>>>>>>>>>>>>>>>
        # Get current camera position
        point = Point3(base.camera.getX(), base.camera.getY(), base.camera.getZ())
        # Check which area we are in, assuming single-area-presence for now:
        # // Later we will set priorities, in the cases of overlapping limits of
        # a MazeRoom and limits of a MazeTunnel. //
        # Callback for the answer entry: on a correct answer, extend the
        # tunnel limits and dismiss the question.
        def setText(textEntered):
            if textEntered == self.tunnel.answer: # i.e., if answer is correct:
                try:
                    self.tunnel.update_limits()
                    self.hide_question()
                except:
                    pass
            textObject.setText(textEntered)
        # Function that clears the answer field.
        def clearText():
            self.q.enterText('')
        # Set the limits of that room:
        # NOTE(review): the enclosing bare except silently disables *all*
        # clamping/question logic on any error -- consider narrowing.
        try:
            AL = self.whichAreas(point)
            # Logically, if we are in more than 1 area, we should let the movement along the axis of the tunnel.
            # Determining the axis:
            if len(AL) > 1:
                pass
            elif len(AL) == 1:
                for item in AL:
                    if isinstance(item, MazeTunnel):
                        # Inside a tunnel: free movement along its axis.
                        free = item.which
                        if (item.active_x_minus < point.x < item.active_x_plus) and \
                           (item.active_y_minus < point.y < item.active_y_plus) and \
                           (item.active_z_minus < point.z < item.active_z_plus):
                            if not self.question_displayed:
                                # Display question
                                self.tunnel = item
                                self.q = DirectEntry(width=12, text = "" ,scale=.05, pos=(-0.4, 0, -0.7), \
                                        command=setText, initialText="Step away, and type answer!", numLines = 3,focus=1,focusInCommand=clearText)
                                self.display_question( item.questions[item.current_problem-1] )
                                self.question_displayed = True
                        else: # Either not in active zone anymore
                            self.hide_question()
                    else: # Or not in MazeTunnel anymore.
                        self.hide_question()
                        # Change music:
                        if self.music_file == '':
                            self.music_file = item.music_file
                            self.music = base.loader.loadSfx(self.music_file)
                            self.music.play()
                            self.music.setVolume(0.5)
                        if item.music_file != self.music_file:
                            self.music.stop()
                            self.music_file = item.music_file
                            if os.path.isfile(self.music_file):
                                self.music = base.loader.loadSfx(item.music_file)
                                self.music.play()
                                self.music.setVolume(0.5)
                        # Check whether we are in any Poster's activation area,
                        # and activate the poster whose area we are in.
                        WallList = [item.left, item.right, item.back, item.front, item.bottom, item.top]
                        for wall in WallList:
                            if len(wall.posters):
                                for poster in wall.posters.values():
                                    if poster.document.split('.')[-1] in [ 'avi', 'mp4' ]:
                                        if (poster.x_minus < point.x < poster.x_plus) and \
                                           (poster.y_minus < point.y < poster.y_plus) and \
                                           (poster.z_minus < point.z < poster.z_plus):
                                            if self.active_video == '':
                                                self.active_video = poster.media
                                            if id(self.active_video) != id(poster.media):
                                                self.active_video.stop()
                                                self.active_video = poster.media
                                            if self.active_video.getTime() > 0.0:
                                                self.active_video.setTime(0.0)
                                            self.active_video.play()
                        # Rooms allow no free axis.
                        free = Point3(0,0,0)
                AL = AL[0]
                if self.music_file != '':
                    self.accept("z", self.music.stop) # z - for 'sleep zzz'
                if self.active_video != '':
                    self.accept("x", self.active_video.stop) # x - for 'cross-out'
                    self.accept("c", self.active_video.play) # c - for 'continue'
                # Clamp the camera inside the area, keeping self.margin from
                # each wall, except along a tunnel's free axis.
                left_lim = AL.x_minus + self.margin
                right_lim = AL.x_plus - self.margin
                back_lim = AL.y_minus + self.margin
                front_lim = AL.y_plus - self.margin
                bottom_lim = AL.z_minus + self.margin
                top_lim = AL.z_plus - self.margin
                if ( point.x < left_lim ) and not free.x: base.camera.setX(left_lim )
                if ( point.x > right_lim ) and not free.x: base.camera.setX(right_lim )
                if ( point.y < back_lim ) and not free.y: base.camera.setY(back_lim )
                if ( point.y > front_lim ) and not free.y: base.camera.setY(front_lim )
                if ( point.z < bottom_lim ) and not free.z: base.camera.setZ(bottom_lim )
                if ( point.z > top_lim ) and not free.z: base.camera.setZ(top_lim )
        except:
            pass
        # <<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<
        # Keep the orbit focus in front of the (possibly clamped) camera.
        self.focus = base.camera.getPos() + (dir*5)
        self.last = task.time
        return Task.cont
# --- Module entry point -------------------------------------------------
# Build the maze, flatten the static scene graph for rendering speed,
# create the on-screen text that echoes typed answers, and start the
# Panda3D main loop.
Maze = StudyMaze()
rootNode.clearModelNodes()
rootNode.flattenStrong()
# Backing text for the on-screen answer echo (updated by setText).
bk_text = ""
textObject = OnscreenText(text = bk_text, pos = (0.95,-0.95), scale = 0.07,fg=(1,0.5,0.5,1),align=TextNode.ACenter,mayChange=1)
run()
| [
[
1,
0,
0.0256,
0.001,
0,
0.66,
0,
135,
0,
1,
0,
0,
135,
0,
0
],
[
1,
0,
0.0265,
0.001,
0,
0.66,
0.0333,
810,
0,
1,
0,
0,
810,
0,
0
],
[
1,
0,
0.0275,
0.001,
0,
0.6... | [
"import direct.directbase.DirectStart",
"from panda3d.core import WindowProperties",
"from panda3d.core import TextNode",
"from panda3d.core import Point2, Point3, Vec3, Vec4",
"from direct.task.Task import Task",
"from direct.gui.OnscreenText import OnscreenText",
"from direct.showbase.DirectObject imp... |
# Maze Root
root = Entrance(Point3(0,-10,0), dim=Point2(0,0), direction='back')
for directory in sorted([d for d in os.listdir('mazes') if os.path.isdir(os.path.join('mazes',d))]):
guess = os.path.join(os.getcwd(),os.path.join(os.path.join('mazes',directory),'maze.py'))
if os.path.exists(guess):
# Todo: Read file, and replace all 'abc/def/ghi.xxx' into os.path.join(os.getcwd(), 'abc/def/ghi.xxx') before execution:
execfile(guess)
| [
[
14,
0,
0.3333,
0.1111,
0,
0.66,
0,
696,
3,
3,
0,
0,
554,
10,
3
],
[
6,
0,
0.7778,
0.5556,
0,
0.66,
1,
229,
3,
0,
0,
0,
0,
0,
10
],
[
14,
1,
0.6667,
0.1111,
1,
0.9... | [
"root = Entrance(Point3(0,-10,0), dim=Point2(0,0), direction='back')",
"for directory in sorted([d for d in os.listdir('mazes') if os.path.isdir(os.path.join('mazes',d))]):\n guess = os.path.join(os.getcwd(),os.path.join(os.path.join('mazes',directory),'maze.py'))\n if os.path.exists(guess):\n # Todo... |
XXX101_r1 = MazeRoom(root, dim=Point3(50,50,50), offset=Point2(25,25), music='mazes/XXX101/basic.ogg')
self.mazeAreas.append(XXX101_r1)
| [
[
14,
0,
0.5,
0.5,
0,
0.66,
0,
50,
3,
4,
0,
0,
65,
10,
3
],
[
8,
0,
1,
0.5,
0,
0.66,
1,
243,
3,
1,
0,
0,
0,
0,
1
]
] | [
"XXX101_r1 = MazeRoom(root, dim=Point3(50,50,50), offset=Point2(25,25), music='mazes/XXX101/basic.ogg')",
"self.mazeAreas.append(XXX101_r1)"
] |
"""
FCKeditor - The text editor for Internet - http://www.fckeditor.net
Copyright (C) 2003-2009 Frederico Caldeira Knabben
== BEGIN LICENSE ==
Licensed under the terms of any of the following licenses at your
choice:
- GNU General Public License Version 2 or later (the "GPL")
http://www.gnu.org/licenses/gpl.html
- GNU Lesser General Public License Version 2.1 or later (the "LGPL")
http://www.gnu.org/licenses/lgpl.html
- Mozilla Public License Version 1.1 or later (the "MPL")
http://www.mozilla.org/MPL/MPL-1.1.html
== END LICENSE ==
This is the integration file for Python.
"""
import cgi
import os
import re
import string
def escape(text, replace=string.replace):
"""Converts the special characters '<', '>', and '&'.
RFC 1866 specifies that these characters be represented
in HTML as < > and & respectively. In Python
1.5 we use the new string.replace() function for speed.
"""
text = replace(text, '&', '&') # must be done 1st
text = replace(text, '<', '<')
text = replace(text, '>', '>')
text = replace(text, '"', '"')
text = replace(text, "'", ''')
return text
# The FCKeditor class
class FCKeditor(object):
def __init__(self, instanceName):
self.InstanceName = instanceName
self.BasePath = '/fckeditor/'
self.Width = '100%'
self.Height = '200'
self.ToolbarSet = 'Default'
self.Value = '';
self.Config = {}
def Create(self):
return self.CreateHtml()
def CreateHtml(self):
HtmlValue = escape(self.Value)
Html = ""
if (self.IsCompatible()):
File = "fckeditor.html"
Link = "%seditor/%s?InstanceName=%s" % (
self.BasePath,
File,
self.InstanceName
)
if (self.ToolbarSet is not None):
Link += "&Toolbar=%s" % self.ToolbarSet
# Render the linked hidden field
Html += "<input type=\"hidden\" id=\"%s\" name=\"%s\" value=\"%s\" style=\"display:none\" />" % (
self.InstanceName,
self.InstanceName,
HtmlValue
)
# Render the configurations hidden field
Html += "<input type=\"hidden\" id=\"%s___Config\" value=\"%s\" style=\"display:none\" />" % (
self.InstanceName,
self.GetConfigFieldString()
)
# Render the editor iframe
Html += "<iframe id=\"%s\__Frame\" src=\"%s\" width=\"%s\" height=\"%s\" frameborder=\"0\" scrolling=\"no\"></iframe>" % (
self.InstanceName,
Link,
self.Width,
self.Height
)
else:
if (self.Width.find("%%") < 0):
WidthCSS = "%spx" % self.Width
else:
WidthCSS = self.Width
if (self.Height.find("%%") < 0):
HeightCSS = "%spx" % self.Height
else:
HeightCSS = self.Height
Html += "<textarea name=\"%s\" rows=\"4\" cols=\"40\" style=\"width: %s; height: %s;\" wrap=\"virtual\">%s</textarea>" % (
self.InstanceName,
WidthCSS,
HeightCSS,
HtmlValue
)
return Html
def IsCompatible(self):
if (os.environ.has_key("HTTP_USER_AGENT")):
sAgent = os.environ.get("HTTP_USER_AGENT", "")
else:
sAgent = ""
if (sAgent.find("MSIE") >= 0) and (sAgent.find("mac") < 0) and (sAgent.find("Opera") < 0):
i = sAgent.find("MSIE")
iVersion = float(sAgent[i+5:i+5+3])
if (iVersion >= 5.5):
return True
return False
elif (sAgent.find("Gecko/") >= 0):
i = sAgent.find("Gecko/")
iVersion = int(sAgent[i+6:i+6+8])
if (iVersion >= 20030210):
return True
return False
elif (sAgent.find("Opera/") >= 0):
i = sAgent.find("Opera/")
iVersion = float(sAgent[i+6:i+6+4])
if (iVersion >= 9.5):
return True
return False
elif (sAgent.find("AppleWebKit/") >= 0):
p = re.compile('AppleWebKit\/(\d+)', re.IGNORECASE)
m = p.search(sAgent)
if (m.group(1) >= 522):
return True
return False
else:
return False
def GetConfigFieldString(self):
sParams = ""
bFirst = True
for sKey in self.Config.keys():
sValue = self.Config[sKey]
if (not bFirst):
sParams += "&"
else:
bFirst = False
if (sValue):
k = escape(sKey)
v = escape(sValue)
if (sValue == "true"):
sParams += "%s=true" % k
elif (sValue == "false"):
sParams += "%s=false" % k
else:
sParams += "%s=%s" % (k, v)
return sParams
| [
[
8,
0,
0.0719,
0.1375,
0,
0.66,
0,
0,
1,
0,
0,
0,
0,
0,
0
],
[
1,
0,
0.15,
0.0063,
0,
0.66,
0.1667,
934,
0,
1,
0,
0,
934,
0,
0
],
[
1,
0,
0.1562,
0.0063,
0,
0.66,
... | [
"\"\"\"\nFCKeditor - The text editor for Internet - http://www.fckeditor.net\nCopyright (C) 2003-2009 Frederico Caldeira Knabben\n\n== BEGIN LICENSE ==\n\nLicensed under the terms of any of the following licenses at your\nchoice:",
"import cgi",
"import os",
"import re",
"import string",
"def escape(text,... |
#!/usr/bin/env python
"""
FCKeditor - The text editor for Internet - http://www.fckeditor.net
Copyright (C) 2003-2009 Frederico Caldeira Knabben
== BEGIN LICENSE ==
Licensed under the terms of any of the following licenses at your
choice:
- GNU General Public License Version 2 or later (the "GPL")
http://www.gnu.org/licenses/gpl.html
- GNU Lesser General Public License Version 2.1 or later (the "LGPL")
http://www.gnu.org/licenses/lgpl.html
- Mozilla Public License Version 1.1 or later (the "MPL")
http://www.mozilla.org/MPL/MPL-1.1.html
== END LICENSE ==
Connector/QuickUpload for Python (WSGI wrapper).
See config.py for configuration settings
"""
from connector import FCKeditorConnector
from upload import FCKeditorQuickUpload
import cgitb
from cStringIO import StringIO
# Running from WSGI capable server (recomended)
def App(environ, start_response):
"WSGI entry point. Run the connector"
if environ['SCRIPT_NAME'].endswith("connector.py"):
conn = FCKeditorConnector(environ)
elif environ['SCRIPT_NAME'].endswith("upload.py"):
conn = FCKeditorQuickUpload(environ)
else:
start_response ("200 Ok", [('Content-Type','text/html')])
yield "Unknown page requested: "
yield environ['SCRIPT_NAME']
return
try:
# run the connector
data = conn.doResponse()
# Start WSGI response:
start_response ("200 Ok", conn.headers)
# Send response text
yield data
except:
start_response("500 Internal Server Error",[("Content-type","text/html")])
file = StringIO()
cgitb.Hook(file = file).handle()
yield file.getvalue()
| [
[
8,
0,
0.2586,
0.431,
0,
0.66,
0,
0,
1,
0,
0,
0,
0,
0,
0
],
[
1,
0,
0.5,
0.0172,
0,
0.66,
0.2,
385,
0,
1,
0,
0,
385,
0,
0
],
[
1,
0,
0.5172,
0.0172,
0,
0.66,
0... | [
"\"\"\"\nFCKeditor - The text editor for Internet - http://www.fckeditor.net\nCopyright (C) 2003-2009 Frederico Caldeira Knabben\n\n== BEGIN LICENSE ==\n\nLicensed under the terms of any of the following licenses at your\nchoice:",
"from connector import FCKeditorConnector",
"from upload import FCKeditorQuickUp... |
#!/usr/bin/env python
"""
FCKeditor - The text editor for Internet - http://www.fckeditor.net
Copyright (C) 2003-2009 Frederico Caldeira Knabben
== BEGIN LICENSE ==
Licensed under the terms of any of the following licenses at your
choice:
- GNU General Public License Version 2 or later (the "GPL")
http://www.gnu.org/licenses/gpl.html
- GNU Lesser General Public License Version 2.1 or later (the "LGPL")
http://www.gnu.org/licenses/lgpl.html
- Mozilla Public License Version 1.1 or later (the "MPL")
http://www.mozilla.org/MPL/MPL-1.1.html
== END LICENSE ==
This is the "File Uploader" for Python
"""
import os
from fckutil import *
from fckcommands import * # default command's implementation
from fckconnector import FCKeditorConnectorBase # import base connector
import config as Config
class FCKeditorQuickUpload( FCKeditorConnectorBase,
UploadFileCommandMixin,
BaseHttpMixin, BaseHtmlMixin):
def doResponse(self):
"Main function. Process the request, set headers and return a string as response."
# Check if this connector is disabled
if not(Config.Enabled):
return self.sendUploadResults(1, "This file uploader is disabled. Please check the \"editor/filemanager/connectors/py/config.py\"")
command = 'QuickUpload'
# The file type (from the QueryString, by default 'File').
resourceType = self.request.get('Type','File')
currentFolder = "/"
# Check for invalid paths
if currentFolder is None:
return self.sendUploadResults(102, '', '', "")
# Check if it is an allowed command
if ( not command in Config.ConfigAllowedCommands ):
return self.sendUploadResults( 1, '', '', 'The %s command isn\'t allowed' % command )
if ( not resourceType in Config.ConfigAllowedTypes ):
return self.sendUploadResults( 1, '', '', 'Invalid type specified' )
# Setup paths
self.userFilesFolder = Config.QuickUploadAbsolutePath[resourceType]
self.webUserFilesFolder = Config.QuickUploadPath[resourceType]
if not self.userFilesFolder: # no absolute path given (dangerous...)
self.userFilesFolder = mapServerPath(self.environ,
self.webUserFilesFolder)
# Ensure that the directory exists.
if not os.path.exists(self.userFilesFolder):
try:
self.createServerFoldercreateServerFolder( self.userFilesFolder )
except:
return self.sendError(1, "This connector couldn\'t access to local user\'s files directories. Please check the UserFilesAbsolutePath in \"editor/filemanager/connectors/py/config.py\" and try again. ")
# File upload doesn't have to return XML, so intercept here
return self.uploadFile(resourceType, currentFolder)
# Running from command line (plain old CGI)
if __name__ == '__main__':
try:
# Create a Connector Instance
conn = FCKeditorQuickUpload()
data = conn.doResponse()
for header in conn.headers:
if not header is None:
print '%s: %s' % header
print
print data
except:
print "Content-Type: text/plain"
print
import cgi
cgi.print_exception()
| [
[
1,
0,
0.0213,
0.0213,
0,
0.66,
0,
688,
0,
1,
0,
0,
688,
0,
0
],
[
1,
0,
0.0638,
0.0213,
0,
0.66,
0.2,
630,
0,
1,
0,
0,
630,
0,
0
],
[
1,
0,
0.0851,
0.0213,
0,
0.6... | [
"import os",
"from fckutil import *",
"from fckcommands import * \t# default command's implementation",
"from fckconnector import FCKeditorConnectorBase # import base connector",
"import config as Config",
"class FCKeditorQuickUpload(\tFCKeditorConnectorBase,\n\t\t\t\t\t\t\tUploadFileCommandMixin,\n\t\t\t... |
#!/usr/bin/env python
"""
FCKeditor - The text editor for Internet - http://www.fckeditor.net
Copyright (C) 2003-2009 Frederico Caldeira Knabben
== BEGIN LICENSE ==
Licensed under the terms of any of the following licenses at your
choice:
- GNU General Public License Version 2 or later (the "GPL")
http://www.gnu.org/licenses/gpl.html
- GNU Lesser General Public License Version 2.1 or later (the "LGPL")
http://www.gnu.org/licenses/lgpl.html
- Mozilla Public License Version 1.1 or later (the "MPL")
http://www.mozilla.org/MPL/MPL-1.1.html
== END LICENSE ==
Base Connector for Python (CGI and WSGI).
See config.py for configuration settings
"""
import cgi, os
from fckutil import *
from fckcommands import * # default command's implementation
from fckoutput import * # base http, xml and html output mixins
import config as Config
class FCKeditorConnectorBase( object ):
"The base connector class. Subclass it to extend functionality (see Zope example)"
def __init__(self, environ=None):
"Constructor: Here you should parse request fields, initialize variables, etc."
self.request = FCKeditorRequest(environ) # Parse request
self.headers = [] # Clean Headers
if environ:
self.environ = environ
else:
self.environ = os.environ
# local functions
def setHeader(self, key, value):
self.headers.append ((key, value))
return
class FCKeditorRequest(object):
"A wrapper around the request object"
def __init__(self, environ):
if environ: # WSGI
self.request = cgi.FieldStorage(fp=environ['wsgi.input'],
environ=environ,
keep_blank_values=1)
self.environ = environ
else: # plain old cgi
self.environ = os.environ
self.request = cgi.FieldStorage()
if 'REQUEST_METHOD' in self.environ and 'QUERY_STRING' in self.environ:
if self.environ['REQUEST_METHOD'].upper()=='POST':
# we are in a POST, but GET query_string exists
# cgi parses by default POST data, so parse GET QUERY_STRING too
self.get_request = cgi.FieldStorage(fp=None,
environ={
'REQUEST_METHOD':'GET',
'QUERY_STRING':self.environ['QUERY_STRING'],
},
)
else:
self.get_request={}
def has_key(self, key):
return self.request.has_key(key) or self.get_request.has_key(key)
def get(self, key, default=None):
if key in self.request.keys():
field = self.request[key]
elif key in self.get_request.keys():
field = self.get_request[key]
else:
return default
if hasattr(field,"filename") and field.filename: #file upload, do not convert return value
return field
else:
return field.value
| [
[
8,
0,
0.1667,
0.2778,
0,
0.66,
0,
0,
1,
0,
0,
0,
0,
0,
0
],
[
1,
0,
0.3111,
0.0111,
0,
0.66,
0.1429,
934,
0,
2,
0,
0,
934,
0,
0
],
[
1,
0,
0.3333,
0.0111,
0,
0.66... | [
"\"\"\"\nFCKeditor - The text editor for Internet - http://www.fckeditor.net\nCopyright (C) 2003-2009 Frederico Caldeira Knabben\n\n== BEGIN LICENSE ==\n\nLicensed under the terms of any of the following licenses at your\nchoice:",
"import cgi, os",
"from fckutil import *",
"from fckcommands import * \t# defa... |
#!/usr/bin/env python
"""
FCKeditor - The text editor for Internet - http://www.fckeditor.net
Copyright (C) 2003-2009 Frederico Caldeira Knabben
== BEGIN LICENSE ==
Licensed under the terms of any of the following licenses at your
choice:
- GNU General Public License Version 2 or later (the "GPL")
http://www.gnu.org/licenses/gpl.html
- GNU Lesser General Public License Version 2.1 or later (the "LGPL")
http://www.gnu.org/licenses/lgpl.html
- Mozilla Public License Version 1.1 or later (the "MPL")
http://www.mozilla.org/MPL/MPL-1.1.html
== END LICENSE ==
Connector for Python (CGI and WSGI).
See config.py for configuration settings
"""
import os
from fckutil import *
from fckcommands import * # default command's implementation
from fckoutput import * # base http, xml and html output mixins
from fckconnector import FCKeditorConnectorBase # import base connector
import config as Config
class FCKeditorConnector( FCKeditorConnectorBase,
GetFoldersCommandMixin,
GetFoldersAndFilesCommandMixin,
CreateFolderCommandMixin,
UploadFileCommandMixin,
BaseHttpMixin, BaseXmlMixin, BaseHtmlMixin ):
"The Standard connector class."
def doResponse(self):
"Main function. Process the request, set headers and return a string as response."
s = ""
# Check if this connector is disabled
if not(Config.Enabled):
return self.sendError(1, "This connector is disabled. Please check the connector configurations in \"editor/filemanager/connectors/py/config.py\" and try again.")
# Make sure we have valid inputs
for key in ("Command","Type","CurrentFolder"):
if not self.request.has_key (key):
return
# Get command, resource type and current folder
command = self.request.get("Command")
resourceType = self.request.get("Type")
currentFolder = getCurrentFolder(self.request.get("CurrentFolder"))
# Check for invalid paths
if currentFolder is None:
if (command == "FileUpload"):
return self.sendUploadResults( errorNo = 102, customMsg = "" )
else:
return self.sendError(102, "")
# Check if it is an allowed command
if ( not command in Config.ConfigAllowedCommands ):
return self.sendError( 1, 'The %s command isn\'t allowed' % command )
if ( not resourceType in Config.ConfigAllowedTypes ):
return self.sendError( 1, 'Invalid type specified' )
# Setup paths
if command == "QuickUpload":
self.userFilesFolder = Config.QuickUploadAbsolutePath[resourceType]
self.webUserFilesFolder = Config.QuickUploadPath[resourceType]
else:
self.userFilesFolder = Config.FileTypesAbsolutePath[resourceType]
self.webUserFilesFolder = Config.FileTypesPath[resourceType]
if not self.userFilesFolder: # no absolute path given (dangerous...)
self.userFilesFolder = mapServerPath(self.environ,
self.webUserFilesFolder)
# Ensure that the directory exists.
if not os.path.exists(self.userFilesFolder):
try:
self.createServerFolder( self.userFilesFolder )
except:
return self.sendError(1, "This connector couldn\'t access to local user\'s files directories. Please check the UserFilesAbsolutePath in \"editor/filemanager/connectors/py/config.py\" and try again. ")
# File upload doesn't have to return XML, so intercept here
if (command == "FileUpload"):
return self.uploadFile(resourceType, currentFolder)
# Create Url
url = combinePaths( self.webUserFilesFolder, currentFolder )
# Begin XML
s += self.createXmlHeader(command, resourceType, currentFolder, url)
# Execute the command
selector = {"GetFolders": self.getFolders,
"GetFoldersAndFiles": self.getFoldersAndFiles,
"CreateFolder": self.createFolder,
}
s += selector[command](resourceType, currentFolder)
s += self.createXmlFooter()
return s
# Running from command line (plain old CGI)
if __name__ == '__main__':
try:
# Create a Connector Instance
conn = FCKeditorConnector()
data = conn.doResponse()
for header in conn.headers:
print '%s: %s' % header
print
print data
except:
print "Content-Type: text/plain"
print
import cgi
cgi.print_exception()
| [
[
1,
0,
0.0127,
0.0127,
0,
0.66,
0,
688,
0,
1,
0,
0,
688,
0,
0
],
[
1,
0,
0.038,
0.0127,
0,
0.66,
0.1667,
630,
0,
1,
0,
0,
630,
0,
0
],
[
1,
0,
0.0506,
0.0127,
0,
0... | [
"import os",
"from fckutil import *",
"from fckcommands import * \t# default command's implementation",
"from fckoutput import * \t# base http, xml and html output mixins",
"from fckconnector import FCKeditorConnectorBase # import base connector",
"import config as Config",
"class FCKeditorConnector(\tF... |
#!/usr/bin/env python
"""
FCKeditor - The text editor for Internet - http://www.fckeditor.net
Copyright (C) 2003-2009 Frederico Caldeira Knabben
== BEGIN LICENSE ==
Licensed under the terms of any of the following licenses at your
choice:
- GNU General Public License Version 2 or later (the "GPL")
http://www.gnu.org/licenses/gpl.html
- GNU Lesser General Public License Version 2.1 or later (the "LGPL")
http://www.gnu.org/licenses/lgpl.html
- Mozilla Public License Version 1.1 or later (the "MPL")
http://www.mozilla.org/MPL/MPL-1.1.html
== END LICENSE ==
Connector for Python (CGI and WSGI).
"""
from time import gmtime, strftime
import string
def escape(text, replace=string.replace):
"""
Converts the special characters '<', '>', and '&'.
RFC 1866 specifies that these characters be represented
in HTML as < > and & respectively. In Python
1.5 we use the new string.replace() function for speed.
"""
text = replace(text, '&', '&') # must be done 1st
text = replace(text, '<', '<')
text = replace(text, '>', '>')
text = replace(text, '"', '"')
return text
def convertToXmlAttribute(value):
if (value is None):
value = ""
return escape(value)
class BaseHttpMixin(object):
def setHttpHeaders(self, content_type='text/xml'):
"Purpose: to prepare the headers for the xml to return"
# Prevent the browser from caching the result.
# Date in the past
self.setHeader('Expires','Mon, 26 Jul 1997 05:00:00 GMT')
# always modified
self.setHeader('Last-Modified',strftime("%a, %d %b %Y %H:%M:%S GMT", gmtime()))
# HTTP/1.1
self.setHeader('Cache-Control','no-store, no-cache, must-revalidate')
self.setHeader('Cache-Control','post-check=0, pre-check=0')
# HTTP/1.0
self.setHeader('Pragma','no-cache')
# Set the response format.
self.setHeader( 'Content-Type', content_type + '; charset=utf-8' )
return
class BaseXmlMixin(object):
def createXmlHeader(self, command, resourceType, currentFolder, url):
"Purpose: returns the xml header"
self.setHttpHeaders()
# Create the XML document header
s = """<?xml version="1.0" encoding="utf-8" ?>"""
# Create the main connector node
s += """<Connector command="%s" resourceType="%s">""" % (
command,
resourceType
)
# Add the current folder node
s += """<CurrentFolder path="%s" url="%s" />""" % (
convertToXmlAttribute(currentFolder),
convertToXmlAttribute(url),
)
return s
def createXmlFooter(self):
"Purpose: returns the xml footer"
return """</Connector>"""
def sendError(self, number, text):
"Purpose: in the event of an error, return an xml based error"
self.setHttpHeaders()
return ("""<?xml version="1.0" encoding="utf-8" ?>""" +
"""<Connector>""" +
self.sendErrorNode (number, text) +
"""</Connector>""" )
def sendErrorNode(self, number, text):
if number != 1:
return """<Error number="%s" />""" % (number)
else:
return """<Error number="%s" text="%s" />""" % (number, convertToXmlAttribute(text))
class BaseHtmlMixin(object):
def sendUploadResults( self, errorNo = 0, fileUrl = '', fileName = '', customMsg = '' ):
self.setHttpHeaders("text/html")
"This is the function that sends the results of the uploading process"
"Minified version of the document.domain automatic fix script (#1919)."
"The original script can be found at _dev/domain_fix_template.js"
return """<script type="text/javascript">
(function(){var d=document.domain;while (true){try{var A=window.parent.document.domain;break;}catch(e) {};d=d.replace(/.*?(?:\.|$)/,'');if (d.length==0) break;try{document.domain=d;}catch (e){break;}}})();
window.parent.OnUploadCompleted(%(errorNumber)s,"%(fileUrl)s","%(fileName)s","%(customMsg)s");
</script>""" % {
'errorNumber': errorNo,
'fileUrl': fileUrl.replace ('"', '\\"'),
'fileName': fileName.replace ( '"', '\\"' ) ,
'customMsg': customMsg.replace ( '"', '\\"' ),
}
| [
[
8,
0,
0.1176,
0.1933,
0,
0.66,
0,
0,
1,
0,
0,
0,
0,
0,
0
],
[
1,
0,
0.2269,
0.0084,
0,
0.66,
0.1429,
654,
0,
2,
0,
0,
654,
0,
0
],
[
1,
0,
0.2353,
0.0084,
0,
0.66... | [
"\"\"\"\nFCKeditor - The text editor for Internet - http://www.fckeditor.net\nCopyright (C) 2003-2009 Frederico Caldeira Knabben\n\n== BEGIN LICENSE ==\n\nLicensed under the terms of any of the following licenses at your\nchoice:",
"from time import gmtime, strftime",
"import string",
"def escape(text, replac... |
#!/usr/bin/env python
"""
FCKeditor - The text editor for Internet - http://www.fckeditor.net
Copyright (C) 2003-2009 Frederico Caldeira Knabben
== BEGIN LICENSE ==
Licensed under the terms of any of the following licenses at your
choice:
- GNU General Public License Version 2 or later (the "GPL")
http://www.gnu.org/licenses/gpl.html
- GNU Lesser General Public License Version 2.1 or later (the "LGPL")
http://www.gnu.org/licenses/lgpl.html
- Mozilla Public License Version 1.1 or later (the "MPL")
http://www.mozilla.org/MPL/MPL-1.1.html
== END LICENSE ==
Base Connector for Python (CGI and WSGI).
See config.py for configuration settings
"""
import cgi, os
from fckutil import *
from fckcommands import * # default command's implementation
from fckoutput import * # base http, xml and html output mixins
import config as Config
class FCKeditorConnectorBase( object ):
"The base connector class. Subclass it to extend functionality (see Zope example)"
def __init__(self, environ=None):
"Constructor: Here you should parse request fields, initialize variables, etc."
self.request = FCKeditorRequest(environ) # Parse request
self.headers = [] # Clean Headers
if environ:
self.environ = environ
else:
self.environ = os.environ
# local functions
def setHeader(self, key, value):
self.headers.append ((key, value))
return
class FCKeditorRequest(object):
"A wrapper around the request object"
def __init__(self, environ):
if environ: # WSGI
self.request = cgi.FieldStorage(fp=environ['wsgi.input'],
environ=environ,
keep_blank_values=1)
self.environ = environ
else: # plain old cgi
self.environ = os.environ
self.request = cgi.FieldStorage()
if 'REQUEST_METHOD' in self.environ and 'QUERY_STRING' in self.environ:
if self.environ['REQUEST_METHOD'].upper()=='POST':
# we are in a POST, but GET query_string exists
# cgi parses by default POST data, so parse GET QUERY_STRING too
self.get_request = cgi.FieldStorage(fp=None,
environ={
'REQUEST_METHOD':'GET',
'QUERY_STRING':self.environ['QUERY_STRING'],
},
)
else:
self.get_request={}
def has_key(self, key):
return self.request.has_key(key) or self.get_request.has_key(key)
def get(self, key, default=None):
if key in self.request.keys():
field = self.request[key]
elif key in self.get_request.keys():
field = self.get_request[key]
else:
return default
if hasattr(field,"filename") and field.filename: #file upload, do not convert return value
return field
else:
return field.value
| [
[
8,
0,
0.1667,
0.2778,
0,
0.66,
0,
0,
1,
0,
0,
0,
0,
0,
0
],
[
1,
0,
0.3111,
0.0111,
0,
0.66,
0.1429,
934,
0,
2,
0,
0,
934,
0,
0
],
[
1,
0,
0.3333,
0.0111,
0,
0.66... | [
"\"\"\"\nFCKeditor - The text editor for Internet - http://www.fckeditor.net\nCopyright (C) 2003-2009 Frederico Caldeira Knabben\n\n== BEGIN LICENSE ==\n\nLicensed under the terms of any of the following licenses at your\nchoice:",
"import cgi, os",
"from fckutil import *",
"from fckcommands import * \t# defa... |
#!/usr/bin/env python
"""
FCKeditor - The text editor for Internet - http://www.fckeditor.net
Copyright (C) 2003-2009 Frederico Caldeira Knabben
== BEGIN LICENSE ==
Licensed under the terms of any of the following licenses at your
choice:
- GNU General Public License Version 2 or later (the "GPL")
http://www.gnu.org/licenses/gpl.html
- GNU Lesser General Public License Version 2.1 or later (the "LGPL")
http://www.gnu.org/licenses/lgpl.html
- Mozilla Public License Version 1.1 or later (the "MPL")
http://www.mozilla.org/MPL/MPL-1.1.html
== END LICENSE ==
Connector for Python (CGI and WSGI).
See config.py for configuration settings
"""
import os
from fckutil import *
from fckcommands import * # default command's implementation
from fckoutput import * # base http, xml and html output mixins
from fckconnector import FCKeditorConnectorBase # import base connector
import config as Config
class FCKeditorConnector( FCKeditorConnectorBase,
GetFoldersCommandMixin,
GetFoldersAndFilesCommandMixin,
CreateFolderCommandMixin,
UploadFileCommandMixin,
BaseHttpMixin, BaseXmlMixin, BaseHtmlMixin ):
"The Standard connector class."
def doResponse(self):
"Main function. Process the request, set headers and return a string as response."
s = ""
# Check if this connector is disabled
if not(Config.Enabled):
return self.sendError(1, "This connector is disabled. Please check the connector configurations in \"editor/filemanager/connectors/py/config.py\" and try again.")
# Make sure we have valid inputs
for key in ("Command","Type","CurrentFolder"):
if not self.request.has_key (key):
return
# Get command, resource type and current folder
command = self.request.get("Command")
resourceType = self.request.get("Type")
currentFolder = getCurrentFolder(self.request.get("CurrentFolder"))
# Check for invalid paths
if currentFolder is None:
if (command == "FileUpload"):
return self.sendUploadResults( errorNo = 102, customMsg = "" )
else:
return self.sendError(102, "")
# Check if it is an allowed command
if ( not command in Config.ConfigAllowedCommands ):
return self.sendError( 1, 'The %s command isn\'t allowed' % command )
if ( not resourceType in Config.ConfigAllowedTypes ):
return self.sendError( 1, 'Invalid type specified' )
# Setup paths
if command == "QuickUpload":
self.userFilesFolder = Config.QuickUploadAbsolutePath[resourceType]
self.webUserFilesFolder = Config.QuickUploadPath[resourceType]
else:
self.userFilesFolder = Config.FileTypesAbsolutePath[resourceType]
self.webUserFilesFolder = Config.FileTypesPath[resourceType]
if not self.userFilesFolder: # no absolute path given (dangerous...)
self.userFilesFolder = mapServerPath(self.environ,
self.webUserFilesFolder)
# Ensure that the directory exists.
if not os.path.exists(self.userFilesFolder):
try:
self.createServerFolder( self.userFilesFolder )
except:
return self.sendError(1, "This connector couldn\'t access to local user\'s files directories. Please check the UserFilesAbsolutePath in \"editor/filemanager/connectors/py/config.py\" and try again. ")
# File upload doesn't have to return XML, so intercept here
if (command == "FileUpload"):
return self.uploadFile(resourceType, currentFolder)
# Create Url
url = combinePaths( self.webUserFilesFolder, currentFolder )
# Begin XML
s += self.createXmlHeader(command, resourceType, currentFolder, url)
# Execute the command
selector = {"GetFolders": self.getFolders,
"GetFoldersAndFiles": self.getFoldersAndFiles,
"CreateFolder": self.createFolder,
}
s += selector[command](resourceType, currentFolder)
s += self.createXmlFooter()
return s
# Running from command line (plain old CGI)
if __name__ == '__main__':
try:
# Create a Connector Instance
conn = FCKeditorConnector()
data = conn.doResponse()
for header in conn.headers:
print '%s: %s' % header
print
print data
except:
print "Content-Type: text/plain"
print
import cgi
cgi.print_exception()
| [
[
1,
0,
0.0127,
0.0127,
0,
0.66,
0,
688,
0,
1,
0,
0,
688,
0,
0
],
[
1,
0,
0.038,
0.0127,
0,
0.66,
0.1667,
630,
0,
1,
0,
0,
630,
0,
0
],
[
1,
0,
0.0506,
0.0127,
0,
0... | [
"import os",
"from fckutil import *",
"from fckcommands import * \t# default command's implementation",
"from fckoutput import * \t# base http, xml and html output mixins",
"from fckconnector import FCKeditorConnectorBase # import base connector",
"import config as Config",
"class FCKeditorConnector(\tF... |
#!/usr/bin/env python
"""
FCKeditor - The text editor for Internet - http://www.fckeditor.net
Copyright (C) 2003-2009 Frederico Caldeira Knabben
== BEGIN LICENSE ==
Licensed under the terms of any of the following licenses at your
choice:
- GNU General Public License Version 2 or later (the "GPL")
http://www.gnu.org/licenses/gpl.html
- GNU Lesser General Public License Version 2.1 or later (the "LGPL")
http://www.gnu.org/licenses/lgpl.html
- Mozilla Public License Version 1.1 or later (the "MPL")
http://www.mozilla.org/MPL/MPL-1.1.html
== END LICENSE ==
Connector for Python (CGI and WSGI).
"""
from time import gmtime, strftime
import string
def escape(text, replace=string.replace):
"""
Converts the special characters '<', '>', and '&'.
RFC 1866 specifies that these characters be represented
in HTML as < > and & respectively. In Python
1.5 we use the new string.replace() function for speed.
"""
text = replace(text, '&', '&') # must be done 1st
text = replace(text, '<', '<')
text = replace(text, '>', '>')
text = replace(text, '"', '"')
return text
def convertToXmlAttribute(value):
if (value is None):
value = ""
return escape(value)
class BaseHttpMixin(object):
def setHttpHeaders(self, content_type='text/xml'):
"Purpose: to prepare the headers for the xml to return"
# Prevent the browser from caching the result.
# Date in the past
self.setHeader('Expires','Mon, 26 Jul 1997 05:00:00 GMT')
# always modified
self.setHeader('Last-Modified',strftime("%a, %d %b %Y %H:%M:%S GMT", gmtime()))
# HTTP/1.1
self.setHeader('Cache-Control','no-store, no-cache, must-revalidate')
self.setHeader('Cache-Control','post-check=0, pre-check=0')
# HTTP/1.0
self.setHeader('Pragma','no-cache')
# Set the response format.
self.setHeader( 'Content-Type', content_type + '; charset=utf-8' )
return
class BaseXmlMixin(object):
def createXmlHeader(self, command, resourceType, currentFolder, url):
"Purpose: returns the xml header"
self.setHttpHeaders()
# Create the XML document header
s = """<?xml version="1.0" encoding="utf-8" ?>"""
# Create the main connector node
s += """<Connector command="%s" resourceType="%s">""" % (
command,
resourceType
)
# Add the current folder node
s += """<CurrentFolder path="%s" url="%s" />""" % (
convertToXmlAttribute(currentFolder),
convertToXmlAttribute(url),
)
return s
def createXmlFooter(self):
"Purpose: returns the xml footer"
return """</Connector>"""
def sendError(self, number, text):
"Purpose: in the event of an error, return an xml based error"
self.setHttpHeaders()
return ("""<?xml version="1.0" encoding="utf-8" ?>""" +
"""<Connector>""" +
self.sendErrorNode (number, text) +
"""</Connector>""" )
def sendErrorNode(self, number, text):
if number != 1:
return """<Error number="%s" />""" % (number)
else:
return """<Error number="%s" text="%s" />""" % (number, convertToXmlAttribute(text))
class BaseHtmlMixin(object):
def sendUploadResults( self, errorNo = 0, fileUrl = '', fileName = '', customMsg = '' ):
self.setHttpHeaders("text/html")
"This is the function that sends the results of the uploading process"
"Minified version of the document.domain automatic fix script (#1919)."
"The original script can be found at _dev/domain_fix_template.js"
return """<script type="text/javascript">
(function(){var d=document.domain;while (true){try{var A=window.parent.document.domain;break;}catch(e) {};d=d.replace(/.*?(?:\.|$)/,'');if (d.length==0) break;try{document.domain=d;}catch (e){break;}}})();
window.parent.OnUploadCompleted(%(errorNumber)s,"%(fileUrl)s","%(fileName)s","%(customMsg)s");
</script>""" % {
'errorNumber': errorNo,
'fileUrl': fileUrl.replace ('"', '\\"'),
'fileName': fileName.replace ( '"', '\\"' ) ,
'customMsg': customMsg.replace ( '"', '\\"' ),
}
| [
[
8,
0,
0.1176,
0.1933,
0,
0.66,
0,
0,
1,
0,
0,
0,
0,
0,
0
],
[
1,
0,
0.2269,
0.0084,
0,
0.66,
0.1429,
654,
0,
2,
0,
0,
654,
0,
0
],
[
1,
0,
0.2353,
0.0084,
0,
0.66... | [
"\"\"\"\nFCKeditor - The text editor for Internet - http://www.fckeditor.net\nCopyright (C) 2003-2009 Frederico Caldeira Knabben\n\n== BEGIN LICENSE ==\n\nLicensed under the terms of any of the following licenses at your\nchoice:",
"from time import gmtime, strftime",
"import string",
"def escape(text, replac... |
#!/usr/bin/env python
"""
FCKeditor - The text editor for Internet - http://www.fckeditor.net
Copyright (C) 2003-2009 Frederico Caldeira Knabben
== BEGIN LICENSE ==
Licensed under the terms of any of the following licenses at your
choice:
- GNU General Public License Version 2 or later (the "GPL")
http://www.gnu.org/licenses/gpl.html
- GNU Lesser General Public License Version 2.1 or later (the "LGPL")
http://www.gnu.org/licenses/lgpl.html
- Mozilla Public License Version 1.1 or later (the "MPL")
http://www.mozilla.org/MPL/MPL-1.1.html
== END LICENSE ==
Connector/QuickUpload for Python (WSGI wrapper).
See config.py for configuration settings
"""
from connector import FCKeditorConnector
from upload import FCKeditorQuickUpload
import cgitb
from cStringIO import StringIO
# Running from WSGI capable server (recomended)
def App(environ, start_response):
"WSGI entry point. Run the connector"
if environ['SCRIPT_NAME'].endswith("connector.py"):
conn = FCKeditorConnector(environ)
elif environ['SCRIPT_NAME'].endswith("upload.py"):
conn = FCKeditorQuickUpload(environ)
else:
start_response ("200 Ok", [('Content-Type','text/html')])
yield "Unknown page requested: "
yield environ['SCRIPT_NAME']
return
try:
# run the connector
data = conn.doResponse()
# Start WSGI response:
start_response ("200 Ok", conn.headers)
# Send response text
yield data
except:
start_response("500 Internal Server Error",[("Content-type","text/html")])
file = StringIO()
cgitb.Hook(file = file).handle()
yield file.getvalue()
| [
[
8,
0,
0.2586,
0.431,
0,
0.66,
0,
0,
1,
0,
0,
0,
0,
0,
0
],
[
1,
0,
0.5,
0.0172,
0,
0.66,
0.2,
385,
0,
1,
0,
0,
385,
0,
0
],
[
1,
0,
0.5172,
0.0172,
0,
0.66,
0... | [
"\"\"\"\nFCKeditor - The text editor for Internet - http://www.fckeditor.net\nCopyright (C) 2003-2009 Frederico Caldeira Knabben\n\n== BEGIN LICENSE ==\n\nLicensed under the terms of any of the following licenses at your\nchoice:",
"from connector import FCKeditorConnector",
"from upload import FCKeditorQuickUp... |
#!/usr/bin/env python
"""
FCKeditor - The text editor for Internet - http://www.fckeditor.net
Copyright (C) 2003-2009 Frederico Caldeira Knabben
== BEGIN LICENSE ==
Licensed under the terms of any of the following licenses at your
choice:
- GNU General Public License Version 2 or later (the "GPL")
http://www.gnu.org/licenses/gpl.html
- GNU Lesser General Public License Version 2.1 or later (the "LGPL")
http://www.gnu.org/licenses/lgpl.html
- Mozilla Public License Version 1.1 or later (the "MPL")
http://www.mozilla.org/MPL/MPL-1.1.html
== END LICENSE ==
This is the "File Uploader" for Python
"""
import os
from fckutil import *
from fckcommands import * # default command's implementation
from fckconnector import FCKeditorConnectorBase # import base connector
import config as Config
class FCKeditorQuickUpload( FCKeditorConnectorBase,
UploadFileCommandMixin,
BaseHttpMixin, BaseHtmlMixin):
def doResponse(self):
"Main function. Process the request, set headers and return a string as response."
# Check if this connector is disabled
if not(Config.Enabled):
return self.sendUploadResults(1, "This file uploader is disabled. Please check the \"editor/filemanager/connectors/py/config.py\"")
command = 'QuickUpload'
# The file type (from the QueryString, by default 'File').
resourceType = self.request.get('Type','File')
currentFolder = "/"
# Check for invalid paths
if currentFolder is None:
return self.sendUploadResults(102, '', '', "")
# Check if it is an allowed command
if ( not command in Config.ConfigAllowedCommands ):
return self.sendUploadResults( 1, '', '', 'The %s command isn\'t allowed' % command )
if ( not resourceType in Config.ConfigAllowedTypes ):
return self.sendUploadResults( 1, '', '', 'Invalid type specified' )
# Setup paths
self.userFilesFolder = Config.QuickUploadAbsolutePath[resourceType]
self.webUserFilesFolder = Config.QuickUploadPath[resourceType]
if not self.userFilesFolder: # no absolute path given (dangerous...)
self.userFilesFolder = mapServerPath(self.environ,
self.webUserFilesFolder)
# Ensure that the directory exists.
if not os.path.exists(self.userFilesFolder):
try:
self.createServerFoldercreateServerFolder( self.userFilesFolder )
except:
return self.sendError(1, "This connector couldn\'t access to local user\'s files directories. Please check the UserFilesAbsolutePath in \"editor/filemanager/connectors/py/config.py\" and try again. ")
# File upload doesn't have to return XML, so intercept here
return self.uploadFile(resourceType, currentFolder)
# Running from command line (plain old CGI)
if __name__ == '__main__':
try:
# Create a Connector Instance
conn = FCKeditorQuickUpload()
data = conn.doResponse()
for header in conn.headers:
if not header is None:
print '%s: %s' % header
print
print data
except:
print "Content-Type: text/plain"
print
import cgi
cgi.print_exception()
| [
[
1,
0,
0.0213,
0.0213,
0,
0.66,
0,
688,
0,
1,
0,
0,
688,
0,
0
],
[
1,
0,
0.0638,
0.0213,
0,
0.66,
0.2,
630,
0,
1,
0,
0,
630,
0,
0
],
[
1,
0,
0.0851,
0.0213,
0,
0.6... | [
"import os",
"from fckutil import *",
"from fckcommands import * \t# default command's implementation",
"from fckconnector import FCKeditorConnectorBase # import base connector",
"import config as Config",
"class FCKeditorQuickUpload(\tFCKeditorConnectorBase,\n\t\t\t\t\t\t\tUploadFileCommandMixin,\n\t\t\t... |
#!/usr/bin/env python
# encoding: utf-8
import random
"""
' Author: Jonathan Potter
'
' Name: parameters
'
' Description: The simulation parameters, or knobs. These can be changed by the user to tailor the
' simulation to his needs. These are described more in depth in the User's Manual of the documentation.
'
' Notes:
' Encryption time found using time command to show
' system time used to encrypt 68byte (worst-case size)
' file using GPG.
'
' Hop time found using traceroute with 68byte (worst-case size) packets.
'
' Both measurements were repeated and mins and maxes were used
' as parameters in random number generation.
'
"""
parameters = {
'iterations' : 5, # number of times to run each step (these will be averaged)
'numberOfNodesRange' : (5, 1000, 5), # start, stop, step
'encryptionTimeRange' : (.1, 1), # range in milliseconds
'hopTimeRange' : (.1, 1), # hop time range in ms
'bandwidthRange' : (1, 10), # range
'packetSize' : 68, # bytes
'defaultDataNumberOfPackets' : 10, # use this many packets when calculating the bandwidth of a path
'snapshot' : [10, 20, 100, 200]
}
| [
[
1,
0,
0.1081,
0.027,
0,
0.66,
0,
715,
0,
1,
0,
0,
715,
0,
0
],
[
8,
0,
0.4324,
0.5135,
0,
0.66,
0.5,
0,
1,
0,
0,
0,
0,
0,
0
],
[
14,
0,
0.8243,
0.2703,
0,
0.66,
... | [
"import random",
"\"\"\"\n' Author: Jonathan Potter\n'\n' Name: parameters\n'\n' Description: The simulation parameters, or knobs. These can be changed by the user to tailor the\n' simulation to his needs. These are described more in depth in the User's Manual of the documentation.\n'",
"parameters = {\n\t'it... |
#!/usr/bin/env python
# encoding: utf-8
import sys
import os
from parameters import *
from driver import Driver
"""
'
' Author: Jonathan Potter
'
' Name: Simulation
'
' Description: The main simulation entry point. This is the file you run, and it simply
' passes off control to the driver.
'
' Return: None.
'
"""
def main():
d = Driver(parameters)
d.simulate()
if __name__ == '__main__':
main()
| [
[
1,
0,
0.1379,
0.0345,
0,
0.66,
0,
509,
0,
1,
0,
0,
509,
0,
0
],
[
1,
0,
0.1724,
0.0345,
0,
0.66,
0.1667,
688,
0,
1,
0,
0,
688,
0,
0
],
[
1,
0,
0.2414,
0.0345,
0,
... | [
"import sys",
"import os",
"from parameters import *",
"from driver import Driver",
"\"\"\"\n'\n' Author: Jonathan Potter\n'\n' Name: Simulation\n'\n' Description: The main simulation entry point. This is the file you run, and it simply\n' passes off control to the driver.",
"def main():\n\td = Driver(pa... |
#!/usr/bin/env python
# encoding: utf-8
import networkx as nx
import matplotlib.pyplot as plt
import sys
import os
import random
import math
from protocols import Simple
from protocols import Crowds
from protocols import OnionRouting
from protocols import Utilities
"""
'
' Author: Jonathan Potter
'
' Name: Driver
'
' Description: Drives the simulation, including reading in simulation parameters,
' generating a network, running each protocol on the generated network, and
' graphing statistics.
'
"""
class Driver:
# The simulation input parameters (knobs)
parameters = None
# The list of protocols to run the simulation with
protocols = None
"""
' Name: __init__
'
' Description: Constructor for class Driver. Sets up protocols and parameters.
'
' Parameters: parameters The simulation wide parameters (knobs)
'
' Return: An instance of class Driver.
'
"""
def __init__(self, parameters):
self.parameters = parameters
self.protocols = {
'Simple': Simple(),
'Crowds': Crowds(),
'OnionRouting': OnionRouting()
}
"""
' Name: generateNetwork
'
' Description: Generates a random network to use in the simulation.
'
' Parameters: numberOfNodes The number of nodes, or network size of the generated network.
'
' Return: A network of size numberOfNodes
'
"""
def generateNetwork(self, numberOfNodes):
network = nx.connected_watts_strogatz_graph(numberOfNodes, int(round(math.log(numberOfNodes))), .8)
# add a random weight between 1 and 10 for each edge
for (u,v) in network.edges():
weight = random.randint(*self.parameters['bandwidthRange'])
network[u][v] = {'weight': weight}
network[v][u] = {'weight': weight}
return network
"""
' Name: displayNetwork
'
' Description: Saves a visual representation of the network to a file. This includes a highlighted path.
'
' Parameters: network The network to display.
' path The path to highlight on the network.
' filename The filename to save the image to.
'
' Return: None.
'
"""
def displayNetwork(self, network, path, filename):
(nodeColors, edgeColors) = self.highlightPath(network, path)
# Do widths
edgeWidths = []
for (u, v) in network.edges():
edgeWidths.append(Utilities.getBandwidth(self.parameters, network, u, v))
pos = nx.graphviz_layout(network, prog = 'dot')
nx.draw_networkx_edges( network,
pos,
alpha = .3,
width = edgeWidths,
edge_color = edgeColors)
nx.draw_networkx_edges( network,
pos,
alpha = .4,
width = 1,
edge_color = 'k')
nx.draw_networkx_nodes( network,
pos,
node_size = 80,
alpha = 1,
node_color = nodeColors,
with_labels = False)
plt.axis('off')
# draw it
plt.savefig(filename)
plt.close()
"""
' Name: getRandomEndpoints
'
' Description: Gets a random source and destination node in the network to use for routing data
' in the simulation.
'
' Parameters: network The network to get source and destination from.
'
' Return: (u, v) U is the source node, and V is the destination node.
'
"""
def getRandomEndpoints(self, network):
u = random.choice(network.nodes())
withoutU = network.nodes()[:]
withoutU.remove(u)
v = random.choice(withoutU)
return (u, v)
"""
' Name: highlightPath
'
' Description: Visually highlights a path in our network. This function is used when saving
' a snapshot to a file.
'
' Parameters: network The network to use.
' path The path to highlight on the network.
'
' Return: (node_colors, edge_colors) Where node_colors and edge_colors are lists of color information to
' be used when saving a snapshot of the graph. These have the path highlighted.
'
"""
def highlightPath(self, network, path):
node_colors = [255 for i in range(network.number_of_nodes())]
edge_colors = ['black' for i in range(network.number_of_edges())]
pairs = Utilities.pairs(path)[:-1]
for (a, b) in pairs:
if b < a:
edge = (b, a)
else:
edge = (a, b)
try:
aIndex = network.nodes().index(a)
bIndex = network.nodes().index(b)
node_colors[aIndex] = 0
node_colors[bIndex] = 0
except:
pass
try:
edgeIndex = network.edges().index(edge)
edge_colors[edgeIndex] = 'magenta'
except:
pass
return (node_colors, edge_colors)
"""
' Name: initPlot
'
' Description: Initialize gnuplot to plot a certain dataset. This function opens a pipe to gnuplot and
' sets up basic axes and titles, etc.
'
' Parameters: name The name of this plot. This will be used as the filename to store the raw data in.
' title The title of this plot. This is used as the title of the graph.
' xLabel The label for the x axis of the plot.
' yLabel The label for the y axis of the plot.
'
' Return: (plotFile, gnuplot) Handles to the data file, and the gnuplot pipe respectively.
'
"""
def initPlot(self, name, title, xLabel, yLabel):
# initialize plot file
plotFile = open(name, "w")
gnuplot = os.popen("gnuplot -persist 2> gnuplot.log", "w")
# initialize gnuplot
# gnuplot.write("set terminal x11\n")
gnuplot.write("set title '%s'\n" % title)
gnuplot.write("set xlabel '%s'\n" % xLabel)
gnuplot.write("set ylabel '%s'\n" % yLabel)
gnuplot.write("plot '%s' using 1:2 title 'Simple' with lines, '%s' using 1:3 title 'Crowds' with lines, '%s' using 1:4 title 'Onion Routing' with lines\n" % (name, name, name))
return (plotFile, gnuplot)
"""
' Name: plotSlice
'
' Description: Plots one datapoint of the graph. This function is called each time through a loop
' to get live real time plotting.
'
' Parameters: plot The plot returned by the initPlot function.
' data The datapoint to plot.
'
' Return: None.
'
"""
def plotSlice(self, plot, data):
(plotFile, gnuplot) = plot
plotFile.write(' '.join(map(lambda x: str(x), data)) + "\n")
plotFile.flush()
gnuplot.write("replot\n")
try:
gnuplot.flush()
except:
pass
"""
' Name: closePlot
'
' Description: Cleans up all plot variables, and closes file and pipe handles.
'
' Parameters: plot The plot returned by initPlot.
'
' Return: None.
'
"""
def closePlot(self, plot):
(plotFile, gnuplot) = plot
plotFile.close()
gnuplot.close()
"""
' Name: simulate
'
' Description: Main simulation funciton. Drives each of the protocols, generates the networks, and plots
' statistics from the simulation.
'
' Parameters: None
'
' Return: None.
'
"""
def simulate(self):
bandwidthPlot = self.initPlot("bandwidth", "Bandwidth Comparison", "Number of nodes", "Bandwidth (Mbps)")
overheadPlot = self.initPlot("overhead", "Overhead Comparison", "Number of nodes", "Overhead (milliseconds)")
# data in bits
amountOfData = self.parameters['defaultDataNumberOfPackets'] * self.parameters['packetSize'] * 8
for n in range(*self.parameters['numberOfNodesRange']):
iterationTimes = [[], [], []]
for i in range(self.parameters['iterations']):
network = self.generateNetwork(n)
(source, destination) = self.getRandomEndpoints(network)
# Simple protocol
simplePath = self.protocols['Simple'].getPath(network, self.parameters, source, destination)
iterationTimes[0].append(self.protocols['Simple'].runSimulation(network, self.parameters, simplePath))
# Crowds protocol
(crowdsPath, shortestPath, crowdsNetwork) = self.protocols['Crowds'].getPath(network, self.parameters, source)
iterationTimes[1].append(self.protocols['Crowds'].runSimulation(crowdsNetwork, self.parameters, crowdsPath, shortestPath))
self.protocols['Crowds'].merge(crowdsPath, shortestPath)
# Onion Routing protocol
orPath = self.protocols['OnionRouting'].getPath(self.parameters, network, source, destination)
iterationTimes[2].append(self.protocols['OnionRouting'].runSimulation(network, self.parameters, orPath))
if n in self.parameters['snapshot'] and i is 1:
self.displayNetwork(network, simplePath, 'nodes_%s_protocol_%s' % (n, 'simple'))
self.displayNetwork(crowdsNetwork, crowdsPath, 'nodes_%s_protocol_%s' % (n, 'crowds'))
self.displayNetwork(network, orPath, 'nodes_%s_protocol_%s' % (n, 'onionrouting'))
# In ms
averageTimes = map(lambda p: float(sum(p)) / len(p), iterationTimes)
# In Mbps
bandwidths = map(lambda x: (float(amountOfData) / x) / 10 ** 3, averageTimes)
# In ms
overheads = averageTimes
self.plotSlice(bandwidthPlot, [n] + bandwidths)
self.plotSlice(overheadPlot, [n] + overheads)
self.closePlot(bandwidthPlot)
self.closePlot(overheadPlot)
| [
[
1,
0,
0.0125,
0.0031,
0,
0.66,
0,
691,
0,
1,
0,
0,
691,
0,
0
],
[
1,
0,
0.0156,
0.0031,
0,
0.66,
0.0909,
596,
0,
1,
0,
0,
596,
0,
0
],
[
1,
0,
0.0219,
0.0031,
0,
... | [
"import networkx as nx",
"import matplotlib.pyplot as plt",
"import sys",
"import os",
"import random",
"import math",
"from protocols import Simple",
"from protocols import Crowds",
"from protocols import OnionRouting",
"from protocols import Utilities",
"\"\"\"\n'\n' Author: Jonathan Potter\n'... |
#!/opt/local/Library/Frameworks/Python.framework/Versions/2.7/bin/python
import codecs
import re
import jinja2
import markdown
def process_slides():
with codecs.open('../presentation.html', 'w', encoding='utf8') as outfile:
md = codecs.open('slides.md', encoding='utf8').read()
md_slides = md.split('\n---\n')
print len(md_slides)
slides = []
# Process each slide separately.
for md_slide in md_slides:
slide = {}
sections = md_slide.split('\n\n')
# Extract metadata at the beginning of the slide (look for key: value)
# pairs.
metadata_section = sections[0]
metadata = parse_metadata(metadata_section)
slide.update(metadata)
remainder_index = metadata and 1 or 0
# Get the content from the rest of the slide.
content_section = '\n\n'.join(sections[remainder_index:])
html = markdown.markdown(content_section)
slide['content'] = postprocess_html(html, markdown)
slides.append(slide)
template = jinja2.Template(open('base.html').read())
outfile.write(template.render(locals()))
def parse_metadata(section):
"""Given the first part of a slide, returns metadata associated with it."""
metadata = {}
metadata_lines = section.split('\n')
for line in metadata_lines:
colon_index = line.find(':')
if colon_index != -1:
key = line[:colon_index].strip()
val = line[colon_index + 1:].strip()
metadata[key] = val
return metadata
def postprocess_html(html, metadata):
"""Returns processed HTML to fit into the slide template format."""
return html
if __name__ == '__main__':
process_slides()
| [
[
1,
0,
0.0377,
0.0189,
0,
0.66,
0,
220,
0,
1,
0,
0,
220,
0,
0
],
[
1,
0,
0.0566,
0.0189,
0,
0.66,
0.1429,
540,
0,
1,
0,
0,
540,
0,
0
],
[
1,
0,
0.0755,
0.0189,
0,
... | [
"import codecs",
"import re",
"import jinja2",
"import markdown",
"def process_slides():\n with codecs.open('../presentation.html', 'w', encoding='utf8') as outfile:\n md = codecs.open('slides.md', encoding='utf8').read()\n md_slides = md.split('\\n---\\n')\n print(len(md_slides))\n\n slides = ... |
#!/usr/bin/python2.6
#
# Simple http server to emulate api.playfoursquare.com
import logging
import shutil
import sys
import urlparse
import SimpleHTTPServer
import BaseHTTPServer
class RequestHandler(BaseHTTPServer.BaseHTTPRequestHandler):
"""Handle playfoursquare.com requests, for testing."""
def do_GET(self):
logging.warn('do_GET: %s, %s', self.command, self.path)
url = urlparse.urlparse(self.path)
logging.warn('do_GET: %s', url)
query = urlparse.parse_qs(url.query)
query_keys = [pair[0] for pair in query]
response = self.handle_url(url)
if response != None:
self.send_200()
shutil.copyfileobj(response, self.wfile)
self.wfile.close()
do_POST = do_GET
def handle_url(self, url):
path = None
if url.path == '/v1/venue':
path = '../captures/api/v1/venue.xml'
elif url.path == '/v1/addvenue':
path = '../captures/api/v1/venue.xml'
elif url.path == '/v1/venues':
path = '../captures/api/v1/venues.xml'
elif url.path == '/v1/user':
path = '../captures/api/v1/user.xml'
elif url.path == '/v1/checkcity':
path = '../captures/api/v1/checkcity.xml'
elif url.path == '/v1/checkins':
path = '../captures/api/v1/checkins.xml'
elif url.path == '/v1/cities':
path = '../captures/api/v1/cities.xml'
elif url.path == '/v1/switchcity':
path = '../captures/api/v1/switchcity.xml'
elif url.path == '/v1/tips':
path = '../captures/api/v1/tips.xml'
elif url.path == '/v1/checkin':
path = '../captures/api/v1/checkin.xml'
elif url.path == '/history/12345.rss':
path = '../captures/api/v1/feed.xml'
if path is None:
self.send_error(404)
else:
logging.warn('Using: %s' % path)
return open(path)
def send_200(self):
self.send_response(200)
self.send_header('Content-type', 'text/xml')
self.end_headers()
def main():
if len(sys.argv) > 1:
port = int(sys.argv[1])
else:
port = 8080
server_address = ('0.0.0.0', port)
httpd = BaseHTTPServer.HTTPServer(server_address, RequestHandler)
sa = httpd.socket.getsockname()
print "Serving HTTP on", sa[0], "port", sa[1], "..."
httpd.serve_forever()
if __name__ == '__main__':
main()
| [
[
1,
0,
0.0588,
0.0118,
0,
0.66,
0,
715,
0,
1,
0,
0,
715,
0,
0
],
[
1,
0,
0.0706,
0.0118,
0,
0.66,
0.125,
614,
0,
1,
0,
0,
614,
0,
0
],
[
1,
0,
0.0824,
0.0118,
0,
0... | [
"import logging",
"import shutil",
"import sys",
"import urlparse",
"import SimpleHTTPServer",
"import BaseHTTPServer",
"class RequestHandler(BaseHTTPServer.BaseHTTPRequestHandler):\n \"\"\"Handle playfoursquare.com requests, for testing.\"\"\"\n\n def do_GET(self):\n logging.warn('do_GET: %s, %s',... |
#!/usr/bin/python
import datetime
import sys
import textwrap
import common
from xml.dom import pulldom
PARSER = """\
/**
* Copyright 2009 Joe LaPenna
*/
package com.joelapenna.foursquare.parsers;
import com.joelapenna.foursquare.Foursquare;
import com.joelapenna.foursquare.error.FoursquareError;
import com.joelapenna.foursquare.error.FoursquareParseException;
import com.joelapenna.foursquare.types.%(type_name)s;
import org.xmlpull.v1.XmlPullParser;
import org.xmlpull.v1.XmlPullParserException;
import java.io.IOException;
import java.util.logging.Level;
import java.util.logging.Logger;
/**
* Auto-generated: %(timestamp)s
*
* @author Joe LaPenna (joe@joelapenna.com)
* @param <T>
*/
public class %(type_name)sParser extends AbstractParser<%(type_name)s> {
private static final Logger LOG = Logger.getLogger(%(type_name)sParser.class.getCanonicalName());
private static final boolean DEBUG = Foursquare.PARSER_DEBUG;
@Override
public %(type_name)s parseInner(XmlPullParser parser) throws XmlPullParserException, IOException,
FoursquareError, FoursquareParseException {
parser.require(XmlPullParser.START_TAG, null, null);
%(type_name)s %(top_node_name)s = new %(type_name)s();
while (parser.nextTag() == XmlPullParser.START_TAG) {
String name = parser.getName();
%(stanzas)s
} else {
// Consume something we don't understand.
if (DEBUG) LOG.log(Level.FINE, "Found tag that we don't recognize: " + name);
skipSubTree(parser);
}
}
return %(top_node_name)s;
}
}"""
BOOLEAN_STANZA = """\
} else if ("%(name)s".equals(name)) {
%(top_node_name)s.set%(camel_name)s(Boolean.valueOf(parser.nextText()));
"""
GROUP_STANZA = """\
} else if ("%(name)s".equals(name)) {
%(top_node_name)s.set%(camel_name)s(new GroupParser(new %(sub_parser_camel_case)s()).parse(parser));
"""
COMPLEX_STANZA = """\
} else if ("%(name)s".equals(name)) {
%(top_node_name)s.set%(camel_name)s(new %(parser_name)s().parse(parser));
"""
STANZA = """\
} else if ("%(name)s".equals(name)) {
%(top_node_name)s.set%(camel_name)s(parser.nextText());
"""
def main():
type_name, top_node_name, attributes = common.WalkNodesForAttributes(
sys.argv[1])
GenerateClass(type_name, top_node_name, attributes)
def GenerateClass(type_name, top_node_name, attributes):
"""generate it.
type_name: the type of object the parser returns
top_node_name: the name of the object the parser returns.
per common.WalkNodsForAttributes
"""
stanzas = []
for name in sorted(attributes):
typ, children = attributes[name]
replacements = Replacements(top_node_name, name, typ, children)
if typ == common.BOOLEAN:
stanzas.append(BOOLEAN_STANZA % replacements)
elif typ == common.GROUP:
stanzas.append(GROUP_STANZA % replacements)
elif typ in common.COMPLEX:
stanzas.append(COMPLEX_STANZA % replacements)
else:
stanzas.append(STANZA % replacements)
if stanzas:
# pop off the extranious } else for the first conditional stanza.
stanzas[0] = stanzas[0].replace('} else ', '', 1)
replacements = Replacements(top_node_name, name, typ, [None])
replacements['stanzas'] = '\n'.join(stanzas).strip()
print PARSER % replacements
def Replacements(top_node_name, name, typ, children):
# CameCaseClassName
type_name = ''.join([word.capitalize() for word in top_node_name.split('_')])
# CamelCaseClassName
camel_name = ''.join([word.capitalize() for word in name.split('_')])
# camelCaseLocalName
attribute_name = camel_name.lower().capitalize()
# mFieldName
field_name = 'm' + camel_name
if children[0]:
sub_parser_camel_case = children[0] + 'Parser'
else:
sub_parser_camel_case = (camel_name[:-1] + 'Parser')
return {
'type_name': type_name,
'name': name,
'top_node_name': top_node_name,
'camel_name': camel_name,
'parser_name': typ + 'Parser',
'attribute_name': attribute_name,
'field_name': field_name,
'typ': typ,
'timestamp': datetime.datetime.now(),
'sub_parser_camel_case': sub_parser_camel_case,
'sub_type': children[0]
}
if __name__ == '__main__':
main()
| [
[
1,
0,
0.0201,
0.0067,
0,
0.66,
0,
426,
0,
1,
0,
0,
426,
0,
0
],
[
1,
0,
0.0268,
0.0067,
0,
0.66,
0.0769,
509,
0,
1,
0,
0,
509,
0,
0
],
[
1,
0,
0.0336,
0.0067,
0,
... | [
"import datetime",
"import sys",
"import textwrap",
"import common",
"from xml.dom import pulldom",
"PARSER = \"\"\"\\\n/**\n * Copyright 2009 Joe LaPenna\n */\n\npackage com.joelapenna.foursquare.parsers;\n\nimport com.joelapenna.foursquare.Foursquare;",
"BOOLEAN_STANZA = \"\"\"\\\n } else i... |
#!/usr/bin/python
"""
Pull a oAuth protected page from foursquare.
Expects ~/.oget to contain (one on each line):
CONSUMER_KEY
CONSUMER_KEY_SECRET
USERNAME
PASSWORD
Don't forget to chmod 600 the file!
"""
import httplib
import os
import re
import sys
import urllib
import urllib2
import urlparse
import user
from xml.dom import pulldom
from xml.dom import minidom
import oauth
"""From: http://groups.google.com/group/foursquare-api/web/oauth
@consumer = OAuth::Consumer.new("consumer_token","consumer_secret", {
:site => "http://foursquare.com",
:scheme => :header,
:http_method => :post,
:request_token_path => "/oauth/request_token",
:access_token_path => "/oauth/access_token",
:authorize_path => "/oauth/authorize"
})
"""
SERVER = 'api.foursquare.com:80'
CONTENT_TYPE_HEADER = {'Content-Type' :'application/x-www-form-urlencoded'}
SIGNATURE_METHOD = oauth.OAuthSignatureMethod_HMAC_SHA1()
AUTHEXCHANGE_URL = 'http://api.foursquare.com/v1/authexchange'
def parse_auth_response(auth_response):
return (
re.search('<oauth_token>(.*)</oauth_token>', auth_response).groups()[0],
re.search('<oauth_token_secret>(.*)</oauth_token_secret>',
auth_response).groups()[0]
)
def create_signed_oauth_request(username, password, consumer):
oauth_request = oauth.OAuthRequest.from_consumer_and_token(
consumer, http_method='POST', http_url=AUTHEXCHANGE_URL,
parameters=dict(fs_username=username, fs_password=password))
oauth_request.sign_request(SIGNATURE_METHOD, consumer, None)
return oauth_request
def main():
url = urlparse.urlparse(sys.argv[1])
# Nevermind that the query can have repeated keys.
parameters = dict(urlparse.parse_qsl(url.query))
password_file = open(os.path.join(user.home, '.oget'))
lines = [line.strip() for line in password_file.readlines()]
if len(lines) == 4:
cons_key, cons_key_secret, username, password = lines
access_token = None
else:
cons_key, cons_key_secret, username, password, token, secret = lines
access_token = oauth.OAuthToken(token, secret)
consumer = oauth.OAuthConsumer(cons_key, cons_key_secret)
if not access_token:
oauth_request = create_signed_oauth_request(username, password, consumer)
connection = httplib.HTTPConnection(SERVER)
headers = {'Content-Type' :'application/x-www-form-urlencoded'}
connection.request(oauth_request.http_method, AUTHEXCHANGE_URL,
body=oauth_request.to_postdata(), headers=headers)
auth_response = connection.getresponse().read()
token = parse_auth_response(auth_response)
access_token = oauth.OAuthToken(*token)
open(os.path.join(user.home, '.oget'), 'w').write('\n'.join((
cons_key, cons_key_secret, username, password, token[0], token[1])))
oauth_request = oauth.OAuthRequest.from_consumer_and_token(consumer,
access_token, http_method='POST', http_url=url.geturl(),
parameters=parameters)
oauth_request.sign_request(SIGNATURE_METHOD, consumer, access_token)
connection = httplib.HTTPConnection(SERVER)
connection.request(oauth_request.http_method, oauth_request.to_url(),
body=oauth_request.to_postdata(), headers=CONTENT_TYPE_HEADER)
print connection.getresponse().read()
#print minidom.parse(connection.getresponse()).toprettyxml(indent=' ')
if __name__ == '__main__':
main()
| [
[
8,
0,
0.0631,
0.0991,
0,
0.66,
0,
0,
1,
0,
0,
0,
0,
0,
0
],
[
1,
0,
0.1261,
0.009,
0,
0.66,
0.05,
2,
0,
1,
0,
0,
2,
0,
0
],
[
1,
0,
0.1351,
0.009,
0,
0.66,
0.... | [
"\"\"\"\nPull a oAuth protected page from foursquare.\n\nExpects ~/.oget to contain (one on each line):\nCONSUMER_KEY\nCONSUMER_KEY_SECRET\nUSERNAME\nPASSWORD",
"import httplib",
"import os",
"import re",
"import sys",
"import urllib",
"import urllib2",
"import urlparse",
"import user",
"from xml.... |
#!/usr/bin/python
import os
import subprocess
import sys
BASEDIR = '../main/src/com/joelapenna/foursquare'
TYPESDIR = '../captures/types/v1'
captures = sys.argv[1:]
if not captures:
captures = os.listdir(TYPESDIR)
for f in captures:
basename = f.split('.')[0]
javaname = ''.join([c.capitalize() for c in basename.split('_')])
fullpath = os.path.join(TYPESDIR, f)
typepath = os.path.join(BASEDIR, 'types', javaname + '.java')
parserpath = os.path.join(BASEDIR, 'parsers', javaname + 'Parser.java')
cmd = 'python gen_class.py %s > %s' % (fullpath, typepath)
print cmd
subprocess.call(cmd, stdout=sys.stdout, shell=True)
cmd = 'python gen_parser.py %s > %s' % (fullpath, parserpath)
print cmd
subprocess.call(cmd, stdout=sys.stdout, shell=True)
| [
[
1,
0,
0.1111,
0.037,
0,
0.66,
0,
688,
0,
1,
0,
0,
688,
0,
0
],
[
1,
0,
0.1481,
0.037,
0,
0.66,
0.1429,
394,
0,
1,
0,
0,
394,
0,
0
],
[
1,
0,
0.1852,
0.037,
0,
0.6... | [
"import os",
"import subprocess",
"import sys",
"BASEDIR = '../main/src/com/joelapenna/foursquare'",
"TYPESDIR = '../captures/types/v1'",
"captures = sys.argv[1:]",
"if not captures:\n captures = os.listdir(TYPESDIR)",
" captures = os.listdir(TYPESDIR)",
"for f in captures:\n basename = f.split('... |
#!/usr/bin/python
import logging
from xml.dom import minidom
from xml.dom import pulldom
BOOLEAN = "boolean"
STRING = "String"
GROUP = "Group"
# Interfaces that all FoursquareTypes implement.
DEFAULT_INTERFACES = ['FoursquareType']
# Interfaces that specific FoursqureTypes implement.
INTERFACES = {
}
DEFAULT_CLASS_IMPORTS = [
]
CLASS_IMPORTS = {
# 'Checkin': DEFAULT_CLASS_IMPORTS + [
# 'import com.joelapenna.foursquare.filters.VenueFilterable'
# ],
# 'Venue': DEFAULT_CLASS_IMPORTS + [
# 'import com.joelapenna.foursquare.filters.VenueFilterable'
# ],
# 'Tip': DEFAULT_CLASS_IMPORTS + [
# 'import com.joelapenna.foursquare.filters.VenueFilterable'
# ],
}
COMPLEX = [
'Group',
'Badge',
'Beenhere',
'Checkin',
'CheckinResponse',
'City',
'Credentials',
'Data',
'Mayor',
'Rank',
'Score',
'Scoring',
'Settings',
'Stats',
'Tags',
'Tip',
'User',
'Venue',
]
TYPES = COMPLEX + ['boolean']
def WalkNodesForAttributes(path):
"""Parse the xml file getting all attributes.
<venue>
<attribute>value</attribute>
</venue>
Returns:
type_name - The java-style name the top node will have. "Venue"
top_node_name - unadultured name of the xml stanza, probably the type of
java class we're creating. "venue"
attributes - {'attribute': 'value'}
"""
doc = pulldom.parse(path)
type_name = None
top_node_name = None
attributes = {}
level = 0
for event, node in doc:
# For skipping parts of a tree.
if level > 0:
if event == pulldom.END_ELEMENT:
level-=1
logging.warn('(%s) Skip end: %s' % (str(level), node))
continue
elif event == pulldom.START_ELEMENT:
logging.warn('(%s) Skipping: %s' % (str(level), node))
level+=1
continue
if event == pulldom.START_ELEMENT:
logging.warn('Parsing: ' + node.tagName)
# Get the type name to use.
if type_name is None:
type_name = ''.join([word.capitalize()
for word in node.tagName.split('_')])
top_node_name = node.tagName
logging.warn('Found Top Node Name: ' + top_node_name)
continue
typ = node.getAttribute('type')
child = node.getAttribute('child')
# We don't want to walk complex types.
if typ in COMPLEX:
logging.warn('Found Complex: ' + node.tagName)
level = 1
elif typ not in TYPES:
logging.warn('Found String: ' + typ)
typ = STRING
else:
logging.warn('Found Type: ' + typ)
logging.warn('Adding: ' + str((node, typ)))
attributes.setdefault(node.tagName, (typ, [child]))
logging.warn('Attr: ' + str((type_name, top_node_name, attributes)))
return type_name, top_node_name, attributes
| [
[
1,
0,
0.0263,
0.0088,
0,
0.66,
0,
715,
0,
1,
0,
0,
715,
0,
0
],
[
1,
0,
0.0439,
0.0088,
0,
0.66,
0.0833,
290,
0,
1,
0,
0,
290,
0,
0
],
[
1,
0,
0.0526,
0.0088,
0,
... | [
"import logging",
"from xml.dom import minidom",
"from xml.dom import pulldom",
"BOOLEAN = \"boolean\"",
"STRING = \"String\"",
"GROUP = \"Group\"",
"DEFAULT_INTERFACES = ['FoursquareType']",
"INTERFACES = {\n}",
"DEFAULT_CLASS_IMPORTS = [\n]",
"CLASS_IMPORTS = {\n# 'Checkin': DEFAULT_CLASS_IMP... |
#coding=utf-8
from django.db import models
from django.contrib.auth.models import User
#记录订单的表,请根据自己需求完成
class Transaction(models.Model):
    """Order record table; extend with whatever fields your checkout flow needs."""
    # Merchant-side order number sent to Alipay as out_trade_no (max 32 chars).
    out_trade_no=models.CharField(max_length=32)
    # The user who placed the order.
    user=models.ForeignKey(User)
| [
[
1,
0,
0.25,
0.125,
0,
0.66,
0,
40,
0,
1,
0,
0,
40,
0,
0
],
[
1,
0,
0.375,
0.125,
0,
0.66,
0.5,
808,
0,
1,
0,
0,
808,
0,
0
],
[
3,
0,
0.875,
0.375,
0,
0.66,
1,... | [
"from django.db import models",
"from django.contrib.auth.models import User",
"class Transaction(models.Model):\n out_trade_no=models.CharField(max_length=32)\n user=models.ForeignKey(User)",
" out_trade_no=models.CharField(max_length=32)",
" user=models.ForeignKey(User)"
] |
#encoding=utf-8
from django.conf.urls.defaults import patterns,include,url
from views import *
# Alipay endpoints: "checkout" redirects the buyer to the gateway,
# "alipay_notify" receives Alipay's server-to-server notification, and
# "alipay_return" handles the browser redirect back from Alipay.
urlpatterns=patterns('alipay.views',
        url(r'checkout$',alipayTo),
        url(r'alipay_notify$',alipay_notify),
        url(r'alipay_return$',alipay_return),
        )
| [
[
1,
0,
0.3,
0.1,
0,
0.66,
0,
341,
0,
3,
0,
0,
341,
0,
0
],
[
1,
0,
0.4,
0.1,
0,
0.66,
0.5,
547,
0,
1,
0,
0,
547,
0,
0
],
[
14,
0,
0.8,
0.5,
0,
0.66,
1,
990... | [
"from django.conf.urls.defaults import patterns,include,url",
"from views import *",
"urlpatterns=patterns('alipay.views',\n url(r'checkout$',alipayTo),\n url(r'alipay_notify$',alipay_notify),\n url(r'alipay_return$',alipay_return),\n )"
] |
#!/usr/bin/env python
#coding=utf-8
'''
#=============================================================================
# FileName: views.py
# Desc: 支付宝接口调用类
# Author: GitFree
# Email: pengzhao.lh@gmail.com
# LastChange: 2011-11-21 13:52:52
#=============================================================================
'''
from django.http import HttpResponseRedirect,HttpResponse
from django.shortcuts import render_to_response,RequestContext
from lib.AlipayService import Service
from lib.AlipayNotify import Notify
import hashlib
import time
from cart import Cart
from django.contrib.auth.decorators import login_required
@login_required
def alipayTo(request):
    """Build the Alipay direct-pay request for the current cart and render it.

    Generates a merchant order number, fills in the required and optional
    gateway parameters, and returns a page whose auto-submitting form posts
    the signed request to Alipay.
    """
    #///////////////////////////////////// request parameters ////////////////////////////////////////
    #////////// required parameters //////////////////////////////////////////
    # Merchant-side order number; must match your own order system.
    # NOTE(review): md5(time.time()) is not collision-proof under concurrent
    # checkouts -- consider uuid4 or a DB sequence. (renamed from `hash`,
    # which shadowed the builtin)
    digest = hashlib.md5()
    digest.update(str(time.time()))
    out_trade_no = digest.hexdigest()
    cart = Cart(request)
    # Order title: shown as the "product name" in Alipay's cashier and in
    # the trade-management product-name column.
    subject = u""
    # Order description/detail/memo: shown as the "product description".
    body = ""
    for item in cart:
        body = "%s%sx%d+" % (body, item.product.name, item.quantity)
    body = body[:-1]  # drop the trailing '+'
    # Order total: shown as the amount payable in Alipay's cashier.
    total_fee = cart.total_fee
    #////////// end of required parameters /////////////////////////////////////
    # Extended parameters -- default payment method //
    # Default payment method; codes are listed in the direct-pay API docs.
    paymethod = ""
    # Default bank code; see the "bank list" appendix of the API docs.
    defaultbank = ""
    # Extended parameters -- anti-phishing //
    # Anti-phishing timestamp.
    anti_phishing_key = ""
    # Client IP address (write your own code to obtain it).
    exter_invoke_ip = ""
    # Note: enable anti-phishing with care. Once exter_invoke_ip or
    # anti_phishing_key has ever been set, both become required parameters;
    # POST requests are recommended. Example:
    #   exter_invoke_ip = ""
    #   aliQuery_timestamp = Service()
    #   anti_phishing_key = aliQuery_timestamp.Query_timestamp()
    # Extended parameters -- other //
    # Product display URL: full http:// path, no custom query parameters.
    show_url = ""
    # Free-form passthrough parameter (no '=' or '&'); not shown on the page.
    # We store the current user's id so the notify handler can link the
    # payment back to the user.
    extra_common_param = str(request.user.id)
    # Default buyer Alipay account.
    buyer_email = ""
    # Extended parameters -- royalty / profit sharing //
    # Royalty type: the fixed value "10" when used.
    royalty_type = ""
    # Royalty entries: "email_1^amount_1^memo_1|email_2^amount_2^memo_2",
    # at most 10 entries, whose sum must not exceed total_fee.
    royalty_parameters = ""
    #/////////////////////////////////// end of request parameters //////////////////////////////////////
    # Pack the request parameters into a dict.
    sParaTemp = {}
    sParaTemp["payment_type"] = "1"
    sParaTemp["show_url"] = show_url
    sParaTemp["out_trade_no"] = out_trade_no
    sParaTemp["subject"] = subject
    sParaTemp["body"] = body
    sParaTemp["total_fee"] = total_fee
    sParaTemp["paymethod"] = paymethod
    sParaTemp["defaultbank"] = defaultbank
    sParaTemp["anti_phishing_key"] = anti_phishing_key
    sParaTemp["exter_invoke_ip"] = exter_invoke_ip
    sParaTemp["extra_common_param"] = extra_common_param
    sParaTemp["buyer_email"] = buyer_email
    sParaTemp["royalty_type"] = royalty_type
    sParaTemp["royalty_parameters"] = royalty_parameters
    # Build the direct-pay auto-submitting form HTML; no changes needed here.
    alipay = Service()
    strHtml = alipay.Create_direct_pay_by_user(sParaTemp)
    return render_to_response("empty.html", {'content': strHtml})
def alipay_notify(request):
    """Handle Alipay's asynchronous server-to-server notification.

    Alipay POSTs the trade status here; we verify the signature and the
    notify_id against Alipay's server and must answer the literal string
    "success" so Alipay stops re-sending the notification.
    """
    if request.POST:
        notify = Notify()
        # Bug fix: the verifier instance is `notify`; the original called
        # an undefined name `aliNotify`, raising NameError on every notify.
        verifyResult = notify.Verify(request.POST,
            request.POST["notify_id"], request.POST["sign"])
        if verifyResult:  # verification succeeded
            #///////////////////////////////////////////////////////////////////////////////////
            # Put your business logic here (the code below is a reference).
            # Alipay's returned parameters; see the async-notification
            # parameter list in the API docs.
            order_no = request.POST["out_trade_no"]  # order number
            total_fee = request.POST["total_fee"]    # total amount
            subject = request.POST["subject"]        # product/order name
            body = request.POST["body"]              # product/order description
            if request.POST["trade_status"] == "TRADE_FINISHED"\
                or request.POST["trade_status"] =="TRADE_SUCCESS":
                # Check whether this order has already been processed; if
                # not, look it up by out_trade_no and run your business code.
                return HttpResponse("success")  # required literal; do not change
            else:
                # Other statuses need no handling for instant payment;
                # still acknowledge with "success".
                return HttpResponse("success")
            #/////////////////////////////////////////////////////////////////////////////////////
        else:  # verification failed
            return HttpResponse("fail")
    else:
        return HttpResponse("无通知参数")
@login_required
def alipay_return(request):
    """Handle the synchronous browser redirect back from Alipay.

    Alipay sends the trade result as GET parameters; we verify the
    signature and notify_id, then show the buyer a result page.
    """
    if request.GET:
        notify = Notify()
        # Bug fix: the verifier instance is `notify`; the original called
        # an undefined name `aliNotify`, raising NameError on every return.
        verifyResult = notify.Verify(request.GET,
            request.GET["notify_id"], request.GET["sign"])
        if verifyResult:  # verification succeeded
            #///////////////////////////////////////////////////////////////////////////////////
            # Put your business logic here (the code below is a reference).
            # Alipay's returned parameters; see the parameter list in the
            # API docs.
            order_no = request.GET["out_trade_no"]  # order number
            total_fee = request.GET["total_fee"]    # total amount
            subject = request.GET["subject"]        # product/order name
            body = request.GET["body"]              # product/order description
            if request.GET["trade_status"] == "TRADE_FINISHED"\
                or request.GET["trade_status"] =="TRADE_SUCCESS":
                # Check whether this order has already been processed; if
                # not, look it up by out_trade_no and run your business code.
                return HttpResponse("支付成功!")
            else:
                # Other statuses need no special handling for instant payment.
                return HttpResponse("支付成功!")
            #/////////////////////////////////////////////////////////////////////////////////////
        else:  # verification failed
            return HttpResponse("fail")
    else:
        return HttpResponse("无通知参数")
| [
[
8,
0,
0.0355,
0.0533,
0,
0.66,
0,
0,
1,
0,
0,
0,
0,
0,
0
],
[
1,
0,
0.0651,
0.0059,
0,
0.66,
0.0909,
779,
0,
2,
0,
0,
779,
0,
0
],
[
1,
0,
0.071,
0.0059,
0,
0.66,... | [
"'''\n#=============================================================================\n# FileName: views.py\n# Desc: 支付宝接口调用类\n# Author: GitFree\n# Email: pengzhao.lh@gmail.com\n# LastChange: 2011-11-21 13:52:52\n#============================================================================... |
#!/usr/bin/env python
#coding=utf-8
'''
#=============================================================================
# FileName: AlipayConfig.py
# Desc: 基础配置类
# Author: GitFree
# Email: pengzhao.lh@gmail.com
# LastChange: 2011-11-21 13:53:39
#=============================================================================
'''
class Config:
    """Static Alipay account and gateway configuration.

    Fill in partner, key and seller_email with the values from your
    Alipay contract before going live.
    """
    def __init__(self):
        # vvvv fill in your basic account information below vvvv
        # Partner id: a 16-digit string starting with 2088.
        self.partner = ""
        # Transaction security check code: 32 chars of digits and letters.
        self.key = ""
        # Contracted Alipay account / seller Alipay account.
        self.seller_email = ""
        # Synchronous return page: full http:// path, no custom query
        # parameters such as ?id=123 allowed.
        self.return_url = ""
        # Asynchronous server-notify page: full http:// path, no custom
        # query parameters allowed.
        self.notify_url = ""
        # ^^^^ fill in your basic account information above ^^^^
        # Character encoding: currently gbk or utf-8 are supported.
        self.input_charset = "utf-8"
        # Signature method; do not modify.
        self.sign_type = "MD5"
        # Transport: https if your server supports SSL, otherwise http.
        self.transport = "http"
| [
[
8,
0,
0.1875,
0.2812,
0,
0.66,
0,
0,
1,
0,
0,
0,
0,
0,
0
],
[
3,
0,
0.6875,
0.6562,
0,
0.66,
1,
979,
0,
1,
0,
0,
0,
0,
0
],
[
2,
1,
0.7031,
0.625,
1,
0.99,
0,... | [
"'''\n#=============================================================================\n# FileName: AlipayConfig.py\n# Desc: 基础配置类\n# Author: GitFree\n# Email: pengzhao.lh@gmail.com\n# LastChange: 2011-11-21 13:53:39\n#========================================================================... |
#!/usr/bin/env python
#coding=utf-8
'''
#=============================================================================
# FileName: AlipayNotify.py
# Desc: 支付宝通知处理类
# Author: GitFree
# Email: pengzhao.lh@gmail.com
# LastChange: 2011-09-27 00:52:09
#=============================================================================
'''
from AlipayConfig import Config
from AlipayCore import Core
import urllib2
# #///////////////////注意/////////////////////////////
# 调试通知返回时,可查看或改写log日志的写入TXT里的数据,来检查通知返回是否正常
# </summary>
class Notify :
    """Verifies that a payment notification really came from Alipay.

    Checks both the MD5 signature of the returned parameters and, via a
    round-trip to Alipay's server, the notify_id.
    """

    # HTTPS notification-verification endpoint.
    Https_veryfy_url = "https://www.alipay.com/cooperate/gateway.do?service=notify_verify&"
    # HTTP notification-verification endpoint.
    Http_veryfy_url = "http://notify.alipay.com/trade/notify_query.do?"

    def __init__(self):
        """Initialise from the static configuration."""
        config=Config()
        # Partner id.
        self.partner = config.partner
        # Transaction security check code (signing key).
        self.key = config.key
        self.input_charset = config.input_charset
        # Signature method (MD5).
        self.sign_type = config.sign_type
        # "https" or "http"; selects which verification endpoint is used.
        self.transport = config.transport

    def Verify(self,inputPara,notify_id, sign):
        """Return True if this notification is a legitimate Alipay message.

        inputPara: dict of callback parameters.
        notify_id: notification id to confirm with Alipay ("" skips the check).
        sign: the signature Alipay sent.
        """
        # Re-sign the returned parameters ourselves.
        mysign = self.GetResponseMysign(inputPara)
        # Ask Alipay's server whether the notification id is genuine.
        responseTxt = "true"
        if notify_id != "" :
            responseTxt = self.GetResponseTxt(notify_id)
        # (For debugging, log responseTxt/sign/mysign via Core.LogResult.)
        # responseTxt != "true" usually indicates server trouble, a wrong
        # partner id, or an expired notify_id (valid for one minute); a
        # sign mismatch points at the key, parameter format, or encoding.
        if responseTxt == "true" and sign == mysign:  # verified
            return True
        else:  # verification failed
            return False

    def GetPreSignStr(self,inputPara):
        """Return the pre-signature string (debug helper)."""
        # Strip empty values plus the sign/sign_type parameters.
        sPara = Core.FilterPara(inputPara)
        # "k1=v1&k2=v2..." in sorted key order.
        preSignStr = Core.CreateLinkString(sPara)
        return preSignStr

    def GetResponseMysign(self,inputPara):
        """Sign the (filtered) callback parameters and return the digest."""
        # Strip empty values plus the sign/sign_type parameters.
        sPara = Core.FilterPara(inputPara)
        # Compute the signature.
        mysign = Core.BuildMysign(sPara, self.key, self.sign_type, self.input_charset)
        return mysign

    def GetResponseTxt(self,notify_id,timeout=120000):
        """Ask Alipay's server whether notify_id is genuine.

        Returns the raw response body; "true" means the request came from
        Alipay. NOTE(review): `timeout` is passed straight to
        urllib2.urlopen, which takes seconds -- the 120000 default looks
        like it was meant as milliseconds; confirm before changing it.
        """
        veryfy_url = self.transport == "https" and self.Https_veryfy_url or self.Http_veryfy_url
        # Bug fix: the original string contained the mojibake "¬ify_id="
        # (an HTML-unescaped "&not"); the query parameter is "&notify_id=".
        veryfy_url += "partner=" + self.partner + "&notify_id=" + notify_id
        # Bug fix: the original bound the response to `open` (shadowing the
        # builtin) and never closed it; close the connection after reading.
        response = urllib2.urlopen(veryfy_url, timeout=timeout)
        try:
            return response.read()
        finally:
            response.close()
| [
[
8,
0,
0.0577,
0.0865,
0,
0.66,
0,
0,
1,
0,
0,
0,
0,
0,
0
],
[
1,
0,
0.1154,
0.0096,
0,
0.66,
0.25,
599,
0,
1,
0,
0,
599,
0,
0
],
[
1,
0,
0.125,
0.0096,
0,
0.66,
... | [
"'''\n#=============================================================================\n# FileName: AlipayNotify.py\n# Desc: 支付宝通知处理类\n# Author: GitFree\n# Email: pengzhao.lh@gmail.com\n# LastChange: 2011-09-27 00:52:09\n#=====================================================================... |
#!/usr/bin/env python
#coding=utf-8
'''
#=============================================================================
# FileName: AlipaySubmit.py
# Desc: 支付宝各接口请求提交类
# Author: GitFree
# Email: pengzhao.lh@gmail.com
# LastChange: 2011-09-24 14:20:35
#=============================================================================
'''
from AlipayConfig import Config
from AlipayCore import Core
class Submit:
    """Builds signed parameter sets and the auto-submitting form HTML for
    requests to the Alipay gateway."""

    def __init__(self):
        config=Config()
        # Transaction security check code (signing key).
        self.key = config.key
        # Character encoding of the request.
        self.input_charset = config.input_charset
        # Signature method (MD5).
        self.sign_type = config.sign_type

    def BuildRequestPara(self,sParaTemp):
        """Return the signed parameter dict to send to Alipay.

        sParaTemp: raw parameter dict; empty values and any pre-existing
        sign/sign_type entries are stripped before signing.
        """
        # Filter out empty values and sign/sign_type.
        sPara = Core.FilterPara(sParaTemp)
        # Sign the sorted "k=v&..." string with the merchant key.
        mysign = Core.BuildMysign(sPara, self.key, self.sign_type, self.input_charset)
        # Add the signature and its method to the outgoing parameters.
        sPara["sign"]=mysign
        sPara["sign_type"]= self.sign_type
        return sPara

    def BuildRequestParaToString(self,sParaTemp):
        """Return the signed parameters joined as a "k=v&..." string."""
        # Bug fix: the original called BuildRequestPara without `self.`,
        # which raised NameError.
        sPara = self.BuildRequestPara(sParaTemp)
        # Join all entries in "key=value" form with "&".
        strRequestData = Core.CreateLinkString(sPara)
        return strRequestData

    def BuildFormHtml(self,sParaTemp,gateway,strMethod,strButtonValue):
        """Return auto-submitting form HTML that posts the signed request.

        sParaTemp: request parameter dict.
        gateway: gateway URL (the charset parameter is appended to it).
        strMethod: submit method, "post" or "get".
        strButtonValue: label of the (hidden) confirm button.
        """
        dicPara = self.BuildRequestPara(sParaTemp)
        sbHtml =[]
        sbHtml.append("<form id='alipaysubmit' name='alipaysubmit' action='" + gateway +
            "_input_charset=" + self.input_charset + "' method='" + strMethod.lower() + "'>")
        for key in dicPara:
            sbHtml.append("<input type='hidden' name='%s' value='%s' />" %(key,dicPara[key]))
        # The submit button must not carry a name attribute.
        sbHtml.append("<input type='submit' value='" + strButtonValue + "' style='display:none'></form>")
        sbHtml.append("<script>document.forms['alipaysubmit'].submit()</script>")
        return ''.join(sbHtml)
####################未完成#######################################
'''
# <summary>
# 构造模拟远程HTTP的POST请求,获取支付宝的返回XML处理结果
# </summary>
# <param name="sParaTemp">请求参数数组</param>
# <param name="gateway">网关地址</param>
# <returns>支付宝返回XML处理结果</returns>
def SendPostInfo(sParaTemp,gateway):
#待请求参数数组字符串
strRequestData = BuildRequestParaToString(sParaTemp)
#把数组转换成流中所需字节数组类型
Encoding code = Encoding.GetEncoding(self.input_charset)
byte[] bytesRequestData = code.GetBytes(strRequestData)
#构造请求地址
strUrl = gateway + "_input_charset=" + self.input_charset
#请求远程HTTP
XmlDocument xmlDoc = new XmlDocument()
try:
#设置HttpWebRequest基本信息
HttpWebRequest myReq = (HttpWebRequest)HttpWebRequest.Create(strUrl)
myReq.Method = "post"
myReq.ContentType = "application/x-www-form-urlencoded"
#填充POST数据
myReq.ContentLength = bytesRequestData.Length
Stream requestStream = myReq.GetRequestStream()
requestStream.Write(bytesRequestData, 0, bytesRequestData.Length)
requestStream.Close()
#发送POST数据请求服务器
HttpWebResponse HttpWResp = (HttpWebResponse)myReq.GetResponse()
Stream myStream = HttpWResp.GetResponseStream()
#获取服务器返回信息
XmlTextReader Reader = new XmlTextReader(myStream)
xmlDoc.Load(Reader)
except Exception as exp:
strXmlError = "<error>" + exp + "</error>"
xmlDoc.LoadXml(strXmlError)
return xmlDoc
}
# <summary>
# 构造模拟远程HTTP的GET请求,获取支付宝的返回XML处理结果
# </summary>
# <param name="sParaTemp">请求参数数组</param>
# <param name="gateway">网关地址</param>
# <returns>支付宝返回XML处理结果</returns>
public static XmlDocument SendGetInfo(SortedDictionary<string, string> sParaTemp, string gateway)
{
#待请求参数数组字符串
string strRequestData = BuildRequestParaToString(sParaTemp)
#构造请求地址
string strUrl = gateway + strRequestData
#请求远程HTTP
XmlDocument xmlDoc = new XmlDocument()
try
{
#设置HttpWebRequest基本信息
HttpWebRequest myReq = (HttpWebRequest)HttpWebRequest.Create(strUrl)
myReq.Method = "get"
#发送POST数据请求服务器
HttpWebResponse HttpWResp = (HttpWebResponse)myReq.GetResponse()
Stream myStream = HttpWResp.GetResponseStream()
#获取服务器返回信息
XmlTextReader Reader = new XmlTextReader(myStream)
xmlDoc.Load(Reader)
}
catch (Exception exp)
{
string strXmlError = "<error>" + exp.Message + "</error>"
xmlDoc.LoadXml(strXmlError)
}
return xmlDoc
}
}
'''
| [
[
8,
0,
0.0349,
0.0523,
0,
0.66,
0,
0,
1,
0,
0,
0,
0,
0,
0
],
[
1,
0,
0.064,
0.0058,
0,
0.66,
0.25,
599,
0,
1,
0,
0,
599,
0,
0
],
[
1,
0,
0.0698,
0.0058,
0,
0.66,
... | [
"'''\n#=============================================================================\n# FileName: AlipaySubmit.py\n# Desc: 支付宝各接口请求提交类\n# Author: GitFree\n# Email: pengzhao.lh@gmail.com\n# LastChange: 2011-09-24 14:20:35\n#==================================================================... |
#!/usr/bin/env python
#coding=utf-8
'''
#=============================================================================
# FileName: AlipayService.py
# Desc: 支付宝接口构造类
# Author: GitFree
# Email: pengzhao.lh@gmail.com
# LastChange: 2011-09-24 01:47:10
#=============================================================================
'''
from AlipayConfig import Config
from AlipaySubmit import Submit
# 要传递的参数要么不允许为空,要么就不要出现在数组与隐藏控件或URL链接里。
class Service:
    """Builds the concrete Alipay interface requests (form HTML).

    Parameters must either be non-empty or be left out of the request
    entirely -- never sent as empty hidden fields or URL parameters.
    """

    def __init__(self):
        # Initialise from the configuration file.
        config=Config()
        # Partner id.
        self.partner = config.partner
        # Character encoding.
        self.input_charset = config.input_charset
        # Contracted Alipay account / seller Alipay account.
        self.seller_email = config.seller_email
        # Synchronous return page URL.
        self.return_url = config.return_url
        # Asynchronous server-notify page URL.
        self.notify_url =config.notify_url
        # Alipay gateway address (new).
        self.GATEWAY_NEW = "https://mapi.alipay.com/gateway.do?"

    def Create_direct_pay_by_user(self,sParaTemp):
        """Return the form HTML for the instant-payment (direct pay) interface.

        sParaTemp: dict of business parameters; the account/configuration
        entries are filled in here before signing.
        """
        # Add the basic configuration entries.
        sParaTemp["service"]="create_direct_pay_by_user"
        sParaTemp["partner"]=self.partner
        sParaTemp["_input_charset"]= self.input_charset
        sParaTemp["seller_email"]= self.seller_email
        sParaTemp["return_url"]=self.return_url
        sParaTemp["notify_url"]= self.notify_url
        # Label of the (hidden) confirm button.
        strButtonValue = u"确认"
        # Build the auto-submitting form HTML.
        submit=Submit()
        strHtml = submit.BuildFormHtml(sParaTemp, self.GATEWAY_NEW, "get", strButtonValue)
        return strHtml

    def Query_timestamp(self):
        """Anti-phishing helper: fetch the gateway timestamp (unfinished).

        Returns the encrypt_key timestamp string ("" until implemented).
        """
        # Bug fix: the original read Config.partner, but partner is an
        # instance attribute set in Config.__init__ (AttributeError on the
        # class); use this instance's copy instead.
        url = self.GATEWAY_NEW + "service=query_timestamp&partner=" + self.partner
        encrypt_key = ""
        # TODO: fetch the XML from `url` and extract
        # /alipay/response/timestamp/encrypt_key.
        return encrypt_key

    def AlipayInterface(self,sParaTemp):
        """Template for adding further Alipay interfaces.

        Build the request parameters, then create the request via one of:
        Submit.BuildFormHtml() (form HTML), Submit.SendPostInfo() or
        Submit.SendGetInfo() (XML result), depending on the interface.
        """
        # Form-HTML result placeholder.
        strHtml = ""
        return strHtml
| [
[
8,
0,
0.0645,
0.0968,
0,
0.66,
0,
0,
1,
0,
0,
0,
0,
0,
0
],
[
1,
0,
0.1183,
0.0108,
0,
0.66,
0.3333,
599,
0,
1,
0,
0,
599,
0,
0
],
[
1,
0,
0.129,
0.0108,
0,
0.66,... | [
"'''\n#=============================================================================\n# FileName: AlipayService.py\n# Desc: 支付宝接口构造类\n# Author: GitFree\n# Email: pengzhao.lh@gmail.com\n# LastChange: 2011-09-24 01:47:10\n#====================================================================... |
#!/usr/bin/env python
#coding=utf-8
'''
#=============================================================================
# FileName: AlipayCore.py
# Desc: 支付宝接口共用函数类
# Author: GitFree
# Email: pengzhao.lh@gmail.com
# LastChange: 2011-09-24 03:32:13
#=============================================================================
'''
import hashlib
import os.path
import time
class Core:
    """Shared helpers for the Alipay interfaces: parameter filtering,
    link-string building, signing and debug logging."""

    def __init__(self):
        pass

    @staticmethod
    def BuildMysign(paramDic,key,sign_type,input_charset):
        """Sign `paramDic` and return the digest string.

        paramDic: parameters to sign.
        key: security check code, appended directly to the pre-sign string.
        sign_type: signature method (only MD5 is implemented).
        input_charset: encoding applied before hashing.
        """
        # "k1=v1&k2=v2..." in sorted key order...
        prestr = Core.CreateLinkString(paramDic)
        # ...with the security key appended directly...
        prestr = prestr + key
        # ...then hashed to produce the signature.
        mysign = Core.Sign(prestr, sign_type, input_charset)
        return mysign

    @staticmethod
    def FilterPara(paramDicPre):
        """Return a copy of `paramDicPre` without empty/None values and
        without the sign and sign_type parameters."""
        paramDic ={}
        for key in paramDicPre:
            if key.lower() != "sign" and key.lower() != "sign_type" and paramDicPre[key] != "" and paramDicPre[key]!= None:
                paramDic[key]=paramDicPre[key]
        return paramDic

    @staticmethod
    def CreateLinkString(paramDic):
        """Join the sorted items of `paramDic` as "k1=v1&k2=v2..."."""
        # Bug fix: the original called .sort() on dict.keys(), which fails
        # on Python 3 dict views; sorted() works on both 2 and 3.
        preList=[]
        for key in sorted(paramDic):
            preList.append('%s=%s'% (key,paramDic[key]))
        joined_string='&'.join(preList)
        return joined_string

    @staticmethod
    def Sign(prestr,sign_type,input_charset):
        """Sign `prestr` and return the hex digest.

        Only MD5 is implemented; any other sign_type yields a
        not-implemented marker string instead of a digest.
        """
        prestr=prestr.encode(input_charset)
        if (sign_type.upper() == "MD5"):
            # Renamed from `hash`, which shadowed the builtin.
            digest=hashlib.md5()
            digest.update(prestr)
            result=digest.hexdigest()
        else:
            result=sign_type+u'方式签名尚未开发,清自行添加'
        return result

    @staticmethod
    def LogResult(sWord):
        """Append `sWord`, timestamped, to log.txt beside this module
        (debug helper for notification troubleshooting)."""
        strPath = os.path.dirname(__file__)
        strPath = os.path.join(strPath, "log.txt")
        # Bug fixes: the original used the Python 2-only file() builtin,
        # never closed the handle, and referenced `time` without importing
        # it (now imported at module level).
        with open(strPath, 'a') as f:
            f.write(time.strftime("%y-%m-%d-%H:%M:%S ") + sWord)
| [
[
8,
0,
0.0674,
0.1011,
0,
0.66,
0,
0,
1,
0,
0,
0,
0,
0,
0
],
[
1,
0,
0.1348,
0.0112,
0,
0.66,
0.3333,
154,
0,
1,
0,
0,
154,
0,
0
],
[
1,
0,
0.1461,
0.0112,
0,
0.66... | [
"'''\n#=============================================================================\n# FileName: AlipayCore.py\n# Desc: 支付宝接口共用函数类\n# Author: GitFree\n# Email: pengzhao.lh@gmail.com\n# LastChange: 2011-09-24 03:32:13\n#=====================================================================... |
import os
# FANN training-file converter: turns each comma-separated data file in a
# folder into FANN format ("<pairs> <inputs> <outputs>" header, then
# alternating input/output lines) under <folder>/Converted/.
numInput = 0
numOutput = 0
folderName = raw_input("Enter the folder containing the data files to convert ... ")
# Bug fixes: the original removed entries from `files` while iterating it
# (which skips elements), and tested os.path.isdir against the current
# working directory instead of the target folder.
files = [f for f in os.listdir(folderName)
         if not os.path.isdir(os.path.join(folderName, f))]
print("Converting " + ", ".join(files))
if os.path.isdir(folderName + "/Converted/") == False:
    os.mkdir(folderName + "/Converted/")
for FileName in files:
    # `with` guarantees the handle is closed even on error.
    with open(folderName + "/" + FileName, "r") as FileObj:
        rawData = FileObj.readlines()
    # Parse comma-separated rows; the last column is the expected output.
    data = []
    dataPairs = 0
    for eachLine in rawData:
        data.append(eachLine.strip(" \n\t").split(","))
        dataPairs += 1
    numOutput = 1
    # Input count from the last row (the original looped over every row
    # only to keep the last value anyway).
    if data:
        numInput = len(data[-1]) - 1
    with open(folderName + "/Converted/fann_" + FileName, "w") as newFile:
        newFile.write(str(dataPairs) + " " + str(numInput) + " " + str(numOutput) + "\n")
        lineCount = 0
        for each in data:
            output = each[-1]
            del each[-1]
            newFile.write(" ".join(each) + "\n")
            newFile.write(output + "\n")
            lineCount += 1
            print("Converting line " + str(lineCount) + " of file " + FileName)
| [
[
1,
0,
0.0189,
0.0189,
0,
0.66,
0,
688,
0,
1,
0,
0,
688,
0,
0
],
[
14,
0,
0.0566,
0.0189,
0,
0.66,
0.1,
317,
1,
0,
0,
0,
0,
1,
0
],
[
14,
0,
0.0755,
0.0189,
0,
0.6... | [
"import os",
"numInput = 0",
"numOutput = 0",
"folderName = raw_input(\"Enter the folder containing the data files to convert ... \")",
"files = os.listdir(folderName)",
"for each in files:\n\tif(os.path.isdir(each)):\n\t\tfiles.remove(each)",
"\tif(os.path.isdir(each)):\n\t\tfiles.remove(each)",
"\t\... |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.