code stringlengths 1 1.72M | language stringclasses 1 value |
|---|---|
"""
Setup your Routes options here
"""
import os
from routes import Mapper
def make_map(global_conf=None, app_conf=None):
    """Build and return the Routes ``Mapper`` for the application.

    ``global_conf`` / ``app_conf`` are the Paste configuration dicts; they
    are accepted for interface compatibility but not consulted here.
    """
    # Use None sentinels instead of mutable default dicts (the classic
    # shared-default pitfall); callers may still pass dicts as before.
    if global_conf is None:
        global_conf = {}
    if app_conf is None:
        app_conf = {}
    root_path = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
    # Renamed from 'map' so the builtin is not shadowed.
    route_map = Mapper(directory=os.path.join(root_path, 'controllers'))
    # This route handles displaying the error page and graphics used in the
    # 404/500 error pages.  It should stay at the top to ensure that the
    # error page is displayed properly.
    route_map.connect('error/:action/:id', controller='error')
    # Participant controllers.  The specific actions are connected before
    # the catch-all '/p/:id' so they are not swallowed by it.
    route_map.connect('/p/picture/:id', controller='p', action='picture')
    route_map.connect('/p/audio/:id', controller='p', action='audio')
    route_map.connect('/p/submit/:id', controller='p', action='submit')
    route_map.connect('/p/create', controller='p', action='create')
    route_map.connect('/p/:id', controller='p', action='index')
    # Researcher controllers.
    route_map.connect('/r', controller='r', action='index')
    route_map.connect('/r/entry/:entry_id', controller='r', action='entry')
    route_map.connect('/r/entries/:study_id/:user_id', controller='r', action='entries')
    route_map.connect('/r/report/:user_id', controller='r', action='report')
    route_map.connect('/r/addstudy', controller='r', action='addstudy')
    route_map.connect('/r/changepassword/:user_id', controller='r', action='changepassword')
    route_map.connect('/r/adduser', controller='r', action='adduser')
    route_map.connect('/r/picture/:id', controller='r', action='picture')
    route_map.connect('/r/audio/:id', controller='r', action='audio')
    # Generic controller/action dispatch (login and action controllers).
    route_map.connect(':controller/:action/:id')
    # Fall through to static template rendering for anything else.
    route_map.connect('*url', controller='template', action='view')
    return route_map
| Python |
#
| Python |
from paste import httpexceptions
from paste.cascade import Cascade
from paste.urlparser import StaticURLParser
from paste.registry import RegistryManager
from paste.deploy.config import ConfigMiddleware, CONFIG
from paste.deploy.converters import asbool
from pylons.error import error_template
from pylons.middleware import ErrorHandler, ErrorDocuments, StaticJavascripts, error_mapper
import pylons.wsgiapp
from forlater.config.environment import load_environment
import forlater.lib.helpers
import forlater.lib.app_globals as app_globals
def make_app(global_conf, full_stack=True, **app_conf):
    """Create the fully wrapped WSGI application and return it.

    global_conf is a dict representing the Paste configuration options;
    paste.deploy.converters should be used when parsing Paste config options
    to ensure they're treated properly (full_stack arrives as a string).

    The middleware is applied inside-out: the Pylons app is wrapped by
    config, then (optionally) HTTP-exception/error handling, then the
    registry, and finally a Cascade that tries static files first.
    """
    # Setup the Paste CONFIG object, adding app_conf/global_conf for legacy code
    conf = global_conf.copy()
    conf.update(app_conf)
    conf.update(dict(app_conf=app_conf, global_conf=global_conf))
    CONFIG.push_process_config(conf)
    # Load our Pylons configuration defaults
    config = load_environment(global_conf, app_conf)
    config.init_app(global_conf, app_conf, package='forlater')
    # Load our default Pylons WSGI app and make g available
    app = pylons.wsgiapp.PylonsApp(config, helpers=forlater.lib.helpers,
        g=app_globals.Globals)
    # NOTE(review): 'g' is never used below; kept for parity with the
    # stock Pylons project template.
    g = app.globals
    app = ConfigMiddleware(app, conf)
    # YOUR MIDDLEWARE
    # Put your own middleware here, so that any problems are caught by the
    # error handling middleware underneath.
    # If error handling and exception catching will be handled by middleware
    # for multiple apps, you will want to set full_stack = False in your
    # config file so that it can catch the problems.
    if asbool(full_stack):
        # Change HTTPExceptions to HTTP responses
        app = httpexceptions.make_middleware(app, global_conf)
        # Error Handling
        app = ErrorHandler(app, global_conf, error_template=error_template, **config.errorware)
        # Display error documents for 401, 403, 404 status codes (if debug
        # is disabled it also intercepts 500)
        app = ErrorDocuments(app, global_conf, mapper=error_mapper, **app_conf)
    # Establish the Registry for this application (per-request globals such
    # as request/session rely on it)
    app = RegistryManager(app)
    # Static files are served before the app itself via the Cascade.
    static_app = StaticURLParser(config.paths['static_files'])
    javascripts_app = StaticJavascripts()
    app = Cascade([static_app, javascripts_app, app])
    return app
| Python |
import os
import pylons.config
import webhelpers
from forlater.config.routing import make_map
def load_environment(global_conf=None, app_conf=None):
    """Assemble and return the Pylons ``Config`` object for this app.

    Builds the Routes mapper, the filesystem path table, and the Myghty
    template options.  ``global_conf`` / ``app_conf`` are forwarded to
    ``make_map`` and default to empty dicts.
    """
    # None sentinels avoid the mutable-default-argument pitfall.
    if global_conf is None:
        global_conf = {}
    if app_conf is None:
        app_conf = {}
    # Renamed from 'map' so the builtin is not shadowed.
    route_map = make_map(global_conf, app_conf)
    # Setup our paths, all rooted at the package directory (the parent of
    # this config/ directory).
    root_path = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
    paths = {'root_path': root_path,
             'controllers': os.path.join(root_path, 'controllers'),
             'templates': [os.path.join(root_path, path) for path in
                           ('components', 'templates')],
             'static_files': os.path.join(root_path, 'public')
             }
    # The following template options are passed to your template engines
    tmpl_options = {}
    tmpl_options['myghty.log_errors'] = True
    # 'l' and 's' escape flags map to webhelpers auto_link/simple_format.
    tmpl_options['myghty.escapes'] = dict(l=webhelpers.auto_link, s=webhelpers.simple_format)
    # Add your own template options config options here; note that all
    # config options will override any Pylons config options.
    # Return our loaded config object
    return pylons.config.Config(tmpl_options, route_map, paths)
| Python |
"""
Helper functions
All names available in this module will be available under the Pylons h object.
"""
from webhelpers import *
from pylons.helpers import log
from pylons.i18n import get_lang, set_lang
| Python |
from pylons import Response, c, g, cache, request, session
from pylons.controllers import WSGIController
from pylons.decorators import jsonify, validate
from pylons.templating import render, render_response
from pylons.helpers import abort, redirect_to, etag_cache
from pylons.i18n import N_, _, ungettext
import forlater.models as model
import forlater.lib.helpers as h
import urllib
import httplib
import os
import shutil
from paste import fileapp
from forlater.lib.entry import *
from sqlalchemy import *
class BaseController(WSGIController):
    """Common base class for this application's controllers."""

    def __call__(self, environ, start_response):
        # Per-request hook point: the Routes match is available in
        # environ['pylons.routes_dict'] if pre-dispatch checks are needed.
        return WSGIController.__call__(self, environ, start_response)

    def _serve_file(self, path):
        """Stream the file at *path* back to the client via paste.fileapp."""
        return fileapp.FileApp(path)(request.environ, self.start_response)
def get_message():
    """Pop and return the one-shot status message from the session.

    Returns None (or the stored falsy value) when no message is pending;
    a truthy message is removed from the session before being returned.
    """
    msg = session.get('msg')
    if not msg:
        return msg
    del session['msg']
    session.save()
    return msg
def put_message(msg):
    """Stash a one-shot status message in the session for the next page view."""
    session['msg'] = msg
    session.save()
def get_error():
    """Pop and return the one-shot error message from the session.

    Returns None (or the stored falsy value) when no error is pending;
    a truthy error is removed from the session before being returned.
    """
    error = session.get('error')
    if not error:
        return error
    del session['error']
    session.save()
    return error
def put_error(error):
    """Stash a one-shot error message in the session for the next page view."""
    session['error'] = error
    session.save()
def send_sms(phone_num, text):
    """Send *text* to *phone_num* through the configured SMS gateway.

    Issues a GET against ``g.sms_url_server`` + ``g.sms_url_path`` and
    returns True when the gateway answers 200 OK.
    """
    params = urllib.urlencode([('PhoneNumber', phone_num), ('Text', text)])
    url = g.sms_url_path + "?" + params
    # The leftover debug 'print url' was removed: under CGI/FastCGI stdout
    # is the HTTP response stream, so printing there corrupts responses.
    conn = httplib.HTTPConnection(g.sms_url_server)
    try:
        conn.request("GET", url)
        r1 = conn.getresponse()
        status = r1.status
        # Read (and discard) the body so the connection shuts down cleanly;
        # only the status code matters to the caller.
        r1.read()
    finally:
        # Close the connection even when the request raises.
        conn.close()
    return (status == httplib.OK)
def load_entry_prompts(xml):
    """Parse the Entry stored in *xml* (a filename under g.entry_file_dir).

    Returns the Entry object, or None if the file cannot be read or parsed.
    """
    return parse_entry_from_file(os.path.join(g.entry_file_dir, xml))
def write_entry_prompts(ep, xml):
    """Serialize Entry *ep* into *xml* (a filename under g.entry_file_dir).

    Returns True on success, False otherwise.
    """
    return write_entry_to_file(ep, os.path.join(g.entry_file_dir, xml))
def instantiate_prompts(e):
    """Create a per-entry copy of the study's default prompt XML for entry *e*.

    *e* is a mapping with at least 'phone' and 'id' keys (presumably an
    entries-table row -- TODO confirm against callers).  Looks up the study
    prompt file for the entry's phone number, copies it to a file named
    <study>_<phone>_<entryid>.xml, records that filename on the entry row,
    and returns it.  Returns None when no study prompt is found.
    """
    # get the name of the default study prompt
    col_s = model.studies_table.c
    col_u = model.users_table.c
    col_e = model.entries_table.c
    s = select([col_s.prompt_xml], and_(col_u.phone==e['phone'], col_u.study_id == col_s.id))
    r = s.execute()
    row = r.fetchone()
    r.close()
    if not row:
        return None
    # copy the file
    new_filename = row[0].split('.')[0] + "_" + e['phone'] + "_" + str(e['id']) + ".xml"
    # NOTE(review): these handles leak if copyfileobj raises -- consider
    # try/finally; left untouched to keep this change documentation-only.
    source = open(os.path.join(g.entry_file_dir, row[0]), 'r')
    dest = open(os.path.join(g.entry_file_dir, new_filename ), 'w')
    shutil.copyfileobj(source, dest)
    source.close()
    dest.close()
    # store the name of the xml file in the database
    u = model.entries_table.update(col_e.id==e['id']).execute(prompt_xml=new_filename)
    u.close()
    return new_filename
# Include the '_' function in the public names.  Everything defined above
# that is not underscore-prefixed (plus the i18n gettext alias '_') is
# exported to controllers that do 'from forlater.lib.base import *'.
__all__ = [__name for __name in locals().keys() if not __name.startswith('_') \
    or __name == '_']
| Python |
header_txt = """\
<html>
<head>
<title>4l8r%(title)s</title>
<script src="/js/prototype.js" type="text/javascript"></script>
<script src="/js/scriptaculous.js" type="text/javascript"></script>
<style>
a:link {color: #990000}
a:visited {color: #990000}
a:hover {color: #CC6666}
a:active {color: #CC6666}
h1 {
font-family: calibri,verdana,sans-serif;
font-size: 18pt;
color: #990000;
}
h2 {
font-family: calibri,verdana,sans-serif;
font-size: 16pt;
color: #990000;
}
h3 {
font-family: calibri,verdana,sans-serif;
font-size: 14pt;
color: #990000;
}
p,li {
font-family: calibri,verdana,sans-serif;
font-size: 11pt;
}
.small {
font-size: 9pt;
}
.serif {
font-family: georgia,garamond,serif;
}
.login {
font-family: calibri,verdana,sans-serif;
font-size: 11pt;
padding: 5px;
align: right;
}
.emph {
color: #990000;
}
.errorbox {
display: block;
padding: 4px;
border: 1px solid #990000;
background: #FFEEEE;
color: #990000;
}
.msgbox {
display: block;
padding: 4px;
border: 1px solid #000000;
background: #EEEEEE;
color: #000000;
}
.shortbox {
display: block;
padding-left: 15px;
padding-right: 15px;
border: 1px solid #999999;
background: #EEEEEE;
color: #000000;
width: 300px;
}
</style>
</head>
<body bgcolor='#ffffff' marginwidth=0 marginheight=0 leftmargin=0 rightmargin=0 topmargin=0 onload='%(onload)s'>
%(acct)s
<table width='100%%' height='56' background='/images/header_bg.gif' cellpadding=0 cellspacing=0 border=0>
<tr background='/images/header_bg.gif' height=56>
<td width=50 height=56 alight='left' background='/images/header_bg.gif'><img src='/images/header_bg.gif' width=50 height=56></td>
<td width=96 height=56 alight='left' background='/images/header_bg.gif'><img src='/images/header.gif' width=96 height=56></td>
<td width=* height=56 alight='left' background='/images/header_bg.gif'><img src='/images/header_bg.gif' width=96 height=56></td>
</tr>
</table>
<table width='100%%' cellpadding=5>
<tr><td width='1'> </td>
<td wdith='*'>
%(error)s%(msg)s"""
footer_txt = """\
</td>
<td width='1'> </td>
</body>
</html>
"""
def header(title=None, username=None, msg=None, error=None):
    """Render the site-wide page header.

    Fills the header_txt template with the page title, the account bar
    (login/logout links when *username* is given), and highlighted one-shot
    error/message boxes, returning the resulting HTML fragment.
    """
    onload = ''
    subs = {'title': ' - ' + title if title else ''}
    if username:
        subs['acct'] = ('<div class="login" align="right"><strong>' + username +
                        '</strong> | <a href="/login/prefs">Account Preferences</a>'
                        ' | <a href="/login/logout">Log out</a></div>\n')
    else:
        subs['acct'] = '<div class="login"> </div>\n'
    if error:
        subs['error'] = "<p class='errorbox' id='error_box'>Error: " + error + "</p>\n"
        # Flash the error box red on page load (scriptaculous effect).
        onload += 'new Effect.Highlight("error_box", {startcolor: "#FF9999", duration: 4.0});'
    else:
        subs['error'] = ''
    if msg:
        subs['msg'] = "<p class='msgbox' id='msg_box'>" + msg + "</p>\n"
        # Flash the message box green on page load.
        onload += 'new Effect.Highlight("msg_box", {startcolor: "#CCFFCC", duration: 4.0});'
    else:
        subs['msg'] = ''
    subs['onload'] = onload
    return header_txt % subs
def footer():
    """Return the shared page footer markup."""
    return footer_txt
| Python |
from xml.dom.minidom import *
class Entry:
    """A journal entry: an id plus an ordered list of Question objects."""

    def __init__(self, id=0, questions=None):
        """Create an entry.

        *questions* is copied, never aliased; the default (None) yields a
        fresh empty list per instance rather than a shared mutable default.
        """
        self.id = id
        self.questions = list(questions) if questions is not None else []

    def _to_xml(self, d):
        """Serialize to an <entry> element owned by Document *d*."""
        e = d.createElement('entry')
        e.setAttribute('id', str(self.id))
        for q in self.questions:
            e.appendChild(q._to_xml(d))
        return e

    def _from_xml(self, e):
        """Populate this entry from an <entry> DOM element."""
        try:
            self.id = int(e.getAttribute('id'))
        except (TypeError, ValueError):
            # Missing or non-numeric id attribute: keep the current id
            # (narrowed from the original bare except).
            pass
        self.questions = []
        for qxml in e.getElementsByTagName('question'):
            new_q = Question()
            new_q._from_xml(qxml)
            self.questions.append(new_q)
class Question:
    """One prompt/response pair within an Entry.

    Attributes:
      q_type    -- free-form type tag (the XML 'type' attribute)
      completed -- whether the question has been answered
      prompt    -- prompt text, or None when absent
      response  -- response text, or None when absent
      choices   -- list of Choice objects for multiple-choice questions
    """

    def __init__(self, q_type='', completed=False, prompt=None, response=None, choices=None):
        self.q_type = q_type
        self.completed = completed
        self.prompt = prompt
        self.response = response
        # Copied, never aliased; the default (None) yields a fresh empty
        # list per instance rather than a shared mutable default.
        self.choices = list(choices) if choices is not None else []

    def _to_xml(self, d):
        """Serialize to a <question> element owned by Document *d*.

        Note: falsy prompt/response values (None or '') are omitted,
        matching the historical on-disk format.
        """
        e = d.createElement('question')
        e.setAttribute('type', str(self.q_type))
        e.setAttribute('completed', str(self.completed))
        if self.prompt:
            p = d.createElement('prompt')
            p.appendChild(d.createTextNode(str(self.prompt)))
            e.appendChild(p)
        if self.response:
            r = d.createElement('response')
            r.appendChild(d.createTextNode(str(self.response)))
            e.appendChild(r)
        for c in self.choices:
            e.appendChild(c._to_xml(d))
        return e

    def _from_xml(self, e):
        """Populate this question from a <question> DOM element."""
        self.q_type = e.getAttribute('type')
        self.completed = _parse_bool(e.getAttribute('completed'))
        pxmls = e.getElementsByTagName('prompt')
        if len(pxmls) >= 1:
            # Concatenate the element's direct text nodes, each stripped.
            self.prompt = ''
            for n in pxmls[0].childNodes:
                if n.nodeType == n.TEXT_NODE:
                    self.prompt = self.prompt + n.nodeValue.strip()
        rxmls = e.getElementsByTagName('response')
        if len(rxmls) >= 1:
            self.response = ''
            for n in rxmls[0].childNodes:
                if n.nodeType == n.TEXT_NODE:
                    self.response = self.response + n.nodeValue.strip()
        self.choices = []
        for cxml in e.getElementsByTagName('choice'):
            new_c = Choice()
            new_c._from_xml(cxml)
            self.choices.append(new_c)
class Choice:
    """A single selectable option on a multiple-choice question."""

    def __init__(self, value='', response=False):
        # value:    the option's display text
        # response: True when this option was the participant's answer
        self.value = value
        self.response = response

    def _to_xml(self, d):
        """Serialize to a <choice> element owned by Document *d*."""
        node = d.createElement('choice')
        node.setAttribute('response', str(self.response))
        node.appendChild(d.createTextNode(str(self.value)))
        return node

    def _from_xml(self, e):
        """Populate this choice from a <choice> DOM element."""
        self.response = _parse_bool(e.getAttribute('response'))
        chunks = [n.nodeValue.strip() for n in e.childNodes
                  if n.nodeType == n.TEXT_NODE]
        self.value = ''.join(chunks)
def parse_entry_from_file(filename):
    """Read *filename* and return its first <entry> as an Entry object.

    Returns None when the file cannot be opened, cannot be parsed as XML,
    or contains no <entry> element.
    """
    try:
        # open() replaces the removed/deprecated file() builtin.
        f = open(filename, 'r')
    except (IOError, OSError):
        return None
    result = None
    try:
        try:
            d = parseString(f.read())
            entries = d.getElementsByTagName('entry')
            if entries:
                e = Entry()
                e._from_xml(entries[0])
                result = e
        except Exception:
            # Malformed XML (or a bad entry) yields None, preserving the
            # historical best-effort contract of the bare except.
            result = None
    finally:
        # The handle is now closed even when parsing raises.
        f.close()
    return result
def write_entry_to_file(e, filename):
    """Pretty-print entry *e* as XML into *filename*.

    Returns True on success, False when the file cannot be opened or the
    entry cannot be serialized.
    """
    try:
        # open() replaces the removed/deprecated file() builtin.
        f = open(filename, 'w')
    except (IOError, OSError):
        return False
    result = False
    try:
        try:
            d = getDOMImplementation().createDocument(None, None, None)
            d.appendChild(e._to_xml(d))
            f.write(d.toprettyxml())
            result = True
        except Exception:
            # Serialization failed; a possibly partial file is left behind,
            # matching the historical behaviour of the bare except.
            result = False
    finally:
        # The handle is now closed even when serialization raises.
        f.close()
    return result
def _parse_bool(s):
if s.lower() == 'true':
return True
else:
return False
| Python |
import re
class Globals(object):

    def __init__(self, global_conf, app_conf, **extra):
        """
        Globals acts as a container for objects available throughout
        the life of the application.  One instance of Globals is created
        by Pylons during application initialization and is available
        during requests via the 'g' variable.

        ``global_conf``
            The same variable used throughout ``config/middleware.py``,
            namely, the variables from the ``[DEFAULT]`` section of the
            configuration file.
        ``app_conf``
            The same ``kw`` dictionary used throughout
            ``config/middleware.py``, namely, the variables from the
            section in the config file for your application.
        ``extra``
            The configuration returned from ``load_config`` in
            ``config/middleware.py`` which may be of use in the setup of
            your global variables.
        """
        # Filesystem locations for participant media and entry XML files.
        self.audio_file_dir = '/4l8r/audio/'
        self.picture_file_dir = '/4l8r/pictures/'
        self.entry_file_dir = '/4l8r/entries/'
        # SMS gateway endpoint used by send_sms() (host:port and URL path).
        self.sms_url_server = '172.27.76.138:8802'
        self.sms_url_path = '/Send%20Text%20Message.htm'
        # NOTE(review): hardcoded shared secret checked into source; this
        # should live in the deployment config instead.
        self.upload_auth_token = 'supersecret4l8r'
        # Matches every non-digit character (presumably used to normalize
        # phone numbers -- confirm against callers).
        self.re_strip_non_number = re.compile(r'[^0-9]')
        # strftime formats for long and short timestamp display.
        self.long_datetime_format = '%A, %B %d, %Y at %I:%M %p'
        self.short_datetime_format = '%m/%d/%Y at %I:%M %p'

    def __del__(self):
        """
        Put any cleanup code to be run when the application finally exits
        here.
        """
        pass
| Python |
#!python
"""Bootstrap setuptools installation
If you want to use setuptools in your package's setup.py, just include this
file in the same directory with it, and add this to the top of your setup.py::
from ez_setup import use_setuptools
use_setuptools()
If you want to require a specific version of setuptools, set a download
mirror, or use an alternate download directory, you can do so by supplying
the appropriate options to ``use_setuptools()``.
This file can also be run as a script to install or upgrade setuptools.
"""
import sys
# Version of setuptools fetched when none is installed, and the PyPI
# ("cheeseshop") directory the eggs are downloaded from (keyed to the
# running interpreter's major.minor version).
DEFAULT_VERSION = "0.6c5"
DEFAULT_URL = "http://cheeseshop.python.org/packages/%s/s/setuptools/" % sys.version[:3]
# Known-good MD5 digests for each downloadable setuptools egg, keyed by
# egg filename.  Maintained in place by update_md5() / --md5update.
md5_data = {
    'setuptools-0.6b1-py2.3.egg': '8822caf901250d848b996b7f25c6e6ca',
    'setuptools-0.6b1-py2.4.egg': 'b79a8a403e4502fbb85ee3f1941735cb',
    'setuptools-0.6b2-py2.3.egg': '5657759d8a6d8fc44070a9d07272d99b',
    'setuptools-0.6b2-py2.4.egg': '4996a8d169d2be661fa32a6e52e4f82a',
    'setuptools-0.6b3-py2.3.egg': 'bb31c0fc7399a63579975cad9f5a0618',
    'setuptools-0.6b3-py2.4.egg': '38a8c6b3d6ecd22247f179f7da669fac',
    'setuptools-0.6b4-py2.3.egg': '62045a24ed4e1ebc77fe039aa4e6f7e5',
    'setuptools-0.6b4-py2.4.egg': '4cb2a185d228dacffb2d17f103b3b1c4',
    'setuptools-0.6c1-py2.3.egg': 'b3f2b5539d65cb7f74ad79127f1a908c',
    'setuptools-0.6c1-py2.4.egg': 'b45adeda0667d2d2ffe14009364f2a4b',
    'setuptools-0.6c2-py2.3.egg': 'f0064bf6aa2b7d0f3ba0b43f20817c27',
    'setuptools-0.6c2-py2.4.egg': '616192eec35f47e8ea16cd6a122b7277',
    'setuptools-0.6c3-py2.3.egg': 'f181fa125dfe85a259c9cd6f1d7b78fa',
    'setuptools-0.6c3-py2.4.egg': 'e0ed74682c998bfb73bf803a50e7b71e',
    'setuptools-0.6c3-py2.5.egg': 'abef16fdd61955514841c7c6bd98965e',
    'setuptools-0.6c4-py2.3.egg': 'b0b9131acab32022bfac7f44c5d7971f',
    'setuptools-0.6c4-py2.4.egg': '2a1f9656d4fbf3c97bf946c0a124e6e2',
    'setuptools-0.6c4-py2.5.egg': '8f5a052e32cdb9c72bcf4b5526f28afc',
    'setuptools-0.6c5-py2.3.egg': 'ee9fd80965da04f2f3e6b3576e9d8167',
    'setuptools-0.6c5-py2.4.egg': 'afe2adf1c01701ee841761f5bcd8aa64',
    'setuptools-0.6c5-py2.5.egg': 'a8d3f61494ccaa8714dfed37bccd3d5d',
}
import sys, os
def _validate_md5(egg_name, data):
    """Return *data* unchanged after checking it against the known digest.

    Exits the process with status 2 when *egg_name* has a registered MD5
    in md5_data and the downloaded bytes do not match it; unknown egg
    names are passed through without validation.
    """
    if egg_name in md5_data:
        from md5 import md5
        digest = md5(data).hexdigest()
        if digest != md5_data[egg_name]:
            print >>sys.stderr, (
                "md5 validation of %s failed! (Possible download problem?)"
                % egg_name
            )
            sys.exit(2)
    return data
def use_setuptools(
    version=DEFAULT_VERSION, download_base=DEFAULT_URL, to_dir=os.curdir,
    download_delay=15
):
    """Automatically find/download setuptools and make it available on sys.path

    `version` should be a valid setuptools version number that is available
    as an egg for download under the `download_base` URL (which should end with
    a '/'). `to_dir` is the directory where setuptools will be downloaded, if
    it is not already available. If `download_delay` is specified, it should
    be the number of seconds that will be paused before initiating a download,
    should one be required. If an older version of setuptools is installed,
    this routine will print a message to ``sys.stderr`` and raise SystemExit in
    an attempt to abort the calling script.
    """
    try:
        import setuptools
        # '0.0.1' is the marker version of the obsolete pre-eggs setuptools,
        # which cannot be upgraded in place.
        if setuptools.__version__ == '0.0.1':
            print >>sys.stderr, (
            "You have an obsolete version of setuptools installed. Please\n"
            "remove it from your system entirely before rerunning this script."
            )
            sys.exit(2)
    except ImportError:
        # No setuptools at all: fetch the egg and bootstrap from it.
        egg = download_setuptools(version, download_base, to_dir, download_delay)
        sys.path.insert(0, egg)
        import setuptools; setuptools.bootstrap_install_from = egg
    import pkg_resources
    try:
        pkg_resources.require("setuptools>="+version)
    except pkg_resources.VersionConflict, e:
        # XXX could we install in a subprocess here?
        print >>sys.stderr, (
        "The required version of setuptools (>=%s) is not available, and\n"
        "can't be installed while this script is running. Please install\n"
        " a more recent version first.\n\n(Currently using %r)"
        ) % (version, e.args[0])
        sys.exit(2)
def download_setuptools(
    version=DEFAULT_VERSION, download_base=DEFAULT_URL, to_dir=os.curdir,
    delay = 15
):
    """Download setuptools from a specified location and return its filename

    `version` should be a valid setuptools version number that is available
    as an egg for download under the `download_base` URL (which should end
    with a '/'). `to_dir` is the directory where the egg will be downloaded.
    `delay` is the number of seconds to pause before an actual download attempt.
    """
    import urllib2, shutil
    egg_name = "setuptools-%s-py%s.egg" % (version,sys.version[:3])
    url = download_base + egg_name
    saveto = os.path.join(to_dir, egg_name)
    src = dst = None
    if not os.path.exists(saveto):  # Avoid repeated downloads
        try:
            from distutils import log
            if delay:
                # Give the user a chance to abort (or read the firewall
                # note) before any network traffic starts.
                log.warn("""
---------------------------------------------------------------------------
This script requires setuptools version %s to run (even to display
help). I will attempt to download it for you (from
%s), but
you may need to enable firewall access for this script first.
I will start the download in %d seconds.
(Note: if this machine does not have network access, please obtain the file
%s
and place it in this directory before rerunning this script.)
---------------------------------------------------------------------------""",
                    version, download_base, delay, url
                ); from time import sleep; sleep(delay)
            log.warn("Downloading %s", url)
            src = urllib2.urlopen(url)
            # Read/write all in one block, so we don't create a corrupt file
            # if the download is interrupted.
            data = _validate_md5(egg_name, src.read())
            dst = open(saveto,"wb"); dst.write(data)
        finally:
            if src: src.close()
            if dst: dst.close()
    return os.path.realpath(saveto)
def main(argv, version=DEFAULT_VERSION):
    """Install or upgrade setuptools and EasyInstall"""
    try:
        import setuptools
    except ImportError:
        # Bootstrap: download a temporary egg, let its own easy_install
        # install setuptools, then delete the egg.
        egg = None
        try:
            egg = download_setuptools(version, delay=0)
            sys.path.insert(0,egg)
            from setuptools.command.easy_install import main
            return main(list(argv)+[egg])   # we're done here
        finally:
            if egg and os.path.exists(egg):
                os.unlink(egg)
    else:
        if setuptools.__version__ == '0.0.1':
            # tell the user to uninstall obsolete version
            use_setuptools(version)
    req = "setuptools>="+version
    import pkg_resources
    try:
        pkg_resources.require(req)
    except pkg_resources.VersionConflict:
        # Installed version too old: upgrade using a freshly downloaded egg.
        try:
            from setuptools.command.easy_install import main
        except ImportError:
            from easy_install import main
        main(list(argv)+[download_setuptools(delay=0)])
        sys.exit(0) # try to force an exit
    else:
        if argv:
            from setuptools.command.easy_install import main
            main(argv)
        else:
            print "Setuptools version",version,"or greater has been installed."
            print '(Run "ez_setup.py -U setuptools" to reinstall or upgrade.)'
def update_md5(filenames):
    """Update our built-in md5 registry"""
    import re
    from md5 import md5
    # Hash each named egg file and record it in the in-memory table.
    for name in filenames:
        base = os.path.basename(name)
        f = open(name,'rb')
        md5_data[base] = md5(f.read()).hexdigest()
        f.close()
    # Re-render the md5_data literal, sorted for stable diffs.
    data = [" %r: %r,\n" % it for it in md5_data.items()]
    data.sort()
    repl = "".join(data)
    import inspect
    # Splice the regenerated table back into this script's own source file.
    srcfile = inspect.getsourcefile(sys.modules[__name__])
    f = open(srcfile, 'rb'); src = f.read(); f.close()
    match = re.search("\nmd5_data = {\n([^}]+)}", src)
    if not match:
        print >>sys.stderr, "Internal error!"
        sys.exit(2)
    src = src[:match.start(1)] + repl + src[match.end(1):]
    f = open(srcfile,'w')
    f.write(src)
    f.close()
if __name__=='__main__':
    # '--md5update file...' refreshes the digest table in place; any other
    # arguments are treated as an install/upgrade request.
    if len(sys.argv)>2 and sys.argv[1]=='--md5update':
        update_md5(sys.argv[2:])
    else:
        main(sys.argv[1:])
| Python |
#!python
"""Bootstrap setuptools installation

NOTE(review): this module is a verbatim duplicate of the ez_setup copy
earlier in this source; the two should be collapsed into a single file.

If you want to use setuptools in your package's setup.py, just include this
file in the same directory with it, and add this to the top of your setup.py::
    from ez_setup import use_setuptools
    use_setuptools()
If you want to require a specific version of setuptools, set a download
mirror, or use an alternate download directory, you can do so by supplying
the appropriate options to ``use_setuptools()``.
This file can also be run as a script to install or upgrade setuptools.
"""
import sys
# Default setuptools version and the egg download index.
DEFAULT_VERSION = "0.6c5"
DEFAULT_URL = "http://cheeseshop.python.org/packages/%s/s/setuptools/" % sys.version[:3]
# Known-good MD5 digests keyed by egg filename (see update_md5 below).
md5_data = {
    'setuptools-0.6b1-py2.3.egg': '8822caf901250d848b996b7f25c6e6ca',
    'setuptools-0.6b1-py2.4.egg': 'b79a8a403e4502fbb85ee3f1941735cb',
    'setuptools-0.6b2-py2.3.egg': '5657759d8a6d8fc44070a9d07272d99b',
    'setuptools-0.6b2-py2.4.egg': '4996a8d169d2be661fa32a6e52e4f82a',
    'setuptools-0.6b3-py2.3.egg': 'bb31c0fc7399a63579975cad9f5a0618',
    'setuptools-0.6b3-py2.4.egg': '38a8c6b3d6ecd22247f179f7da669fac',
    'setuptools-0.6b4-py2.3.egg': '62045a24ed4e1ebc77fe039aa4e6f7e5',
    'setuptools-0.6b4-py2.4.egg': '4cb2a185d228dacffb2d17f103b3b1c4',
    'setuptools-0.6c1-py2.3.egg': 'b3f2b5539d65cb7f74ad79127f1a908c',
    'setuptools-0.6c1-py2.4.egg': 'b45adeda0667d2d2ffe14009364f2a4b',
    'setuptools-0.6c2-py2.3.egg': 'f0064bf6aa2b7d0f3ba0b43f20817c27',
    'setuptools-0.6c2-py2.4.egg': '616192eec35f47e8ea16cd6a122b7277',
    'setuptools-0.6c3-py2.3.egg': 'f181fa125dfe85a259c9cd6f1d7b78fa',
    'setuptools-0.6c3-py2.4.egg': 'e0ed74682c998bfb73bf803a50e7b71e',
    'setuptools-0.6c3-py2.5.egg': 'abef16fdd61955514841c7c6bd98965e',
    'setuptools-0.6c4-py2.3.egg': 'b0b9131acab32022bfac7f44c5d7971f',
    'setuptools-0.6c4-py2.4.egg': '2a1f9656d4fbf3c97bf946c0a124e6e2',
    'setuptools-0.6c4-py2.5.egg': '8f5a052e32cdb9c72bcf4b5526f28afc',
    'setuptools-0.6c5-py2.3.egg': 'ee9fd80965da04f2f3e6b3576e9d8167',
    'setuptools-0.6c5-py2.4.egg': 'afe2adf1c01701ee841761f5bcd8aa64',
    'setuptools-0.6c5-py2.5.egg': 'a8d3f61494ccaa8714dfed37bccd3d5d',
}
import sys, os

def _validate_md5(egg_name, data):
    """Return *data* after checking it against md5_data; exit 2 on mismatch."""
    if egg_name in md5_data:
        from md5 import md5
        digest = md5(data).hexdigest()
        if digest != md5_data[egg_name]:
            print >>sys.stderr, (
                "md5 validation of %s failed! (Possible download problem?)"
                % egg_name
            )
            sys.exit(2)
    return data

def use_setuptools(
    version=DEFAULT_VERSION, download_base=DEFAULT_URL, to_dir=os.curdir,
    download_delay=15
):
    """Automatically find/download setuptools and make it available on sys.path

    `version` should be a valid setuptools version number that is available
    as an egg for download under the `download_base` URL (which should end with
    a '/'). `to_dir` is the directory where setuptools will be downloaded, if
    it is not already available. If `download_delay` is specified, it should
    be the number of seconds that will be paused before initiating a download,
    should one be required. If an older version of setuptools is installed,
    this routine will print a message to ``sys.stderr`` and raise SystemExit in
    an attempt to abort the calling script.
    """
    try:
        import setuptools
        # '0.0.1' marks the obsolete pre-eggs setuptools.
        if setuptools.__version__ == '0.0.1':
            print >>sys.stderr, (
            "You have an obsolete version of setuptools installed. Please\n"
            "remove it from your system entirely before rerunning this script."
            )
            sys.exit(2)
    except ImportError:
        # No setuptools at all: fetch the egg and bootstrap from it.
        egg = download_setuptools(version, download_base, to_dir, download_delay)
        sys.path.insert(0, egg)
        import setuptools; setuptools.bootstrap_install_from = egg
    import pkg_resources
    try:
        pkg_resources.require("setuptools>="+version)
    except pkg_resources.VersionConflict, e:
        # XXX could we install in a subprocess here?
        print >>sys.stderr, (
        "The required version of setuptools (>=%s) is not available, and\n"
        "can't be installed while this script is running. Please install\n"
        " a more recent version first.\n\n(Currently using %r)"
        ) % (version, e.args[0])
        sys.exit(2)

def download_setuptools(
    version=DEFAULT_VERSION, download_base=DEFAULT_URL, to_dir=os.curdir,
    delay = 15
):
    """Download setuptools from a specified location and return its filename

    `version` should be a valid setuptools version number that is available
    as an egg for download under the `download_base` URL (which should end
    with a '/'). `to_dir` is the directory where the egg will be downloaded.
    `delay` is the number of seconds to pause before an actual download attempt.
    """
    import urllib2, shutil
    egg_name = "setuptools-%s-py%s.egg" % (version,sys.version[:3])
    url = download_base + egg_name
    saveto = os.path.join(to_dir, egg_name)
    src = dst = None
    if not os.path.exists(saveto):  # Avoid repeated downloads
        try:
            from distutils import log
            if delay:
                # Warn the user before touching the network.
                log.warn("""
---------------------------------------------------------------------------
This script requires setuptools version %s to run (even to display
help). I will attempt to download it for you (from
%s), but
you may need to enable firewall access for this script first.
I will start the download in %d seconds.
(Note: if this machine does not have network access, please obtain the file
%s
and place it in this directory before rerunning this script.)
---------------------------------------------------------------------------""",
                    version, download_base, delay, url
                ); from time import sleep; sleep(delay)
            log.warn("Downloading %s", url)
            src = urllib2.urlopen(url)
            # Read/write all in one block, so we don't create a corrupt file
            # if the download is interrupted.
            data = _validate_md5(egg_name, src.read())
            dst = open(saveto,"wb"); dst.write(data)
        finally:
            if src: src.close()
            if dst: dst.close()
    return os.path.realpath(saveto)

def main(argv, version=DEFAULT_VERSION):
    """Install or upgrade setuptools and EasyInstall"""
    try:
        import setuptools
    except ImportError:
        # Bootstrap via a temporary downloaded egg, removed afterwards.
        egg = None
        try:
            egg = download_setuptools(version, delay=0)
            sys.path.insert(0,egg)
            from setuptools.command.easy_install import main
            return main(list(argv)+[egg])   # we're done here
        finally:
            if egg and os.path.exists(egg):
                os.unlink(egg)
    else:
        if setuptools.__version__ == '0.0.1':
            # tell the user to uninstall obsolete version
            use_setuptools(version)
    req = "setuptools>="+version
    import pkg_resources
    try:
        pkg_resources.require(req)
    except pkg_resources.VersionConflict:
        # Installed version too old: upgrade via a fresh egg.
        try:
            from setuptools.command.easy_install import main
        except ImportError:
            from easy_install import main
        main(list(argv)+[download_setuptools(delay=0)])
        sys.exit(0) # try to force an exit
    else:
        if argv:
            from setuptools.command.easy_install import main
            main(argv)
        else:
            print "Setuptools version",version,"or greater has been installed."
            print '(Run "ez_setup.py -U setuptools" to reinstall or upgrade.)'

def update_md5(filenames):
    """Update our built-in md5 registry"""
    import re
    from md5 import md5
    # Hash each named egg file and record it in the in-memory table.
    for name in filenames:
        base = os.path.basename(name)
        f = open(name,'rb')
        md5_data[base] = md5(f.read()).hexdigest()
        f.close()
    # Re-render the md5_data literal, sorted for stable diffs.
    data = [" %r: %r,\n" % it for it in md5_data.items()]
    data.sort()
    repl = "".join(data)
    import inspect
    # Splice the regenerated table back into this script's own source file.
    srcfile = inspect.getsourcefile(sys.modules[__name__])
    f = open(srcfile, 'rb'); src = f.read(); f.close()
    match = re.search("\nmd5_data = {\n([^}]+)}", src)
    if not match:
        print >>sys.stderr, "Internal error!"
        sys.exit(2)
    src = src[:match.start(1)] + repl + src[match.end(1):]
    f = open(srcfile,'w')
    f.write(src)
    f.close()

if __name__=='__main__':
    # '--md5update file...' refreshes the digest table; anything else is
    # treated as an install/upgrade request.
    if len(sys.argv)>2 and sys.argv[1]=='--md5update':
        update_md5(sys.argv[2:])
    else:
        main(sys.argv[1:])
| Python |
# Bootstrap setuptools (via the bundled ez_setup) before importing it.
from ez_setup import use_setuptools
use_setuptools()
from setuptools import setup, find_packages

setup(
    name='Forlater',
    version="0.8.5",
    #description="",
    #author="",
    #author_email="",
    #url="",
    # The application is written against the Pylons 0.9.5 API.
    install_requires=["Pylons>=0.9.5"],
    packages=find_packages(),
    # Ship translations plus templates and static assets with the package.
    package_data={
        'forlater': ['i18n/*/LC_MESSAGES/*.mo'],
        '': ['public/*.*', 'templates/*.*', 'public/images/*.*', 'public/js/*.*'],
    },
    include_package_data=True,
    test_suite = 'nose.collector',
    # Paste entry points: how 'paster make-config'/'paster serve' locate
    # the WSGI app factory and installer.
    entry_points="""
[paste.app_factory]
main=forlater:make_app
[paste.app_install]
main=pylons.util:PylonsInstaller
""",
)
| Python |
#!/usr/bin/python
# FastCGI entry point: load the Pylons WSGI application from the production
# config file and serve it through flup's FastCGI server.
from paste.deploy import loadapp
wsgi_app = loadapp('config:/usr/lib/cgi-bin/4l8r/production.ini')
# Deploy it using FastCGI
if __name__ == '__main__':
    from flup.server.fcgi import WSGIServer
    WSGIServer(wsgi_app).run()
| Python |
#!/usr/bin/python
# Load the WSGI application from the config file
from paste.deploy import loadapp
wsgi_app = loadapp('config:/usr/lib/cgi-bin/4l8r/production.ini')
import os, sys
def run_with_cgi(application):
    """Serve one CGI request through a WSGI application (the PEP 333
    reference gateway): build environ from os.environ, stream the
    response to stdout with lazily-emitted headers.

    Python 2 only: uses the 3-argument raise statement below.
    """
    environ = dict(os.environ.items())
    environ['wsgi.input'] = sys.stdin
    environ['wsgi.errors'] = sys.stderr
    environ['wsgi.version'] = (1,0)
    environ['wsgi.multithread'] = False
    environ['wsgi.multiprocess'] = True
    environ['wsgi.run_once'] = True
    if environ.get('HTTPS','off') in ('on','1'):
        environ['wsgi.url_scheme'] = 'https'
    else:
        environ['wsgi.url_scheme'] = 'http'
    # Shared mutable state between write() and start_response() closures.
    headers_set = []
    headers_sent = []
    def write(data):
        # Emit the stored headers just before the first body byte.
        if not headers_set:
            raise AssertionError("write() before start_response()")
        elif not headers_sent:
            # Before the first output, send the stored headers
            status, response_headers = headers_sent[:] = headers_set
            sys.stdout.write('Status: %s\r\n' % status)
            for header in response_headers:
                sys.stdout.write('%s: %s\r\n' % header)
            sys.stdout.write('\r\n')
        sys.stdout.write(data)
        sys.stdout.flush()
    def start_response(status,response_headers,exc_info=None):
        if exc_info:
            try:
                if headers_sent:
                    # Re-raise original exception if headers sent
                    raise exc_info[0], exc_info[1], exc_info[2]
            finally:
                exc_info = None  # avoid dangling circular ref
        elif headers_set:
            raise AssertionError("Headers already set!")
        headers_set[:] = [status,response_headers]
        return write
    result = application(environ, start_response)
    try:
        for data in result:
            if data:  # don't send headers until body appears
                write(data)
        if not headers_sent:
            write('')  # send headers now if body was empty
    finally:
        if hasattr(result,'close'):
            result.close()
# Deploy it as a plain one-shot CGI script (not FastCGI, despite the
# launcher variants of this file).
if __name__ == '__main__':
    run_with_cgi(wsgi_app)
| Python |
#!/usr/bin/python
# Load the WSGI application from the config file
from paste.deploy import loadapp
wsgi_app = loadapp('config:/usr/lib/cgi-bin/4l8r/production.ini')
# Deploy it using FastCGI (flup bridges FastCGI to the WSGI app).
if __name__ == '__main__':
    from flup.server.fcgi import WSGIServer
    WSGIServer(wsgi_app).run()
| Python |
#!/usr/bin/python
# Load the WSGI application from the config file
from paste.deploy import loadapp
wsgi_app = loadapp('config:/usr/lib/cgi-bin/4l8r/production.ini')
import os, sys
def run_with_cgi(application):
    """Serve one CGI request through a WSGI application (PEP 333
    reference gateway). Python 2 only (3-argument raise below)."""
    environ = dict(os.environ.items())
    environ['wsgi.input'] = sys.stdin
    environ['wsgi.errors'] = sys.stderr
    environ['wsgi.version'] = (1,0)
    environ['wsgi.multithread'] = False
    environ['wsgi.multiprocess'] = True
    environ['wsgi.run_once'] = True
    if environ.get('HTTPS','off') in ('on','1'):
        environ['wsgi.url_scheme'] = 'https'
    else:
        environ['wsgi.url_scheme'] = 'http'
    headers_set = []
    headers_sent = []
    def write(data):
        # Emit stored headers just before the first body byte.
        if not headers_set:
            raise AssertionError("write() before start_response()")
        elif not headers_sent:
            # Before the first output, send the stored headers
            status, response_headers = headers_sent[:] = headers_set
            sys.stdout.write('Status: %s\r\n' % status)
            for header in response_headers:
                sys.stdout.write('%s: %s\r\n' % header)
            sys.stdout.write('\r\n')
        sys.stdout.write(data)
        sys.stdout.flush()
    def start_response(status,response_headers,exc_info=None):
        if exc_info:
            try:
                if headers_sent:
                    # Re-raise original exception if headers sent
                    raise exc_info[0], exc_info[1], exc_info[2]
            finally:
                exc_info = None  # avoid dangling circular ref
        elif headers_set:
            raise AssertionError("Headers already set!")
        headers_set[:] = [status,response_headers]
        return write
    result = application(environ, start_response)
    try:
        for data in result:
            if data:  # don't send headers until body appears
                write(data)
        if not headers_sent:
            write('')  # send headers now if body was empty
    finally:
        if hasattr(result,'close'):
            result.close()
# Deploy it as a plain one-shot CGI script (not FastCGI).
if __name__ == '__main__':
    run_with_cgi(wsgi_app)
| Python |
#!/usr/bin/env python2
# -*- coding: utf-8 -*-
# $File: plot.py
# $Date: Thu Mar 13 20:23:07 2014 +0800
# $Author: jiakai <jia.kai66@gmail.com>
# pyximport compiles datafilter.pyx on the fly; install() must run before
# the datafilter import below.
import pyximport
pyximport.install()
import datafilter
from sample_and_record import get_db_conn
import matplotlib.pyplot as plt
from matplotlib import dates
import sys
import os.path
from datetime import datetime
from collections import namedtuple
from copy import deepcopy
class AQSample(object):
    """One air-quality measurement: unix timestamp plus the local, US
    and CN mass-concentration readings."""
    __slots__ = ['time', 'local', 'us', 'cn']
    def __init__(self, time, local, us, cn):
        # Explicit assignments (same attributes as __slots__, in order).
        self.time = time
        self.local = local
        self.us = us
        self.cn = cn
def load_sample():
    """Return every history row whose three concentrations are all
    positive, as a list of AQSample objects."""
    cursor = get_db_conn().cursor()
    return [
        AQSample(row['time'], row['local_conc'], row['us_conc'], row['cn_conc'])
        for row in list(cursor.execute('SELECT * FROM history'))
        if row['local_conc'] > 0 and row['us_conc'] > 0 and row['cn_conc'] > 0
    ]
def get_time_plot(plot_ax):
    """Create a figure with a time-formatted x axis and hand its single
    Axes to *plot_ax* for the actual plotting.

    Leaves the new figure as matplotlib's current figure; callers save it
    via plt.savefig() afterwards.
    """
    fig = plt.figure()
    ax = fig.add_subplot(111)
    plot_ax(ax)
    #ax.xaxis.set_major_locator(dates.HourLocator(interval=8))
    ax.xaxis.set_major_formatter(dates.DateFormatter('%m/%d %H:%M'))
    ax.set_ylim(bottom=0)
    ax.set_ylabel(r'mass concentration/$\mu gm^{-3}$')
    plt.legend(loc='best')
    # Vertical tick labels need extra bottom margin.
    plt.xticks(rotation='vertical')
    plt.subplots_adjust(bottom=.3)
def main(output_dir):
    """Produce scatter.png, local.png and compare.png in *output_dir*.

    Pipeline: load samples, build a rescaled+Gaussian-smoothed copy and an
    hourly-averaged copy, then plot local-vs-original, local-vs-official,
    and a lag-1 scatter of hourly local vs US readings.
    """
    orig = load_sample()
    filtered = deepcopy(orig)
    datafilter.rescale(filtered)
    datafilter.smooth_gaussian(filtered)
    avg = deepcopy(orig)
    datafilter.smooth_average(avg, 3600)
    # Thin the averaged series to at most one point per hour.
    avg1 = [avg[0]]
    for i in avg[1:]:
        if i.time - avg1[-1].time >= 3600:
            avg1.append(i)
    avg = avg1
    time = dates.date2num([datetime.fromtimestamp(i.time) for i in orig])
    def plot_local(ax):
        ax.plot(time, [i.local for i in filtered], label='filtered')
        ax.plot(time, [i.local for i in orig], label='original')
    def plot_compare(ax):
        ax.plot(time, [i.local for i in filtered], label='filtered')
        ax.plot(time, [i.us for i in orig], label='us')
        ax.plot(time, [i.cn for i in orig], label='cn')
    # Scatter: hourly local reading vs the NEXT hour's US reading.
    fig = plt.figure()
    ax = fig.add_subplot(111)
    ax.plot([i.local for i in avg[:-1]], [i.us for i in avg[1:]], '.')
    ax.set_xlabel('local')
    ax.set_ylabel(r'us/$\mu gm^{-3}$')
    plt.savefig(os.path.join(output_dir, 'scatter.png'))
    get_time_plot(plot_local)
    plt.savefig(os.path.join(output_dir, 'local.png'))
    get_time_plot(plot_compare)
    plt.savefig(os.path.join(output_dir, 'compare.png'))
    plt.show()
if __name__ == '__main__':
    # Exactly one argument: the directory the PNGs are written into.
    if len(sys.argv) != 2:
        sys.exit('usage: {} <output dir>'.format(sys.argv[0]))
    main(sys.argv[1])
| Python |
#!/usr/bin/env python2
# -*- coding: utf-8 -*-
# $File: daemon.py
# $Date: Mon Mar 03 09:35:26 2014 +0800
# $Author: jiakai <jia.kai66@gmail.com>
from sample_and_record import insert_db_entry
import time
# Seconds of uptime to require after boot before the first sample.
MIN_UPTIME = 120
# Seconds between successive samples.
SAMPLE_DELTA = 300
def get_uptime():
    """Return the system uptime in seconds, read from /proc/uptime.

    Only the first field (total uptime) is parsed. The old code indexed
    into map(float, ...), which breaks under Python 3 where map() returns
    a non-subscriptable iterator; converting just the needed field works
    identically on both versions.
    """
    with open('/proc/uptime') as fin:
        return float(fin.read().split()[0])
if __name__ == '__main__':
    # Right after boot, give the clock/network a chance to settle before
    # the first sample.
    if get_uptime() < MIN_UPTIME:
        time.sleep(MIN_UPTIME)
    # Sample forever at SAMPLE_DELTA intervals.
    while True:
        insert_db_entry()
        time.sleep(SAMPLE_DELTA)
| Python |
#!/usr/bin/env python2
# -*- coding: utf-8 -*-
# $File: dataproc.py
# $Date: Tue Mar 11 00:25:03 2014 +0800
# $Author: jiakai <jia.kai66@gmail.com>
import math
class LinearFunction(object):
    """The line y = k*x + b passing through (x0, y0) and (x1, y1).

    Used for piecewise-linear AQI interpolation below.
    """
    k = None
    b = None
    def __init__(self, x0, y0, x1, y1):
        # Force float arithmetic: with all-integer endpoints the old code
        # silently truncated the slope under Python 2 integer division.
        self.k = float(y1 - y0) / (x1 - x0)
        self.b = y0 - self.k * x0
        # Sanity check: the line really passes through both points.
        assert abs(self.eval(x0) - y0) < 1e-5
        assert abs(self.eval(x1) - y1) < 1e-5
    def eval(self, x):
        """Evaluate the line at x."""
        return self.k * x + self.b
# [concentration upper bound (ug/m^3), AQI upper bound] per band; appears
# to follow the US EPA PM2.5 breakpoints -- TODO confirm against the EPA
# table before extending.
AQI_TABLE = [
    [12.1, 51],
    [35.5, 101],
    [55.5, 151],
    [150.5, 201],
    [250.5, 301],
    [350.5, 401],
    [500, 500]
]
# pcs: m^{-3}
# Sensor low-ratio reading -> particle count, linear through the origin.
lowratio2pcs = LinearFunction(0, 0, 9.8e-2, 6000/283e-6).eval
# ug/m^3 * volume
# Mass of one 2.5um particle (density 1.65e6 ug/m^3 times sphere volume),
# scaled to per-m^3 counts.
_PCS2CONCENTRATION_COEFF = 1.65e6/1e-6 * math.pi*4/3*(2.5e-6/2)**3
def pcs2concentration(pcs):
    """Convert a particle count (per m^3) to mass concentration in ug/m^3."""
    return pcs * _PCS2CONCENTRATION_COEFF
def concentration2aqi(con):
    """Map a PM2.5 mass concentration to an AQI value by piecewise-linear
    interpolation over AQI_TABLE.

    Negative inputs (error sentinels) and values beyond the table pass
    through unchanged.
    """
    if con < 0:
        return con
    prev_c, prev_a = 0, 0
    for bound_c, bound_a in AQI_TABLE:
        if con < bound_c:
            return LinearFunction(prev_c, prev_a, bound_c, bound_a).eval(con)
        prev_c, prev_a = bound_c, bound_a
    return con
def aqi2concentration(aqi):
    """Inverse of concentration2aqi: interpolate AQI back to ug/m^3.

    Negative inputs (error sentinels) and values beyond the table pass
    through unchanged.
    """
    if aqi < 0:
        return aqi
    prev_c, prev_a = 0, 0
    for bound_c, bound_a in AQI_TABLE:
        if aqi < bound_a:
            return LinearFunction(prev_a, prev_c, bound_a, bound_c).eval(aqi)
        prev_c, prev_a = bound_c, bound_a
    return aqi
def lowratio2concentration(lr):
    """Sensor low-ratio reading -> mass concentration (ug/m^3)."""
    pcs = lowratio2pcs(lr)
    return pcs2concentration(pcs)
def lowratio2aqi(lr):
    """Sensor low-ratio reading -> AQI value."""
    conc = lowratio2concentration(lr)
    return concentration2aqi(conc)
| Python |
#!/usr/bin/env python2
# -*- coding: utf-8 -*-
# $File: sample_and_record.py
# $Date: Wed Mar 12 23:03:06 2014 +0800
# $Author: jiakai <jia.kai66@gmail.com>
# Patch the stdlib for cooperative sockets BEFORE anything else imports
# it, so the urllib2 fetch can overlap the local sampler subprocess.
from gevent import monkey
monkey.patch_all()
import gevent
import re
import json
import urllib2
import subprocess
import os
import os.path
import sqlite3
import calendar
import sys
from datetime import datetime
from dataproc import lowratio2concentration, aqi2concentration
# SQLite history database and the compiled local sampler binary, both
# resolved relative to this source file.
DB_PATH = os.path.join(os.path.dirname(__file__), 'data', 'history.db')
LOCAL_SAMPLE_EXE = os.path.join(os.path.dirname(__file__), 'getsample')
def get_conc_aqicn():
    """:return: tuple(<US data>, <Haidian data>)

    Scrapes aqicn.org's embedded `localCityData` JS blob, pulls the AQI of
    the two stations and converts each back to a mass concentration.
    A station that is not found stays at the -1 error sentinel.
    """
    URL = 'http://aqicn.org/city/beijing'
    def parse_page(page):
        # Slice from the '{' of the JS assignment to the page's LAST ']',
        # then close the object with '}'.
        # NOTE(review): the `page.find(';', p0)` result is immediately
        # overwritten by rfind(']') -- dead code; the whole parse is
        # tightly coupled to aqicn.org's current markup.
        p0 = page.find('var localCityData =')
        p0 = page.find('{', p0)
        p1 = page.find(';', p0)
        p1 = page.rfind(']')
        data = page[p0:p1+1] + '}'
        return json.loads(data)
    page = urllib2.urlopen(URL).read()
    data = parse_page(page)['Beijing']
    rst = [-1, -1]
    for i in data:
        # The asserts guard against duplicate station entries.
        if 'US Embassy' in i['city']:
            assert rst[0] == -1
            rst[0] = int(i['aqi'])
        if 'Haidian Wanliu' in i['city']:
            assert rst[1] == -1
            rst[1] = int(i['aqi'])
    return map(aqi2concentration, rst)
def get_conc_bjair():
    """:return: tuple(<US data>, <CN data>)

    Scrapes beijing-air.com for its two published PM2.5 concentrations
    (the page lists CN first, then US; the split captures them in that
    order and we return them swapped).
    """
    URL = 'http://www.beijing-air.com'
    page = urllib2.urlopen(URL).read().decode('utf-8')
    # Fix: the old code passed re.MULTILINE as re.split()'s third
    # positional argument, which is *maxsplit*, not *flags* -- it silently
    # capped the split count at 8.  Also escape the '.' so "PM2.5" is
    # matched literally (a raw ur'' prefix is unnecessary once escaped
    # explicitly).  The flags= keyword needs Python >= 2.7.
    data = re.split(u'PM2\\.5浓度:([0-9]*)', page, flags=re.MULTILINE)
    return map(int, [data[3], data[1]])
get_conc = get_conc_bjair
def init_db(conn):
    """Create the `history` table plus one index per concentration column
    on the given SQLite connection, and commit."""
    cur = conn.cursor()
    cur.execute("""CREATE TABLE history
            (time INTEGER PRIMARY KEY,
            pm1_ratio REAL,
            pm25_ratio REAL,
            local_conc REAL,
            us_conc REAL,
            cn_conc REAL,
            err_msg TEXT)""")
    for col in ('local', 'us', 'cn'):
        stmt = """CREATE INDEX idx_{0}_conc ON history ({0}_conc)""".format(col)
        cur.execute(stmt)
    conn.commit()
def get_db_conn():
    """Open the history database, creating its schema on first use.

    Rows are returned as sqlite3.Row so columns are addressable by name.
    """
    needs_init = not os.path.exists(DB_PATH)
    conn = sqlite3.connect(DB_PATH)
    if needs_init:
        init_db(conn)
    conn.row_factory = sqlite3.Row
    return conn
def get_local_sample():
    """:return: list(pm1 ratio, pm25 ratio)

    Runs the external sampler binary and parses its first two output
    lines as floats.

    :raises RuntimeError: if the sampler exits with a non-zero status.
    """
    subp = subprocess.Popen(LOCAL_SAMPLE_EXE,
                            stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    stdout, stderr = subp.communicate()
    # communicate() waits for the process to exit, so returncode is always
    # set here; the old `if subp.poll() is not None` guard was redundant.
    if subp.returncode:
        raise RuntimeError(
            'failed to run local sampler: ret={}\n{}\n{}\n'.format(
                subp.returncode, stdout, stderr))
    lines = stdout.split('\n')
    return map(float, lines[:2])
def insert_db_entry():
    """Take one sample (local sensor + remote scraper, concurrently via
    gevent) and append a row to the history table.

    Failed readings keep their -1 sentinel and the failure reason is
    recorded in err_msg.
    """
    time = calendar.timegm(datetime.utcnow().timetuple())
    pm1_ratio, pm25_ratio, local_conc, us_conc, cn_conc = [-1] * 5
    err_msg = None
    # Fetch the remote concentrations while we sample locally.
    job = gevent.spawn(get_conc)
    try:
        pm1_ratio, pm25_ratio = get_local_sample()
        local_conc = lowratio2concentration(pm1_ratio - pm25_ratio)
    except Exception as exc:
        err_msg = 'failed to sample locally: {}'.format(exc)
    job.join()
    try:
        if job.successful():
            us_conc, cn_conc = job.value
        else:
            raise job.exception
    except Exception as exc:
        if err_msg is None:
            err_msg = ''
        # Append: the old code overwrote err_msg here, discarding any
        # local-sampling failure recorded above (the dead `err_msg = ''`
        # initialisation shows appending was the intent).
        err_msg += 'failed to retrieve AQI: {}'.format(exc)
    conn = get_db_conn()
    conn.cursor().execute(
        """INSERT INTO history VALUES (?, ?, ?, ?, ?, ?, ?)""",
        (time, pm1_ratio, pm25_ratio, local_conc, us_conc, cn_conc, err_msg))
    conn.commit()
if __name__ == '__main__':
    # `test` mode prints both scrapers' results (Python 2 print
    # statements); otherwise take and store one sample.
    if sys.argv[1:] == ['test']:
        print get_conc_aqicn()
        print get_conc_bjair()
    else:
        insert_db_entry()
| Python |
#!/usr/bin/env python2
# -*- coding: utf-8 -*-
# $File: update_db.py
# $Date: Tue Mar 11 00:07:07 2014 +0800
# $Author: jiakai <jia.kai66@gmail.com>
import sys
# Make the sibling modules importable when this tool is run from its
# own subdirectory.
sys.path.append('..')
from sample_and_record import get_db_conn, init_db
from dataproc import aqi2concentration, lowratio2concentration
def update_db():
    """One-off schema migration: rename the old table to history00,
    recreate the new schema, and re-insert every row with the raw
    ratios/AQI columns converted to mass concentrations.

    Python 2 script (print statement below); history00 is intentionally
    kept as a backup.
    """
    conn = get_db_conn()
    conn.cursor().execute("""ALTER TABLE history RENAME TO history00""")
    init_db(conn)
    c = conn.cursor()
    for i in list(c.execute('SELECT * from history00')):
        print i[0]  # progress indicator: the row's timestamp
        c.execute(
            """INSERT INTO history VALUES (?, ?, ?, ?, ?, ?, ?)""",
            (i[0], i[1], i[2],
             lowratio2concentration(i[1] - i[2]),
             aqi2concentration(i[4]),
             aqi2concentration(i[5]),
             i[6]))
    conn.commit()
# Run the migration when executed directly.
if __name__ == '__main__':
    update_db()
| Python |
#!/usr/bin/env python2
# -*- coding: utf-8 -*-
# $File: datafilter.pyx
# $Date: Thu Mar 13 19:08:46 2014 +0800
# $Author: jiakai <jia.kai66@gmail.com>
cimport cython
from libc.stdlib cimport malloc, free
from libc.math cimport exp
cdef:
    # Gaussian kernel: sigma = 1800 s (30 min), truncated at 3 sigma.
    int FILTER_SIGMA = 1800
    int FILTER_RADIUS = 3 * FILTER_SIGMA
    double FILTER_SIGMA_INV = 1.0 / FILTER_SIGMA
    double gaussian(double x):
        # Unnormalised Gaussian weight exp(-x^2 / (2*sigma^2)).
        x *= FILTER_SIGMA_INV
        return exp(-0.5 * x * x)
    struct SamplePoint:
        # One (unix time, value) pair in C layout for fast scans.
        int time
        double val
def smooth_gaussian(pysample):
    """Gaussian-smooth the .local values of pysample in place.

    Copies (time, local) into a C array, then for each point averages the
    neighbours within +-FILTER_RADIUS seconds with Gaussian weights.
    """
    cdef:
        size_t current, t, nr_sample
        SamplePoint* sample
        int cur_time, head_time, tail_time
        double weight_sum, val_sum, w
    sample = <SamplePoint*>malloc(len(pysample) * cython.sizeof(SamplePoint))
    if sample is NULL:
        raise MemoryError()
    current = 0
    for i in pysample:
        sample[current].time = i.time
        sample[current].val = i.local
        current += 1
    nr_sample = current
    current = 0
    for i in pysample:
        cur_time = sample[current].time
        head_time = cur_time - FILTER_RADIUS
        tail_time = cur_time + FILTER_RADIUS
        weight_sum = gaussian(0)
        val_sum = weight_sum * sample[current].val
        # Scan left of `current`.  NOTE(review): t is size_t, so past index
        # 0 the decrement wraps to SIZE_MAX and `t < nr_sample` terminates
        # the loop -- this relies on unsigned wraparound; confirm intended.
        t = current - 1
        while t < nr_sample and sample[t].time >= head_time:
            w = gaussian(sample[t].time - cur_time)
            weight_sum += w
            val_sum += w * sample[t].val
            t -= 1
        # Scan right of `current`.
        t = current + 1
        while t < nr_sample and sample[t].time <= tail_time:
            w = gaussian(sample[t].time - cur_time)
            weight_sum += w
            val_sum += w * sample[t].val
            t += 1
        i.local = val_sum / weight_sum
        current += 1
    free(sample)
def smooth_average(pysample, length):
    """Replace each .local value with the mean over a sliding time window
    of `length` seconds (half on each side), in place.

    Uses a two-pointer running sum over a C copy of the samples, so the
    whole pass is O(n).
    """
    length /= 2
    cdef:
        size_t current, nr_sample, head, tail
        SamplePoint* sample
        double cur_val_sum
        int cur_time, head_time, tail_time
    sample = <SamplePoint*>malloc(len(pysample) * cython.sizeof(SamplePoint))
    if sample is NULL:
        raise MemoryError()
    current = 0
    for i in pysample:
        sample[current].time = i.time
        sample[current].val = i.local
        current += 1
    nr_sample = current
    current = 0
    head = 0
    tail = 0
    cur_val_sum = 0
    for i in pysample:
        cur_time = sample[current].time
        head_time = cur_time - length
        tail_time = cur_time + length
        # Drop samples that fell out of the window on the left...
        while sample[head].time < head_time:
            cur_val_sum -= sample[head].val
            head += 1
        # ...and absorb new ones on the right.
        while tail < nr_sample and sample[tail].time < tail_time:
            cur_val_sum += sample[tail].val
            tail += 1
        # The current sample is always inside its own window, so
        # tail - head is presumably >= 1 here -- verify if times can repeat.
        i.local = cur_val_sum / (tail - head)
        current += 1
    free(sample)
def rescale(sample):
    """min sum((i.local * x - i.us)^2)"""
    # NOTE(review): the docstring claims least squares, but the computed
    # x = sum(us)/sum(local) only matches the totals (sum(local)*x ==
    # sum(us)); true least squares would be sum(local*us)/sum(local^2).
    # Also raises ZeroDivisionError on an empty/all-zero sample.
    sum_a = sum(i.local for i in sample)
    sum_b = sum(i.us for i in sample)
    x = sum_b / sum_a
    print 'rescale:', x
    for i in sample:
        i.local *= x
| Python |
# -*- coding: utf-8 -*-
from __future__ import with_statement # This isn't required in Python 2.6
__metaclass__ = type
from contextlib import closing, contextmanager
import os, sys, traceback
import os.path
import base64
import urllib
import urlparse
from datetime import date
from mod_python import apache, util
from util import parse_qs
# Shorthand used elsewhere in the module (`date` comes from datetime).
today = date.today
ver = sys.version_info
# Compare the full (major, minor) tuple: the old `ver[0]<2 and ver[1]<5`
# test only fired for Python 1.x with a minor below 5, letting 2.0-2.4
# through unchallenged.
if ver < (2, 5):
    raise EnvironmentError('Must have Python version 2.5 or higher.')
# Hard requirements: fail fast at import time with a clear message
# instead of a NameError deep inside a request handler.
try:
    import json
except ImportError:
    raise EnvironmentError('Must have the json module. (It is included in Python 2.6 or can be installed on version 2.5.)')
try:
    from PIL import Image
except ImportError:
    raise EnvironmentError('Must have the PIL (Python Imaging Library).')
# Short aliases for the os.path / urlparse helpers used throughout.
path_exists = os.path.exists
normalize_path = os.path.normpath
absolute_path = os.path.abspath
make_url = urlparse.urljoin
split_path = os.path.split
split_ext = os.path.splitext
# The rest of this module calls encode_urlpath(); define it under the
# correct name and keep the original misspelling for compatibility.
encode_urlpath = urllib.quote_plus
euncode_urlpath = urllib.quote_plus
# Fixed typo: the class is json.JSONEncoder, not JSONEcoder.
encode_json = json.JSONEncoder().encode
def encodeURLsafeBase64(data):
    """URL-safe base64 of *data* with padding and newlines stripped."""
    # '\n' here: the old r'\x0A' was a literal backslash-x-0-A string and
    # never matched an actual newline.
    return base64.urlsafe_b64encode(data).replace('=', '').replace('\n', '')
def image(*args):
    # Placeholder for a future image-handling endpoint; deliberately
    # unimplemented.
    raise NotImplementedError
class Filemanager:
    """Replacement for FCKEditor's built-in file manager.

    NOTE(review): this class carries numerous latent defects, flagged
    inline below; it does not look like it has ever been exercised
    end-to-end.  Each handler writes its JSON result straight to the
    mod_python request object.
    """
    def __init__(self, fileroot= '/'):
        # Root directory that every operation must stay inside.
        self.fileroot = fileroot
        # Pre-rendered permission-denied payload.
        self.patherror = encode_json(
            {
                'Error' : 'No permission to operate on specified path.',
                'Code' : -1
            }
        )
    def isvalidrequest(self, **kwargs):
        """Returns an error if the given path is not within the specified root path."""
        # NOTE(review): asserts return None, so callers' `if not
        # self.isvalidrequest(...)` always takes the error branch; also
        # every call site passes positional arguments, which a pure
        # **kwargs signature cannot accept (TypeError).
        assert split_path(kwargs['path'])[0]==self.fileroot
        assert not kwargs['req'] is None
    def getinfo(self, path=None, getsize=true, req=None):
        """Returns a JSON object containing information about the given file."""
        # NOTE(review): `true` is not a Python name (should be True) --
        # NameError when the class body is evaluated.
        if not self.isvalidrequest(path,req):
            return (self.patherror, None, 'application/json')
        thefile = {
            'Filename' : split_path(path)[-1],
            'File Type' : '',
            'Preview' : path if split_path(path)[-1] else 'images/fileicons/_Open.png',
            'Path' : path,
            'Error' : '',
            'Code' : 0,
            'Properties' : {
                'Date Created' : '',
                'Date Modified' : '',
                'Width' : '',
                'Height' : '',
                'Size' : ''
            }
        }
        # NOTE(review): set() takes one iterable, not four strings
        # (TypeError); likely meant set(['gif', 'jpg', 'jpeg', 'png']).
        imagetypes = set('gif','jpg','jpeg','png')
        if not path_exists(path):
            thefile['Error'] = 'File does not exist.'
            return (encode_json(thefile), None, 'application/json')
        if split_path(path)[-1]=='/':
            thefile['File Type'] = 'Directory'
        else:
            thefile['File Type'] = split_ext(path)
            # NOTE(review): `ext` is never assigned (NameError); presumably
            # split_ext(path)[1] stripped of its dot was intended.
            if ext in imagetypes:
                # NOTE(review): PIL usage is Image.open(path).size (a
                # tuple attribute), not Image(path).size().
                img = Image(path).size()
                thefile['Properties']['Width'] = img[0]
                thefile['Properties']['Height'] = img[1]
            else:
                # NOTE(review): ext.upper is missing its call parentheses.
                previewPath = 'images/fileicons/' + ext.upper + '.png'
                thefile['Preview'] = previewPath if path_exists('../../' + previewPath) else 'images/fileicons/default.png'
        thefile['Properties']['Date Created'] = os.path.getctime(path)
        thefile['Properties']['Date Modified'] = os.path.getmtime(path)
        thefile['Properties']['Size'] = os.path.getsize(path)
        # NOTE(review): in mod_python, content_type is an attribute to
        # assign, not a callable; same pattern in every method below.
        req.content_type('application/json')
        req.write(encode_json(thefile))
    def getfolder(self, path=None, getsizes=true, req=None):
        # Directory listing: getinfo() for each entry.
        # NOTE(review): `true` -> True; `filtlist` vs `filelist` name
        # mismatch; `file_listdirectory` and `literal` are undefined
        # (os.listdir was probably meant); the `i[0]=='.'` test looks
        # inverted -- as written it keeps ONLY dotfiles.
        if not self.isvalidrequest(path,req):
            return (self.patherror, None, 'application/json')
        result = []
        filtlist = file_listdirectory(path)
        for i in filelist:
            if i[0]=='.':
                result += literal(self.getinfo(path + i, getsize=getsizes))
        req.content_type('application/json')
        req.write(encode_json(result))
    def rename(self, old=None, new=None, req=None):
        # NOTE(review): `path` is read before being assigned; `string` is
        # not a builtin (str was meant); os.path has no rename (os.rename);
        # 'Error' is set unconditionally, even on success.
        if not self.isvalidrequest(path=new,req=req):
            return (self.patherror, None, 'application/json')
        if old[-1]=='/':
            old=old[:-1]
        oldname = split_path(path)[-1]
        path = string(old)
        path = split_path(path)[0]
        if not path[-1]=='/':
            path += '/'
        newname = encode_urlpath(new)
        newpath = path + newname
        os.path.rename(old, newpath)
        result = {
            'Old Path' : old,
            'Old Name' : oldname,
            'New Path' : newpath,
            'New Name' : newname,
            'Error' : 'There was an error renaming the file.' # todo: get the actual error
        }
        req.content_type('application/json')
        req.write(encode_json(result))
    def delete(self, path=None, req=None):
        # NOTE(review): os.path has no remove (os.remove); the error
        # message was copy-pasted from rename().
        if not self.isvalidrequest(path,req):
            return (self.patherror, None, 'application/json')
        os.path.remove(path)
        result = {
            'Path' : path,
            'Error' : 'There was an error renaming the file.' # todo: get the actual error
        }
        req.content_type('application/json')
        req.write(encode_json(result))
    def add(self, path=None, req=None):
        # Upload handler.  NOTE(review): the target file is opened 'rb'
        # then written (should be 'wb'); the except/else messages look
        # swapped (the success branch reports 'No file was uploaded');
        # `file_currenterror` is undefined; the bare except hides every
        # failure mode, including a NameError on newName.
        if not self.isvalidrequest(path,req):
            return (self.patherror, None, 'application/json')
        try:
            thefile = util.FieldStorage(req)['file'] #TODO get the correct param name for the field holding the file
            newName = thefile.filename
            with open(newName, 'rb') as f:
                f.write(thefile.value)
        except:
            result = {
                'Path' : path,
                'Name' : newName,
                'Error' : file_currenterror
            }
        else:
            result = {
                'Path' : path,
                'Name' : newName,
                'Error' : 'No file was uploaded.'
            }
        req.content_type('text/html')
        req.write(('<textarea>' + encode_json(result) + '</textarea>'))
    def addfolder(self, path, name):
        # NOTE(review): `req` is not a parameter here (NameError); the
        # success path builds no result and nothing is ever written back
        # to the client.
        if not self.isvalidrequest(path,req):
            return (self.patherror, None, 'application/json')
        newName = encode_urlpath(name)
        newPath = path + newName + '/'
        if not path_exists(newPath):
            try:
                os.mkdir(newPath)
            except:
                result = {
                    'Path' : path,
                    'Name' : newName,
                    'Error' : 'There was an error creating the directory.' # TODO grab the actual traceback.
                }
    def download(self, path=None, req=None):
        """Stream the file at *path* to the client as an attachment."""
        if not self.isvalidrequest(path,req):
            return (self.patherror, None, 'application/json')
        name = path.split('/')[-1]
        req.content_type('application/x-download')
        req.filename=name
        req.sendfile(path)
myFilemanager = Filemanager(fileroot='/var/www/html/dev/fmtest/UserFiles/') #modify fileroot as a needed
def handler(req):
#req.content_type = 'text/plain'
#req.write("Hello World!")
if req.method == 'POST':
kwargs = parse_qs(req.read())
elif req.method == 'GET':
kwargs = parse_qs(req.args)
#oldid = os.getuid()
#os.setuid(501)
try:
method=str(kwargs['mode'][0])
methodKWargs=kwargs.remove('mode')
methodKWargs['req']=req
myFilemanager.__dict__['method'](**methodKWargs)
return apache.OK
except KeyError:
return apache.HTTP_BAD_REQUEST
except Exception, (errno, strerror):
apache.log_error(strerror, apache.APLOG_CRIT)
return apache.HTTP_INTERNAL_SERVER_ERROR
#os.setuid(oldid)
| Python |
#
# jQuery File Tree
# Python/Django connector script
# By Martin Skou
#
import os
import urllib
def dirlist(request):
r=['<ul class="jqueryFileTree" style="display: none;">']
try:
r=['<ul class="jqueryFileTree" style="display: none;">']
d=urllib.unquote(request.POST.get('dir','c:\\temp'))
for f in os.listdir(d):
ff=os.path.join(d,f)
if os.path.isdir(ff):
r.append('<li class="directory collapsed"><a href="#" rel="%s/">%s</a></li>' % (ff,f))
else:
e=os.path.splitext(f)[1][1:] # get .ext and remove dot
r.append('<li class="file ext_%s"><a href="#" rel="%s">%s</a></li>' % (e,ff,f))
r.append('</ul>')
except Exception,e:
r.append('Could not load directory: %s' % str(e))
r.append('</ul>')
return HttpResponse(''.join(r)) | Python |
# Read one float from stdin and print its absolute value (Python 2 script).
from sys import stdin
x, = map(float, stdin.readline().strip().split())
print abs(x)
| Python |
# Read two integers and print them in swapped order.
from sys import stdin
a, b = map(int, stdin.readline().strip().split())
print b, a
| Python |
# Sum 1..n via the closed form n*(n+1)/2 (integer division under Python 2).
from sys import stdin
n, = map(int, stdin.readline().strip().split())
print n*(n+1)/2
| Python |
# Mean of three integers, printed with three decimals.
from sys import stdin
a, b, c = map(int, stdin.readline().strip().split())
print "%.3lf" % ((a+b+c)/3.0)
| Python |
# Reverse a 3-character token read from stdin.
from sys import stdin
n = stdin.readline().strip().split()[0]
print '%c%c%c' % (n[2], n[1], n[0])
| Python |
# 95 yuan per unit; a 15% discount applies once the total reaches 300.
from sys import stdin
n, = map(int, stdin.readline().strip().split())
money = n * 95
if money >= 300: money *= 0.85
print "%.2lf" % money
| Python |
# Euclidean distance between two points, three decimals.
from sys import stdin
from math import *
x1, y1, x2, y2 = map(float, stdin.readline().strip().split())
print "%.3lf" % hypot((x1-x2), (y1-y2))
| Python |
# Fahrenheit to Celsius.
from sys import stdin
f, = map(float, stdin.readline().strip().split())
print "%.3lf" % (5*(f-32)/9)
| Python |
# Heads/legs puzzle: n heads, m legs; a two-legged and b four-legged
# animals, from 2a + 4b = m and a + b = n => a = (4n - m)/2.
from sys import stdin
n, m = map(int, stdin.readline().strip().split())
a = (4*n-m)/2
b = n-a
if m % 2 == 1 or a < 0 or b < 0: print "No answer"
else: print a, b
| Python |
# Classify three side lengths: right triangle ('yes'), not a triangle at
# all (triangle inequality fails), otherwise 'no'.
from sys import stdin
a, b, c = map(int, stdin.readline().strip().split())
if a*a + b*b == c*c or a*a + c*c == b*b or b*b + c*c == a*a: print "yes"
elif a + b <= c or a + c <= b or b + c <= a: print "not a triangle"
else: print "no"
| Python |
# Prints "yes" for an even n, "no" for an odd n.
from sys import stdin
n, = map(int, stdin.readline().strip().split())
print ["yes", "no"][n % 2]
| Python |
# sin and cos of an angle given in degrees.
from sys import stdin
from math import *
n, = map(int, stdin.readline().strip().split())
rad = radians(n)
print "%.3lf %.3lf" % (sin(rad), cos(rad))
| Python |
# Print three integers in ascending order.
from sys import stdin
a = map(int, stdin.readline().strip().split())
a.sort()
print a[0], a[1], a[2]
| Python |
# Surface area of a cylinder: 2*pi*r^2 + 2*pi*r*h.
from sys import stdin
from math import *
r, h = map(float, stdin.readline().strip().split())
print "Area = %.3lf" % (pi*r*r*2 + 2*pi*r*h)
| Python |
# Leap-year test via the stdlib calendar.isleap.
from sys import stdin
from calendar import isleap
year, = map(int, stdin.readline().strip().split())
if isleap(year): print "yes"
else: print "no"
| Python |
# Leibniz series for pi/4: 1 - 1/3 + 1/5 - ..., stopping once a term
# drops below 1e-6 (the final small term is still added before the break).
s = i = 0
while True:
    term = 1.0 / (i*2+1)
    s += term * ((-1)**i)
    if term < 1e-6: break
    i += 1
print "%.6lf" % s
| Python |
# Partial sum of 1/i^2 for i in [n, m].
from sys import stdin
n, m = map(int, stdin.readline().strip().split())
print "%.5lf" % sum([1.0/i/i for i in range(n,m+1)])
| Python |
# Print a/b at c significant digits using the decimal module's context.
from sys import stdin
from decimal import *
a, b, c = map(int, stdin.readline().strip().split())
getcontext().prec = c
print Decimal(a) / Decimal(b)
| Python |
from itertools import product
from math import *
def issqrt(n):
    """Return True when n is a perfect square."""
    root = int(floor(sqrt(n)))
    return root * root == n
# All 4-digit numbers of the form aabb (1100*a + 11*b) that are squares.
aabb = [a*1100+b*11 for a,b in product(range(1,10),range(10))]
print ' '.join(map(str, filter(issqrt, aabb)))
| Python |
# Last six digits of 1! + 2! + ... + n!.
from sys import stdin
from math import *
n = int(stdin.readline().strip())
print sum(map(factorial, range(1,n+1))) % (10**6)
| Python |
from sys import stdin
def cycle(n):
    """Return the number of Collatz steps needed to reduce n to 1."""
    steps = 0
    while n != 1:
        # 3n+1 for odd values, halve for even ones.
        n = n * 3 + 1 if n % 2 == 1 else n / 2
        steps += 1
    return steps
# Read n and print its Collatz cycle length.
n = int(stdin.readline().strip())
print cycle(n)
| Python |
# Find every abc such that abc, 2*abc, 3*abc together use the digits
# 1-9 exactly once (329 is the largest abc keeping 3*abc below 1000).
for abc in range(123, 329):
    big = str(abc) + str(abc*2) + str(abc*3)
    if(''.join(sorted(big)) == '123456789'): print abc, abc*2, abc*3
| Python |
# Length of one input line (newline stripped).
from sys import stdin
print len(stdin.readline().strip())
| Python |
# Smallest x in [10, 100] with x%3==a, x%5==b, x%7==c, or 'No answer'
# (Python 2 print statements).
from sys import stdin
def solve(a, b, c):
    for i in range(10, 101):
        if i % 3 == a and i % 5 == b and i % 7 == c:
            print i
            return
    print 'No answer'
a, b, c = map(int, stdin.readline().strip().split())
solve(a, b, c)
| Python |
# Input layout: first value n (a count), last value m (a threshold);
# count how many of the middle values are below m.
from sys import stdin
data = map(int, stdin.readline().strip().split())
n, m = data[0], data[-1]
data = data[1:-1]
print len(filter(lambda x: x < m, data))
| Python |
# min, max and mean of the input integers.
from sys import stdin
a = map(int, stdin.readline().strip().split())
print "%d %d %.3lf" % (min(a), max(a), float(sum(a)) / len(a))
| Python |
# Harmonic number H_n = 1 + 1/2 + ... + 1/n, three decimals.
from sys import stdin
n = int(stdin.readline().strip())
print "%.3lf" % sum([1.0/x for x in range(1,n+1)])
| Python |
# Three-digit Armstrong numbers: a^3 + b^3 + c^3 equals the number itself.
from itertools import product
sol = [a*100+b*10+c for a,b,c in product(range(1,10), range(10), range(10)) if a**3+b**3+c**3 == a*100+b*10+c]
print '\n'.join(map(str, sol))
| Python |
# Inverted pyramid of '#': row i is indented i spaces, width 2*(n-i)-1.
from sys import stdin
n = int(stdin.readline().strip())
count = n*2-1
for i in range(n):
    print ' '*i + '#'*count
    count -= 2
| Python |
#! /usr/bin/env python
# coding=utf-8
#############################################################################
# #
# File: common.py #
# #
# Copyright (C) 2008-2010 Du XiaoGang <dugang.2008@gmail.com> #
# #
# Home: http://gappproxy.googlecode.com #
# #
# This file is part of GAppProxy. #
# #
# GAppProxy is free software: you can redistribute it and/or modify #
# it under the terms of the GNU General Public License as #
# published by the Free Software Foundation, either version 3 of the #
# License, or (at your option) any later version. #
# #
# GAppProxy is distributed in the hope that it will be useful, #
# but WITHOUT ANY WARRANTY; without even the implied warranty of #
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
# GNU General Public License for more details. #
# #
# You should have received a copy of the GNU General Public License #
# along with GAppProxy. If not, see <http://www.gnu.org/licenses/>. #
# #
#############################################################################
import os, sys
def we_are_frozen():
    """Report whether we run as a py2exe-frozen executable.

    Frozen builds must locate resources relative to the executable
    rather than this module's source file.
    """
    frozen = hasattr(sys, "frozen")
    return frozen
def module_path():
    """Return the program's directory, frozen (py2exe) or not."""
    base = sys.executable if we_are_frozen() else __file__
    return os.path.dirname(base)
# NOTE(review): `dir` shadows the dir() builtin for the rest of this
# module; worth renaming when this block is next touched.
dir = module_path()
VERSION = "2.0.0"
# Central service that returns the URL of an available fetch server.
LOAD_BALANCE = 'http://gappproxy-center.appspot.com/available_fetchserver.py'
GOOGLE_PROXY = 'www.google.cn:80'
DEF_LISTEN_PORT = 8000
DEF_LOCAL_PROXY = ''
DEF_FETCH_SERVER = ''
# Config and TLS material resolved relative to the program directory
# (works for frozen builds too, via module_path()).
DEF_CONF_FILE = os.path.join(dir, 'proxy.conf')
DEF_CERT_FILE = os.path.join(dir, 'CA.cert')
DEF_KEY_FILE = os.path.join(dir, 'CA.key')
class GAppProxyError(Exception):
    """Application-level error raised by GAppProxy components."""
    def __init__(self, reason):
        # Keep the human-readable reason for __str__ and for callers.
        self.reason = reason
    def __str__(self):
        return '<GAppProxy Error: %s>' % (self.reason,)
| Python |
#! /usr/bin/env python
# coding=utf-8
#############################################################################
# #
# File: proxy.py #
# #
# Copyright (C) 2008-2010 Du XiaoGang <dugang.2008@gmail.com> #
# #
# Home: http://gappproxy.googlecode.com #
# #
# This file is part of GAppProxy. #
# #
# GAppProxy is free software: you can redistribute it and/or modify #
# it under the terms of the GNU General Public License as #
# published by the Free Software Foundation, either version 3 of the #
# License, or (at your option) any later version. #
# #
# GAppProxy is distributed in the hope that it will be useful, #
# but WITHOUT ANY WARRANTY; without even the implied warranty of #
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
# GNU General Public License for more details. #
# #
# You should have received a copy of the GNU General Public License #
# along with GAppProxy. If not, see <http://www.gnu.org/licenses/>. #
# #
#############################################################################
import BaseHTTPServer, SocketServer, urllib, urllib2, urlparse, zlib, socket, os, common, sys, errno, base64, re
# HTTPS support requires the ssl module (Python 2.6+).  Catch only
# ImportError: the old bare `except:` would also have hidden unrelated
# startup failures inside the ssl import.
try:
    import ssl
    ssl_enabled = True
except ImportError:
    ssl_enabled = False
# Global runtime configuration; overwritten from the config/CLI by the
# launcher before serving.
listen_port = common.DEF_LISTEN_PORT
local_proxy = common.DEF_LOCAL_PROXY
fetch_server = common.DEF_FETCH_SERVER
google_proxy = {}
class LocalProxyHandler(BaseHTTPServer.BaseHTTPRequestHandler):
    # Google App Engine caps request payloads; reject POST bodies over 1 MiB.
    PostDataLimit = 0x100000
    def do_CONNECT(self):
        """Handle an HTTPS CONNECT by terminating TLS locally with our own
        CA certificate, rewriting the request line to an absolute https://
        URL, and re-issuing it through this same proxy's plain-HTTP port.
        """
        if not ssl_enabled:
            self.send_error(501, "Local proxy error, HTTPS needs Python2.6 or later.")
            self.connection.close()
            return
        # for ssl proxy
        (https_host, _, https_port) = self.path.partition(":")
        if https_port != "" and https_port != "443":
            self.send_error(501, "Local proxy error, Only port 443 is allowed for https.")
            self.connection.close()
            return
        # Tell the client the tunnel is up, then start our own TLS server
        # on the raw connection (man-in-the-middle with our CA).
        self.wfile.write("HTTP/1.1 200 OK\r\n")
        self.wfile.write("\r\n")
        ssl_sock = ssl.SSLSocket(self.connection, server_side=True, certfile=common.DEF_CERT_FILE, keyfile=common.DEF_KEY_FILE)
        # rewrite request line, url to abs
        first_line = ""
        while True:
            # NOTE(review): `chr` shadows the chr() builtin in this scope.
            chr = ssl_sock.read(1)
            # EOF?
            if chr == "":
                # bad request
                ssl_sock.close()
                self.connection.close()
                return
            # newline(\r\n)?
            if chr == "\r":
                chr = ssl_sock.read(1)
                if chr == "\n":
                    # got
                    break
                else:
                    # bad request
                    ssl_sock.close()
                    self.connection.close()
                    return
            # newline(\n)?
            if chr == "\n":
                # got
                break
            first_line += chr
        # got path, rewrite
        (method, path, ver) = first_line.split()
        if path.startswith("/"):
            path = "https://%s" % https_host + path
        # connect to local proxy server
        sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        sock.connect(("127.0.0.1", listen_port))
        sock.send("%s %s %s\r\n" % (method, path, ver))
        # forward https request
        # A 1-second read timeout is (ab)used to detect the end of the
        # client's request data.
        ssl_sock.settimeout(1)
        while True:
            try:
                data = ssl_sock.read(8192)
            except ssl.SSLError, e:
                if str(e).lower().find("timed out") == -1:
                    # error
                    sock.close()
                    ssl_sock.close()
                    self.connection.close()
                    return
                # timeout
                break
            if data != "":
                sock.send(data)
            else:
                # EOF
                break
        ssl_sock.setblocking(True)
        # simply forward response
        while True:
            data = sock.recv(8192)
            if data != "":
                ssl_sock.write(data)
            else:
                # EOF
                break
        # clean
        sock.close()
        ssl_sock.shutdown(socket.SHUT_WR)
        ssl_sock.close()
        self.connection.close()
    def do_METHOD(self):
        """Forward a plain GET/HEAD/POST through the GAE fetch server.

        The original request (method, absolute path, headers, body) is
        base64-packed into a form POST to `fetch_server`; the fetch
        server's reply carries the remote status line, headers and a
        zlib-compressed body for text content types.
        """
        # check http method and post data
        method = self.command
        if method == "GET" or method == "HEAD":
            # no post data
            post_data_len = 0
        elif method == "POST":
            # get length of post data
            post_data_len = 0
            for header in self.headers:
                if header.lower() == "content-length":
                    post_data_len = int(self.headers[header])
                    break
            # exceed limit?
            if post_data_len > self.PostDataLimit:
                self.send_error(413, "Local proxy error, Sorry, Google's limit, file size up to 1MB.")
                self.connection.close()
                return
        else:
            # unsupported method
            self.send_error(501, "Local proxy error, Method not allowed.")
            self.connection.close()
            return
        # get post data
        post_data = ""
        if post_data_len > 0:
            post_data = self.rfile.read(post_data_len)
            if len(post_data) != post_data_len:
                # bad request
                self.send_error(400, "Local proxy error, Post data length error.")
                self.connection.close()
                return
        # do path check
        (scm, netloc, path, params, query, _) = urlparse.urlparse(self.path)
        if (scm.lower() != "http" and scm.lower() != "https") or not netloc:
            self.send_error(501, "Local proxy error, Unsupported scheme(ftp for example).")
            self.connection.close()
            return
        # create new path (fragment dropped)
        path = urlparse.urlunparse((scm, netloc, path, params, query, ""))
        # remove disallowed header
        # Range/If-Range are stripped here; large responses are re-fetched
        # in ranged chunks by processLargeResponse() instead.
        dhs = []
        for header in self.headers:
            hl = header.lower()
            if hl == "if-range":
                dhs.append(header)
            elif hl == "range":
                dhs.append(header)
        for dh in dhs:
            del self.headers[dh]
        # create request for GAppProxy
        params = urllib.urlencode({"method": method,
                                   "encoded_path": base64.b64encode(path),
                                   "headers": base64.b64encode(str(self.headers)),
                                   "postdata": base64.b64encode(post_data),
                                   "version": common.VERSION})
        # accept-encoding: identity, *;q=0
        # connection: close
        request = urllib2.Request(fetch_server)
        request.add_header("Accept-Encoding", "identity, *;q=0")
        request.add_header("Connection", "close")
        # create new opener
        if local_proxy != "":
            proxy_handler = urllib2.ProxyHandler({"http": local_proxy})
        else:
            proxy_handler = urllib2.ProxyHandler(google_proxy)
        opener = urllib2.build_opener(proxy_handler)
        # set the opener as the default opener
        urllib2.install_opener(opener)
        try:
            resp = urllib2.urlopen(request, params)
        except urllib2.HTTPError, e:
            if e.code == 404:
                self.send_error(404, "Local proxy error, Fetchserver not found at the URL you specified, please check it.")
            elif e.code == 502:
                self.send_error(502, "Local proxy error, Transmission error, or the fetchserver is too busy.")
            else:
                self.send_error(e.code)
            self.connection.close()
            return
        except urllib2.URLError, e:
            # NOTE(review): shallWeNeedGoogleProxy is not defined in this
            # view -- presumably elsewhere in the module; confirm.
            if local_proxy == "":
                shallWeNeedGoogleProxy()
            self.connection.close()
            return
        # parse resp
        # for status line
        line = resp.readline()
        words = line.split()
        status = int(words[1])
        reason = " ".join(words[2:])
        # for large response
        # 592 is the fetch server's private "response too large" status.
        if status == 592 and method == "GET":
            self.processLargeResponse(path)
            self.connection.close()
            return
        # normal response
        try:
            self.send_response(status, reason)
        except socket.error, (err, _):
            # Connection/Webpage closed before proxy return
            if err == errno.EPIPE or err == 10053: # *nix, Windows
                return
            else:
                raise
        # for headers
        text_content = True
        while True:
            line = resp.readline().strip()
            # end header?
            if line == "":
                break
            # header
            (name, _, value) = line.partition(":")
            name = name.strip()
            value = value.strip()
            # ignore Accept-Ranges
            if name.lower() == "accept-ranges":
                continue
            self.send_header(name, value)
            # check Content-Type
            if name.lower() == "content-type":
                if value.lower().find("text") == -1:
                    # not text
                    text_content = False
        self.send_header("Accept-Ranges", "none")
        self.end_headers()
        # for page
        # The fetch server zlib-compresses text bodies only.
        if text_content:
            data = resp.read()
            if len(data) > 0:
                self.wfile.write(zlib.decompress(data))
        else:
            self.wfile.write(resp.read())
        self.connection.close()
do_GET = do_METHOD
do_HEAD = do_METHOD
do_POST = do_METHOD
    def processLargeResponse(self, path):
        """Fetch an oversized response piecewise via HTTP Range requests.

        Called when the fetchserver answered status 592 (see do_METHOD):
        re-requests `path` one byte range at a time, replays the first
        part's headers to the client with a corrected Content-Length, and
        streams every part's body.  Up to 10 non-206 replies are tolerated,
        halving the part size (floor 64 KiB) on each failure.
        """
        cur_pos = 0
        part_length = 0x100000 # 1m initial, at least 64k
        first_part = True
        content_length = 0      # total size, learned from Content-Range
        text_content = True     # text parts arrive zlib-compressed
        allowed_failed = 10
        while allowed_failed > 0:
            next_pos = 0
            self.headers["Range"] = "bytes=%d-%d" % (cur_pos, cur_pos + part_length - 1)
            # create request for GAppProxy
            params = urllib.urlencode({"method": "GET",
                                       "encoded_path": base64.b64encode(path),
                                       "headers": base64.b64encode(str(self.headers)),
                                       "postdata": base64.b64encode(""),
                                       "version": common.VERSION})
            # accept-encoding: identity, *;q=0
            # connection: close
            request = urllib2.Request(fetch_server)
            request.add_header("Accept-Encoding", "identity, *;q=0")
            request.add_header("Connection", "close")
            # create new opener
            if local_proxy != "":
                proxy_handler = urllib2.ProxyHandler({"http": local_proxy})
            else:
                proxy_handler = urllib2.ProxyHandler(google_proxy)
            opener = urllib2.build_opener(proxy_handler)
            # set the opener as the default opener
            urllib2.install_opener(opener)
            resp = urllib2.urlopen(request, params)
            # parse resp
            # for status line
            line = resp.readline()
            words = line.split()
            status = int(words[1])
            # not range response?
            if status != 206:
                # reduce part_length and try again
                if part_length > 65536:
                    part_length /= 2
                allowed_failed -= 1
                continue
            # for headers
            if first_part:
                # first part: forward headers to the client once
                self.send_response(200, "OK")
                while True:
                    line = resp.readline().strip()
                    # end header?
                    if line == "":
                        break
                    # header
                    (name, _, value) = line.partition(":")
                    name = name.strip()
                    value = value.strip()
                    # get total length from Content-Range
                    nl = name.lower()
                    if nl == "content-range":
                        m = re.match(r"bytes[ \t]+([0-9]+)-([0-9]+)/([0-9]+)", value)
                        if not m or int(m.group(1)) != cur_pos:
                            # Content-Range error, fatal error
                            return
                        next_pos = int(m.group(2)) + 1
                        content_length = int(m.group(3))
                        continue
                    # ignore Content-Length
                    elif nl == "content-length":
                        continue
                    # ignore Accept-Ranges
                    elif nl == "accept-ranges":
                        continue
                    self.send_header(name, value)
                    # check Content-Type
                    if nl == "content-type":
                        if value.lower().find("text") == -1:
                            # not text
                            text_content = False
                if content_length == 0:
                    # no Content-Length, fatal error
                    return
                # advertise the full size; ranges are not offered downstream
                self.send_header("Content-Length", content_length)
                self.send_header("Accept-Ranges", "none")
                self.end_headers()
                first_part = False
            else:
                # later parts: only Content-Range matters, for cursor tracking
                while True:
                    line = resp.readline().strip()
                    # end header?
                    if line == "":
                        break
                    # header
                    (name, _, value) = line.partition(":")
                    name = name.strip()
                    value = value.strip()
                    # get total length from Content-Range
                    if name.lower() == "content-range":
                        m = re.match(r"bytes[ \t]+([0-9]+)-([0-9]+)/([0-9]+)", value)
                        if not m or int(m.group(1)) != cur_pos:
                            # Content-Range error, fatal error
                            return
                        next_pos = int(m.group(2)) + 1
                        continue
            # for body
            if text_content:
                # text bodies are zlib-compressed on the wire
                data = resp.read()
                if len(data) > 0:
                    self.wfile.write(zlib.decompress(data))
            else:
                self.wfile.write(resp.read())
            # next part?
            if next_pos == content_length:
                return
            cur_pos = next_pos
class ThreadingHTTPServer(SocketServer.ThreadingMixIn, BaseHTTPServer.HTTPServer):
    """HTTPServer that serves each request in its own thread."""
    pass
def shallWeNeedGoogleProxy():
    """Probe whether direct access works or google_proxy must be used.

    NOTE(review): the probe below is entirely commented out, so this
    function is currently a no-op -- ``google_proxy`` is never modified.
    Callers (error recovery and startup) therefore have no effect here.
    """
    global google_proxy
    # send http request directly
    #request = urllib2.Request(common.LOAD_BALANCE)
    #try:
    # avoid wait too long at startup, timeout argument need py2.6 or later.
    #    if sys.hexversion >= 0x20600f0:
    #        resp = urllib2.urlopen(request, timeout=3)
    #    else:
    #        resp = urllib2.urlopen(request)
    #    resp.read()
    #except:
    #    google_proxy = {"http": common.GOOGLE_PROXY}
def getAvailableFetchServer():
    """Ask the load-balance service for a usable fetchserver URL.

    The request goes through ``local_proxy`` when one is configured,
    otherwise through ``google_proxy`` (possibly a direct connection).

    Returns:
        The stripped response body, or "" when the service is unreachable.
    """
    request = urllib2.Request(common.LOAD_BALANCE)
    if local_proxy != "":
        proxy_handler = urllib2.ProxyHandler({"http": local_proxy})
    else:
        proxy_handler = urllib2.ProxyHandler(google_proxy)
    opener = urllib2.build_opener(proxy_handler)
    urllib2.install_opener(opener)
    try:
        resp = urllib2.urlopen(request)
        try:
            return resp.read().strip()
        finally:
            # close the response explicitly instead of leaking the socket
            resp.close()
    except Exception:
        # Narrowed from a bare ``except:`` so KeyboardInterrupt/SystemExit
        # propagate; any fetch failure still degrades to "" as before.
        return ""
def parseConf(confFile):
    """Load listen_port / local_proxy / fetch_server overrides from confFile.

    The file holds ``name = value`` pairs; blank lines and ``#`` comments
    are ignored, as are unknown names and lines without ``=``.  A missing
    file silently keeps the built-in defaults.  A non-numeric listen_port
    still raises ValueError (unchanged), but the file handle is now closed
    on every exit path -- the old readline loop leaked it on a parse error.
    """
    global listen_port, local_proxy, fetch_server
    try:
        fp = open(confFile, "r")
    except IOError:
        # no config file: use default parameters
        return
    try:
        for line in fp:
            line = line.strip()
            if line == "":
                # empty line
                continue
            if line.startswith("#"):
                # comments
                continue
            (name, sep, value) = line.partition("=")
            if sep != "=":
                # not a name=value pair, skip
                continue
            name = name.strip().lower()
            value = value.strip()
            if name == "listen_port":
                listen_port = int(value)
            elif name == "local_proxy":
                local_proxy = value
            elif name == "fetch_server":
                fetch_server = value
    finally:
        fp.close()
if __name__ == "__main__":
    # Daemonize, load config, then serve the local proxy forever.
    # do the UNIX double-fork magic, see Stevens' "Advanced
    # Programming in the UNIX Environment" for details (ISBN 0201563177)
    try:
        pid = os.fork()
        if pid > 0:
            # exit first parent
            sys.exit(0)
    except OSError, e:
        print >>sys.stderr, "fork #1 failed: %d (%s)" % (e.errno, e.strerror)
        sys.exit(1)
    # decouple from parent environment
    os.chdir("/")
    os.setsid()
    os.umask(0)
    # do second fork
    try:
        pid = os.fork()
        if pid > 0:
            sys.exit(0)
    except OSError, e:
        print >>sys.stderr, "fork #2 failed: %d (%s)" % (e.errno, e.strerror)
        sys.exit(1)
    parseConf(common.DEF_CONF_FILE)
    # cap every socket operation so a dead fetchserver cannot hang a worker
    socket.setdefaulttimeout(10)
    if local_proxy == "":
        # no-op at present; see shallWeNeedGoogleProxy
        shallWeNeedGoogleProxy()
    #if fetch_server == "":
    #    fetch_server = getAvailableFetchServer()
    if fetch_server == "":
        raise common.GAppProxyError("Invalid response from load balance server.")
    # append our pid to the GAEProxy Android app's pid file
    pid = str(os.getpid())
    f = open('/data/data/org.gaeproxy/python.pid','a')
    f.write(" ")
    f.write(pid)
    f.close()
    print "--------------------------------------------"
    print "HTTPS Enabled: %s" % (ssl_enabled and "YES" or "NO")
    print "Direct Fetch : %s" % (google_proxy and "NO" or "YES")
    print "Listen Addr  : 127.0.0.1:%d" % listen_port
    print "Local Proxy  : %s" % local_proxy
    print "Fetch Server : %s" % fetch_server
    print "PID          : %s" % pid
    print "--------------------------------------------"
    httpd = ThreadingHTTPServer(("127.0.0.1", listen_port), LocalProxyHandler)
    httpd.serve_forever()
| Python |
#!/usr/bin/env python
# Launcher: daemonize, record our pid for the GAEProxy Android app,
# then hand control to ProxyServer.main() from the bundled src.zip.
import sys, os
# do the UNIX double-fork magic, see Stevens' "Advanced
# Programming in the UNIX Environment" for details (ISBN 0201563177)
try:
    pid = os.fork()
    if pid > 0:
        # exit first parent
        sys.exit(0)
except OSError, e:
    print >>sys.stderr, "fork #1 failed: %d (%s)" % (e.errno, e.strerror)
    sys.exit(1)
# decouple from parent environment
os.chdir("/")
os.setsid()
os.umask(0)
# do second fork
try:
    pid = os.fork()
    if pid > 0:
        sys.exit(0)
except OSError, e:
    print >>sys.stderr, "fork #2 failed: %d (%s)" % (e.errno, e.strerror)
    sys.exit(1)
# append the daemon's pid to the app-managed pid file
pid = str(os.getpid())
f = open('/data/data/org.gaeproxy/python.pid','a')
f.write(" ")
f.write(pid)
f.close()
# make the zipped sources importable, relative to this script's location
dir = os.path.abspath(os.path.dirname(sys.argv[0]))
sys.path.append(os.path.join(dir, 'src.zip'))
del sys, os, dir
import ProxyServer
ProxyServer.main()
| Python |
#!/usr/bin/env python
# coding:utf-8
# Based on GAppProxy 2.0.0 by Du XiaoGang <dugang@188.com>
# Based on WallProxy 0.4.0 by hexieshe <www.ehust@gmail.com>
from __future__ import with_statement
__version__ = '1.8.11'
__author__ = "{phus.lu,hewigovens}@gmail.com (Phus Lu and Hewig Xu)"
__config__ = 'proxy.ini'
try:
import gevent, gevent.monkey
gevent.monkey.patch_all(dns=gevent.version_info[0]>=1)
except:
pass
import sys
import os
import re
import time
import errno
import binascii
import itertools
import zlib
import struct
import random
import hashlib
import fnmatch
import base64
import urlparse
import thread
import threading
import socket
import ssl
import select
import httplib
import urllib2
import BaseHTTPServer
import SocketServer
import ConfigParser
import traceback
try:
import logging
except ImportError:
logging = None
try:
import ctypes
except ImportError:
ctypes = None
try:
import OpenSSL
except ImportError:
OpenSSL = None
try:
import sqlite3
except ImportError:
sqlite3 = None
class Common(object):
    """global config object

    Reads every tunable from the Android-side proxy.ini once at import
    time; attributes are plain values consumed directly by the handlers.
    """
    def __init__(self):
        """load config from proxy.ini"""
        # loosen the option regex so values may contain ':' etc.
        ConfigParser.RawConfigParser.OPTCRE = re.compile(r'(?P<option>[^=\s][^=]*)\s*(?P<vi>[=])\s*(?P<value>.*)$')
        self.CONFIG = ConfigParser.ConfigParser()
        # GAEProxy Patch
        self.CONFIG.read('/data/data/org.gaeproxy/proxy.ini')
        # [listen] local server endpoint
        self.LISTEN_IP = self.CONFIG.get('listen', 'ip')
        self.LISTEN_PORT = self.CONFIG.getint('listen', 'port')
        self.LISTEN_VISIBLE = self.CONFIG.getint('listen', 'visible')
        # [gae] appengine fetchserver settings
        self.GAE_ENABLE = self.CONFIG.getint('gae', 'enable')
        self.GAE_APPIDS = self.CONFIG.get('gae', 'appid').replace('.appspot.com', '').split('|')
        self.GAE_PASSWORD = self.CONFIG.get('gae', 'password').strip()
        self.GAE_PATH = self.CONFIG.get('gae', 'path')
        self.GAE_PROFILE = self.CONFIG.get('gae', 'profile')
        self.GAE_MULCONN = self.CONFIG.getint('gae', 'mulconn')
        self.GAE_DEBUGLEVEL = self.CONFIG.getint('gae', 'debuglevel') if self.CONFIG.has_option('gae', 'debuglevel') else 0
        # [paas] (older configs used a [php] section for the same thing)
        paas_section = 'paas' if self.CONFIG.has_section('paas') else 'php'
        self.PAAS_ENABLE = self.CONFIG.getint(paas_section, 'enable')
        self.PAAS_LISTEN = self.CONFIG.get(paas_section, 'listen')
        self.PAAS_PASSWORD = self.CONFIG.get(paas_section, 'password') if self.CONFIG.has_option(paas_section, 'password') else ''
        self.PAAS_FETCHSERVER = self.CONFIG.get(paas_section, 'fetchserver')
        if self.CONFIG.has_section('pac'):
            # XXX, cowork with GoAgentX
            self.PAC_ENABLE = self.CONFIG.getint('pac','enable')
            self.PAC_IP = self.CONFIG.get('pac','ip')
            self.PAC_PORT = self.CONFIG.getint('pac','port')
            self.PAC_FILE = self.CONFIG.get('pac','file').lstrip('/')
            self.PAC_UPDATE = self.CONFIG.getint('pac', 'update')
            self.PAC_REMOTE = self.CONFIG.get('pac', 'remote')
            self.PAC_TIMEOUT = self.CONFIG.getint('pac', 'timeout')
            self.PAC_DIRECTS = self.CONFIG.get('pac', 'direct').split('|') if self.CONFIG.get('pac', 'direct') else []
        else:
            self.PAC_ENABLE = 0
        # [proxy] optional upstream proxy
        self.PROXY_ENABLE = self.CONFIG.getint('proxy', 'enable')
        self.PROXY_HOST = self.CONFIG.get('proxy', 'host')
        self.PROXY_PORT = self.CONFIG.getint('proxy', 'port')
        self.PROXY_USERNAME = self.CONFIG.get('proxy', 'username')
        # NOTE(review): attribute keeps the historical 'PASSWROD' typo;
        # install_opener() reads it under that exact name.
        self.PROXY_PASSWROD = self.CONFIG.get('proxy', 'password')
        # profile-specific google front-end settings
        self.GOOGLE_MODE = self.CONFIG.get(self.GAE_PROFILE, 'mode')
        self.GOOGLE_HOSTS = tuple(self.CONFIG.get(self.GAE_PROFILE, 'hosts').split('|'))
        self.GOOGLE_SITES = tuple(self.CONFIG.get(self.GAE_PROFILE, 'sites').split('|'))
        self.GOOGLE_FORCEHTTPS = frozenset(self.CONFIG.get(self.GAE_PROFILE, 'forcehttps').split('|'))
        self.GOOGLE_WITHGAE = frozenset(self.CONFIG.get(self.GAE_PROFILE, 'withgae').split('|'))
        # [fetchmax] retry budgets, local default 3
        self.FETCHMAX_LOCAL = self.CONFIG.getint('fetchmax', 'local') if self.CONFIG.get('fetchmax', 'local') else 3
        self.FETCHMAX_SERVER = self.CONFIG.get('fetchmax', 'server')
        # [autorange] piecewise download tuning
        self.AUTORANGE_HOSTS = tuple(self.CONFIG.get('autorange', 'hosts').split('|'))
        self.AUTORANGE_HOSTS_TAIL = tuple(x.rpartition('*')[2] for x in self.AUTORANGE_HOSTS)
        self.AUTORANGE_MAXSIZE = self.CONFIG.getint('autorange', 'maxsize')
        self.AUTORANGE_WAITSIZE = self.CONFIG.getint('autorange', 'waitsize')
        self.AUTORANGE_BUFSIZE = self.CONFIG.getint('autorange', 'bufsize')
        assert self.AUTORANGE_BUFSIZE <= self.AUTORANGE_WAITSIZE <= self.AUTORANGE_MAXSIZE
        if self.CONFIG.has_section('crlf'):
            # XXX, cowork with GoAgentX
            self.CRLF_ENABLE = self.CONFIG.getint('crlf', 'enable')
            self.CRLF_DNS = self.CONFIG.get('crlf', 'dns')
            self.CRLF_SITES = tuple(self.CONFIG.get('crlf', 'sites').split('|'))
            self.CRLF_CNAME = dict(x.split('=') for x in self.CONFIG.get('crlf', 'cname').split('|'))
        else:
            self.CRLF_ENABLE = 0
        self.USERAGENT_ENABLE = self.CONFIG.getint('useragent', 'enable')
        self.USERAGENT_STRING = self.CONFIG.get('useragent', 'string')
        self.LOVE_ENABLE = self.CONFIG.getint('love','enable')
        self.LOVE_TIMESTAMP = self.CONFIG.get('love', 'timestamp')
        # tip strings carry escaped \uXXXX sequences; decode them here
        self.LOVE_TIP = [re.sub(r'(?i)\\u([0-9a-f]{4})', lambda m:unichr(int(m.group(1),16)), x) for x in self.CONFIG.get('love','tip').split('|')]
        # [hosts] static DNS overrides: host -> tuple of IPs (empty = resolve lazily)
        self.HOSTS = dict((k, tuple(v.split('|')) if v else tuple()) for k, v in self.CONFIG.items('hosts'))
        self.build_gae_fetchserver()
        # (ip, port) -> (fetchhost, fetchserver) for every PAAS listener
        self.PAAS_FETCH_INFO = dict(((listen.rpartition(':')[0], int(listen.rpartition(':')[-1])), (re.sub(r':\d+$', '', urlparse.urlparse(server).netloc), server)) for listen, server in zip(self.PAAS_LISTEN.split('|'), [re.sub(r'/index\.[^/]+$','/',x) for x in self.PAAS_FETCHSERVER.split('|')]))
    def build_gae_fetchserver(self):
        """rebuild gae fetch server config"""
        if self.PROXY_ENABLE:
            # an upstream proxy cannot do our IP tricks; force https
            self.GOOGLE_MODE = 'https'
        self.GAE_FETCHHOST = '%s.appspot.com' % self.GAE_APPIDS[0]
        if not self.PROXY_ENABLE:
            # append '?' to url, it can avoid china telicom/unicom AD
            self.GAE_FETCHSERVER = '%s://%s%s?' % (self.GOOGLE_MODE, self.GAE_FETCHHOST, self.GAE_PATH)
        else:
            # proxied: connect to a google front-end IP; the Host header is
            # set to GAE_FETCHHOST later (see urlfetch)
            self.GAE_FETCHSERVER = '%s://%s%s?' % (self.GOOGLE_MODE, random.choice(self.GOOGLE_HOSTS), self.GAE_PATH)
    def install_opener(self):
        """install urllib2 opener"""
        # replace httplib's header class with our ordered implementation
        httplib.HTTPMessage = SimpleMessageClass
        if self.PROXY_ENABLE:
            proxy = '%s:%s@%s:%d'%(self.PROXY_USERNAME, self.PROXY_PASSWROD, self.PROXY_HOST, self.PROXY_PORT)
            handlers = [urllib2.ProxyHandler({'http':proxy,'https':proxy})]
        else:
            handlers = [urllib2.ProxyHandler({})]
        opener = urllib2.build_opener(*handlers)
        opener.addheaders = []
        urllib2.install_opener(opener)
    def info(self):
        """Return the human-readable startup banner string."""
        info = ''
        info += '------------------------------------------------------\n'
        info += 'GoAgent Version  : %s (python/%s pyopenssl/%s)\n' % (__version__, sys.version.partition(' ')[0], (OpenSSL.version.__version__ if OpenSSL else 'Disabled'))
        info += 'Listen Address   : %s:%d\n' % (self.LISTEN_IP,self.LISTEN_PORT)
        info += 'Local Proxy      : %s:%s\n' % (self.PROXY_HOST, self.PROXY_PORT) if self.PROXY_ENABLE else ''
        info += 'Debug Level      : %s\n' % self.GAE_DEBUGLEVEL if self.GAE_DEBUGLEVEL else ''
        info += 'GAE Mode         : %s\n' % self.GOOGLE_MODE if self.GAE_ENABLE else ''
        info += 'GAE Profile      : %s\n' % self.GAE_PROFILE
        info += 'GAE APPID        : %s\n' % '|'.join(self.GAE_APPIDS)
        if common.PAAS_ENABLE:
            for (ip, port),(fetchhost, fetchserver) in common.PAAS_FETCH_INFO.iteritems():
                info += 'PAAS Listen      : %s:%d\n' % (ip, port)
                info += 'PAAS FetchServer : %s\n' % fetchserver
        if common.PAC_ENABLE:
            info += 'Pac Server       : http://%s:%d/%s\n' % (self.PAC_IP,self.PAC_PORT,self.PAC_FILE)
        if common.CRLF_ENABLE:
            #http://www.acunetix.com/websitesecurity/crlf-injection.htm
            info += 'CRLF Injection   : %s\n' % '|'.join(self.CRLF_SITES)
        info += '------------------------------------------------------\n'
        return info
# module-wide configuration singleton, read once at import time
common = Common()
class MultiplexConnection(object):
    """multiplex tcp connection class

    Races non-blocking connect() attempts against a random sample of
    `window` candidate hosts and keeps whichever socket becomes writable
    first.  The class-level `window`/`timeout` values adapt: 10 straight
    successes shrink them by one, a full failure grows them by 50%.
    """
    retry = 3
    timeout = 8           # seconds to wait for any socket to connect
    timeout_min = 4
    timeout_max = 60
    timeout_ack = 0       # consecutive-success counter for timeout tuning
    window = 8            # how many hosts to race per attempt
    window_min = 4
    window_max = 60
    window_ack = 0        # consecutive-success counter for window tuning
    def __init__(self, hosts, port):
        self.socket = None
        self._sockets = set([])
        self.connect(hosts, port, MultiplexConnection.timeout, MultiplexConnection.window)
    def connect(self, hostlist, port, timeout, window):
        """Race up to `window` hosts, keep the first connected socket."""
        for i in xrange(MultiplexConnection.retry):
            hosts = random.sample(hostlist, window) if len(hostlist) > window else hostlist
            logging.debug('MultiplexConnection try connect hosts=%s, port=%d', hosts, port)
            socks = []
            # multiple connect start here
            for host in hosts:
                # 2 == socket.AF_INET; pick AF_INET6 for literal IPv6 hosts
                sock = socket.socket(2 if ':' not in host else socket.AF_INET6)
                sock.setblocking(0)
                #logging.debug('MultiplexConnection connect_ex (%r, %r)', host, port)
                err = sock.connect_ex((host, port))
                self._sockets.add(sock)
                socks.append(sock)
            # something happens :D  (first writable socket == connected)
            (_, outs, _) = select.select([], socks, [], timeout)
            if outs:
                self.socket = outs[0]
                self.socket.setblocking(1)
                # keep the winner out of _sockets so close() spares it
                self._sockets.remove(self.socket)
                if window > MultiplexConnection.window_min:
                    MultiplexConnection.window_ack += 1
                    if MultiplexConnection.window_ack > 10:
                        MultiplexConnection.window = window - 1
                        MultiplexConnection.window_ack = 0
                        logging.info('MultiplexConnection CONNECT port=%s OK 10 times, switch new window=%d', port, MultiplexConnection.window)
                if timeout > MultiplexConnection.timeout_min:
                    MultiplexConnection.timeout_ack += 1
                    if MultiplexConnection.timeout_ack > 10:
                        MultiplexConnection.timeout = timeout - 1
                        MultiplexConnection.timeout_ack = 0
                        logging.info('MultiplexConnection CONNECT port=%s OK 10 times, switch new timeout=%d', port, MultiplexConnection.timeout)
                break
            else:
                logging.debug('MultiplexConnection Cannot hosts %r:%r, window=%d', hosts, port, window)
        else:
            # OOOPS, cannot multiple connect: widen the race and give up
            MultiplexConnection.window = min(int(round(window*1.5)), self.window_max)
            MultiplexConnection.window_ack = 0
            MultiplexConnection.timeout = min(int(round(timeout*1.5)), self.timeout_max)
            MultiplexConnection.timeout_ack = 0
            logging.warning(r'MultiplexConnection Connect hosts %s:%s fail %d times!', hosts, port, MultiplexConnection.retry)
            raise socket.error('MultiplexConnection connect hosts=%s failed' % repr(hosts))
    def connect_single(self, hostlist, port, timeout, window):
        """Sequential fallback: try hosts one by one with a blocking connect.

        NOTE(review): raises on the first failure instead of moving on to
        the next host -- confirm whether that is intended before relying
        on it.
        """
        for host in hostlist:
            logging.debug('MultiplexConnection try connect host=%s, port=%d', host, port)
            sock = None
            try:
                sock_family = socket.AF_INET6 if ':' in host else socket.AF_INET
                sock = socket.socket(sock_family, socket.SOCK_STREAM)
                sock.settimeout(timeout)
                sock.connect((host, port))
                self.socket = sock
            except socket.error:
                if sock is not None:
                    sock.close()
                raise
    def close(self):
        """close all sockets, otherwise CLOSE_WAIT"""
        for sock in self._sockets:
            try:
                sock.close()
            except:
                pass
        del self._sockets
def socket_create_connection(address, timeout=None, source_address=None):
    """Drop-in replacement for socket.create_connection.

    Routing:
      * the GAE fetchhost and any [hosts]-listed name connect through
        MultiplexConnection over the configured IP lists;
      * everything else falls back to stdlib-style getaddrinfo iteration.

    The py2-only tuple parameter ``(host, port)`` was replaced by a normal
    ``address`` pair (identical call syntax for all callers).  The generic
    branch also had a bug: the first failed connect raised immediately,
    so the remaining getaddrinfo results were never tried -- it now tries
    every address and raises only after all of them fail, matching the
    stdlib contract.
    """
    host, port = address
    logging.debug('socket_create_connection connect (%r, %r)', host, port)
    if host == common.GAE_FETCHHOST:
        msg = 'socket_create_connection returns an empty list'
        try:
            conn = MultiplexConnection(common.GOOGLE_HOSTS, port)
            sock = conn.socket
            sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, True)
            return sock
        except socket.error:
            logging.error('socket_create_connection connect fail: (%r, %r)', common.GOOGLE_HOSTS, port)
        raise socket.error(msg)
    elif host in common.HOSTS:
        msg = 'socket_create_connection returns an empty list'
        try:
            iplist = common.HOSTS[host]
            if not iplist:
                # lazily resolve and memoize the IP list for this host
                iplist = tuple(x[-1][0] for x in socket.getaddrinfo(host, 80))
                common.HOSTS[host] = iplist
            conn = MultiplexConnection(iplist, port)
            sock = conn.socket
            sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, True)
            return sock
        except socket.error:
            logging.error('socket_create_connection connect fail: (%r, %r)', common.HOSTS[host], port)
        raise socket.error(msg)
    else:
        msg = 'getaddrinfo returns an empty list'
        for af, socktype, proto, canonname, sa in socket.getaddrinfo(host, port, 0, socket.SOCK_STREAM):
            sock = None
            try:
                sock = socket.socket(af, socktype, proto)
                if isinstance(timeout, (int, float)):
                    sock.settimeout(timeout)
                if source_address is not None:
                    sock.bind(source_address)
                sock.connect(sa)
                return sock
            except socket.error:
                # try the next resolved address instead of giving up
                if sock is not None:
                    sock.close()
        raise socket.error(msg)
# monkey-patch the stdlib so httplib/urllib2 use the resolver above
socket.create_connection = socket_create_connection
def socket_forward(local, remote, timeout=60, tick=2, bufsize=8192, maxping=None, maxpong=None, idlecall=None):
    """Pump bytes in both directions between two sockets until idle/EOF.

    The countdown `timecount` drops by `tick` each select() round and is
    refilled on traffic (to `maxping` for client->remote data, `maxpong`
    for remote->client).  `idlecall` runs once on the first idle round
    (it is cleared after use) and again on exit if still pending.
    """
    timecount = timeout
    try:
        while 1:
            timecount -= tick
            if timecount <= 0:
                # idle for the whole budget: stop forwarding
                break
            (ins, _, errors) = select.select([local, remote], [], [local, remote], tick)
            if errors:
                break
            if ins:
                for sock in ins:
                    data = sock.recv(bufsize)
                    if data:
                        if sock is local:
                            remote.sendall(data)
                            timecount = maxping or timeout
                        else:
                            local.sendall(data)
                            timecount = maxpong or timeout
                    else:
                        # peer closed its end
                        return
            else:
                if idlecall:
                    try:
                        idlecall()
                    except Exception:
                        logging.exception('socket_forward idlecall fail')
                    finally:
                        # one-shot: never call it from the loop twice
                        idlecall = None
    except Exception:
        logging.exception('socket_forward error')
        raise
    finally:
        if idlecall:
            idlecall()
def dns_resolve(host, dnsserver='8.8.8.8', dnscache=common.HOSTS, dnslock=threading.Lock()):
    """Resolve `host` over TCP against `dnsserver`, bypassing the system
    resolver (useful when local UDP DNS is poisoned).

    The mutable defaults are INTENTIONAL: `dnscache` shares common.HOSTS
    across calls and `dnslock` is the process-wide lock guarding it.
    Returns a tuple of dotted-quad strings (empty tuple on failure).
    """
    # hand-built DNS query: random 2-byte id + QNAME in length-prefixed labels
    index = os.urandom(2)
    hoststr = ''.join(chr(len(x))+x for x in host.split('.'))
    data = '%s\x01\x00\x00\x01\x00\x00\x00\x00\x00\x00%s\x00\x00\x01\x00\x01' % (index, hoststr)
    # TCP DNS prefixes the message with its 2-byte length
    data = struct.pack('!H', len(data)) + data
    if host not in dnscache:
        with dnslock:
            # double-check under the lock to avoid duplicate queries
            if host not in dnscache:
                sock = None
                try:
                    sock = socket.socket(socket.AF_INET6 if ':' in dnsserver else socket.AF_INET)
                    sock.connect((dnsserver, 53))
                    sock.sendall(data)
                    rfile = sock.makefile('rb')
                    size = struct.unpack('!H', rfile.read(2))[0]
                    data = rfile.read(size)
                    # pull every A-record rdata (4 bytes after a name pointer)
                    iplist = re.findall('\xC0.\x00\x01\x00\x01.{6}(.{4})', data)
                    iplist = tuple('.'.join(str(ord(x)) for x in s) for s in iplist)
                    logging.info('dns_resolve(host=%r) return %s', host, iplist)
                    dnscache[host] = iplist
                except socket.error:
                    logging.exception('dns_resolve(host=%r) fail', host)
                finally:
                    if sock:
                        sock.close()
    return dnscache.get(host, tuple())
# keep a reference to the pristine putrequest so the patch can be undone
# (handle_fetch_error restores it when a server answers 405)
_httplib_HTTPConnection_putrequest = httplib.HTTPConnection.putrequest
def httplib_HTTPConnection_putrequest(self, method, url, skip_host=0, skip_accept_encoding=1):
    # prepend a blank CRLF pair before the request line (the [crlf]
    # feature; see common.CRLF_*) then delegate to the original
    self._output('\r\n\r\n')
    return _httplib_HTTPConnection_putrequest(self, method, url, skip_host, skip_accept_encoding)
httplib.HTTPConnection.putrequest = httplib_HTTPConnection_putrequest
class DNSCacheUtil(object):
    '''DNSCache module, integrated with GAEProxy'''
    # in-memory address -> hostname map, pre-seeded for loopback
    cache = {"127.0.0.1": 'localhost'}
    @staticmethod
    def getHost(address):
        """Map an IP address back to a hostname via the app's dnscache db.

        Falls back to "www.google.com" when the database is unavailable
        or has no entry.  Successful lookups are memoized in `cache`.
        """
        if address in DNSCacheUtil.cache:
            return DNSCacheUtil.cache[address]
        host = "www.google.com"
        if sqlite3 is not None:
            try:
                conn = sqlite3.connect('/data/data/org.gaeproxy/databases/dnscache.db')
            except Exception:
                logging.exception('DNSCacheUtil.initConn failed')
                conn = None
            if conn is not None:
                try:
                    c = conn.cursor()
                    # SECURITY FIX: `address` arrives from client traffic;
                    # the old "... = '%s'" % address string build was
                    # SQL-injectable.  Use a bound parameter instead.
                    c.execute("select request from dnsresponse where address = ?",
                              (address,))
                    row = c.fetchone()
                    if row is not None:
                        host = row[0]
                        DNSCacheUtil.cache[address] = host
                    c.close()
                    conn.close()
                except Exception:
                    logging.exception('DNSCacheUtil.getHost failed: %s', address)
        return host
class CertUtil(object):
    '''CertUtil module, based on WallProxy 0.4.0

    Maintains a self-signed CA under /data/data/org.gaeproxy and mints
    per-host leaf certificates on demand for HTTPS interception.
    '''
    CA = None                       # (PKey, X509) tuple once checkCA() ran
    CALock = threading.Lock()       # guards on-disk cert generation
    # subjectAltName list baked into every minted certificate
    subj_alts = \
        'DNS: twitter.com, DNS: facebook.com, \
        DNS: *.twitter.com, DNS: *.twimg.com, \
        DNS: *.akamaihd.net, DNS: *.google.com, \
        DNS: *.facebook.com, DNS: *.ytimg.com, \
        DNS: *.appspot.com, DNS: *.google.com, \
        DNS: *.youtube.com, DNS: *.googleusercontent.com, \
        DNS: *.gstatic.com, DNS: *.live.com, \
        DNS: *.ak.fbcdn.net, DNS: *.ak.facebook.com, \
        DNS: *.android.com, DNS: *.fbcdn.net'
    @staticmethod
    def readFile(filename):
        """Return the binary contents of `filename`."""
        content = None
        with open(filename, 'rb') as fp:
            content = fp.read()
        return content
    @staticmethod
    def writeFile(filename, content):
        """Write `content` (stringified) to `filename` in binary mode."""
        with open(filename, 'wb') as fp:
            fp.write(str(content))
    @staticmethod
    def createKeyPair(type=None, bits=1024):
        """Generate an RSA (default) key pair of `bits` length."""
        if type is None:
            type = OpenSSL.crypto.TYPE_RSA
        pkey = OpenSSL.crypto.PKey()
        pkey.generate_key(type, bits)
        return pkey
    @staticmethod
    def createCertRequest(pkey, digest='sha1', **subj):
        """Build and self-sign an X509 CSR with subject fields from `subj`."""
        req = OpenSSL.crypto.X509Req()
        subject = req.get_subject()
        for k,v in subj.iteritems():
            setattr(subject, k, v)
        req.set_pubkey(pkey)
        req.sign(pkey, digest)
        return req
    @staticmethod
    def createCertificate(req, (issuerKey, issuerCert), serial, (notBefore,
                          notAfter), digest='sha1', host=None):
        """Sign CSR `req` with the issuer pair, adding subjectAltName
        entries (plus `host` when given); validity is relative seconds."""
        cert = OpenSSL.crypto.X509()
        cert.set_version(3)
        cert.set_serial_number(serial)
        cert.gmtime_adj_notBefore(notBefore)
        cert.gmtime_adj_notAfter(notAfter)
        cert.set_issuer(issuerCert.get_subject())
        cert.set_subject(req.get_subject())
        cert.set_pubkey(req.get_pubkey())
        alts = CertUtil.subj_alts
        if host is not None:
            alts += ", DNS: %s" % host
        cert.add_extensions([OpenSSL.crypto.X509Extension("subjectAltName",
                             True, alts)])
        cert.sign(issuerKey, digest)
        return cert
    @staticmethod
    def loadPEM(pem, type):
        """Parse a PEM string: type 0=private key, 1=CSR, 2=certificate."""
        handlers = ('load_privatekey', 'load_certificate_request', 'load_certificate')
        return getattr(OpenSSL.crypto, handlers[type])(OpenSSL.crypto.FILETYPE_PEM, pem)
    @staticmethod
    def dumpPEM(obj, type):
        """Serialize to PEM: type 0=private key, 1=CSR, 2=certificate."""
        handlers = ('dump_privatekey', 'dump_certificate_request', 'dump_certificate')
        return getattr(OpenSSL.crypto, handlers[type])(OpenSSL.crypto.FILETYPE_PEM, obj)
    @staticmethod
    def makeCA():
        """Create a fresh self-signed 2048-bit root CA; returns (key, crt) PEMs."""
        pkey = CertUtil.createKeyPair(bits=2048)
        subj = {'countryName': 'CN', 'stateOrProvinceName': 'Internet',
                'localityName': 'Cernet', 'organizationName': 'GoAgent',
                'organizationalUnitName': 'GoAgent Root', 'commonName': 'GoAgent CA'}
        req = CertUtil.createCertRequest(pkey, **subj)
        cert = CertUtil.createCertificate(req, (pkey, req), 0, (0, 60*60*24*7305)) #20 years
        return (CertUtil.dumpPEM(pkey, 0), CertUtil.dumpPEM(cert, 2))
    @staticmethod
    def makeCert(host, (cakey, cacrt), serial):
        """Mint a leaf certificate for `host` signed by the CA pair."""
        pkey = CertUtil.createKeyPair()
        subj = {'countryName': 'CN', 'stateOrProvinceName': 'Internet',
                'localityName': 'Cernet', 'organizationName': host,
                'organizationalUnitName': 'GoAgent Branch', 'commonName': host}
        req = CertUtil.createCertRequest(pkey, **subj)
        cert = CertUtil.createCertificate(req, (cakey, cacrt), serial, (0,
                                          60*60*24*7305), host=host)
        return (CertUtil.dumpPEM(pkey, 0), CertUtil.dumpPEM(cert, 2))
    # GAEProxy Patch
    @staticmethod
    def getCertificate(host):
        """Return (keyFile, crtFile) paths for `host`, minting and caching
        the pair under certs/ on first use; without pyOpenSSL the raw CA
        files are returned instead."""
        basedir = '/data/data/org.gaeproxy'
        keyFile = os.path.join(basedir, 'certs/%s.key' % host)
        crtFile = os.path.join(basedir, 'certs/%s.crt' % host)
        if os.path.exists(keyFile):
            return (keyFile, crtFile)
        if OpenSSL is None:
            keyFile = os.path.join(basedir, 'CA.key')
            crtFile = os.path.join(basedir, 'CA.crt')
            return (keyFile, crtFile)
        if not os.path.isfile(keyFile):
            with CertUtil.CALock:
                # double-check inside the lock: another thread may have won
                if not os.path.isfile(keyFile):
                    logging.info('CertUtil getCertificate for %r', host)
                    # FIXME: howto generate a suitable serial number?
                    for serial in (int(hashlib.md5(host).hexdigest(), 16), int(time.time()*100)):
                        try:
                            key, crt = CertUtil.makeCert(host, CertUtil.CA, serial)
                            CertUtil.writeFile(crtFile, crt)
                            CertUtil.writeFile(keyFile, key)
                            break
                        except Exception:
                            logging.exception('CertUtil.makeCert failed: host=%r, serial=%r', host, serial)
                    else:
                        # both serials failed: fall back to the CA files
                        keyFile = os.path.join(basedir, 'CA.key')
                        crtFile = os.path.join(basedir, 'CA.crt')
        return (keyFile, crtFile)
    @staticmethod
    def checkCA():
        #Check CA exists; generate it (and purge stale leaf certs) if not,
        #then load it into CertUtil.CA for makeCert.
        basedir = '/data/data/org.gaeproxy'
        keyFile = os.path.join(basedir, 'CA.key')
        crtFile = os.path.join(basedir, 'CA.crt')
        if not os.path.exists(keyFile):
            if not OpenSSL:
                logging.critical('CA.crt is not exist and OpenSSL is disabled, ABORT!')
                sys.exit(-1)
            key, crt = CertUtil.makeCA()
            CertUtil.writeFile(keyFile, key)
            CertUtil.writeFile(crtFile, crt)
            # old leaf certs were signed by the previous CA; drop them
            [os.remove(os.path.join('certs', x)) for x in os.listdir('certs')]
        if OpenSSL:
            keyFile = os.path.join(basedir, 'CA.key')
            crtFile = os.path.join(basedir, 'CA.crt')
            cakey = CertUtil.readFile(keyFile)
            cacrt = CertUtil.readFile(crtFile)
            CertUtil.CA = (CertUtil.loadPEM(cakey, 0), CertUtil.loadPEM(cacrt, 2))
class SimpleLogging(object):
    """Minimal stand-in for the stdlib ``logging`` module (used when the
    real one fails to import).  Mirrors the level constants and the
    logger call signatures; everything is written to stdout.
    """
    CRITICAL = 50
    FATAL = CRITICAL
    ERROR = 40
    WARNING = 30
    WARN = WARNING
    INFO = 20
    DEBUG = 10
    NOTSET = 0
    def __init__(self, *args, **kwargs):
        self.level = SimpleLogging.INFO
        if self.level > SimpleLogging.DEBUG:
            # debug output disabled by default: bind to a no-op
            self.debug = self.dummy
        self.__write = sys.stdout.write
    @classmethod
    def getLogger(cls, *args, **kwargs):
        """logging.getLogger-compatible factory (always a new instance)."""
        return cls(*args, **kwargs)
    def basicConfig(self, *args, **kwargs):
        self.level = kwargs.get('level', SimpleLogging.INFO)
        if self.level > SimpleLogging.DEBUG:
            self.debug = self.dummy
    def log(self, level, fmt, *args, **kwargs):
        # kwargs accepted for API compatibility but not rendered
        self.__write('%s - - [%s] %s\n' % (level, time.ctime()[4:-5], fmt%args))
    def dummy(self, *args, **kwargs):
        pass
    def debug(self, fmt, *args, **kwargs):
        self.log('DEBUG', fmt, *args, **kwargs)
    def info(self, fmt, *args, **kwargs):
        # CONSISTENCY FIX: forward **kwargs like every sibling method
        # (log() ignores them, so observable behavior is unchanged)
        self.log('INFO', fmt, *args, **kwargs)
    def warning(self, fmt, *args, **kwargs):
        self.log('WARNING', fmt, *args, **kwargs)
    def warn(self, fmt, *args, **kwargs):
        self.log('WARNING', fmt, *args, **kwargs)
    def error(self, fmt, *args, **kwargs):
        self.log('ERROR', fmt, *args, **kwargs)
    def exception(self, fmt, *args, **kwargs):
        self.log('ERROR', fmt, *args, **kwargs)
        traceback.print_exc(file=sys.stderr)
    def critical(self, fmt, *args, **kwargs):
        self.log('CRITICAL', fmt, *args, **kwargs)
class SimpleMessageClass(object):
    """Lightweight replacement for httplib.HTTPMessage.

    Headers are kept twice: ``dict`` maps Title-Cased names to stripped
    values for O(1) lookup, while ``headers`` preserves the raw header
    lines in arrival order so str() reproduces the original wire form.
    """
    def __init__(self, fp, seekable = 0):
        self.dict = {}
        self.headers = []
        reader = getattr(fp, 'readline', None)
        if reader:
            # fp is file-like: consume an RFC-822 header block up to the
            # blank "\r\n" line (or EOF)
            while 1:
                raw = reader(8192)
                if not raw or raw == '\r\n':
                    break
                field, _, data = raw.partition(':')
                if data:
                    self.headers.append(raw)
                    self.dict[field.title()] = data.strip()
        else:
            # fp is an iterable of (name, value) pairs
            for field, data in fp:
                field = field.title()
                self.dict[field] = data
                self.headers.append('%s: %s\r\n' % (field, data))
    def getheader(self, name, default=None):
        return self.dict.get(name.title(), default)
    def getheaders(self, name, default=None):
        return [self.getheader(name, default)]
    def addheader(self, key, value):
        self[key] = value
    def get(self, name, default=None):
        return self.dict.get(name.title(), default)
    def iteritems(self):
        return self.dict.iteritems()
    def iterkeys(self):
        return self.dict.iterkeys()
    def itervalues(self):
        return self.dict.itervalues()
    def keys(self):
        return self.dict.keys()
    def values(self):
        return self.dict.values()
    def items(self):
        return self.dict.items()
    def __getitem__(self, name):
        return self.dict[name.title()]
    def __setitem__(self, name, value):
        name = name.title()
        self.dict[name] = value
        formatted = '%s: %s\r\n' % (name, value)
        # replace the first matching raw line in place, else append
        for pos, raw in enumerate(self.headers):
            if raw.partition(':')[0].title() == name:
                self.headers[pos] = formatted
                break
        else:
            self.headers.append(formatted)
    def __delitem__(self, name):
        name = name.title()
        del self.dict[name]
        # drop every raw line for this name, mutating the list in place
        self.headers[:] = [raw for raw in self.headers
                           if raw.partition(':')[0].title() != name]
    def __contains__(self, name):
        return name.title() in self.dict
    def __len__(self):
        return len(self.dict)
    def __iter__(self):
        return iter(self.dict)
    def __str__(self):
        return ''.join(self.headers)
def urlfetch(url, payload, method, headers, fetchhost, fetchserver, password=None, dns=None, on_error=None):
    """Relay one HTTP request through a GoAgent fetchserver.

    The request is hex-encoded into k=v pairs, zlib-compressed and POSTed
    to `fetchserver`.  The reply starts with one byte: '0' means headers
    follow uncompressed and the body can be streamed from `response`;
    '1' means the whole payload is one zlib blob.  Retries up to
    common.FETCHMAX_LOCAL times with linear backoff, letting `on_error`
    swap in a new fetchhost/fetchserver between attempts.

    Returns (0, data_dict) on success or (-1, [error strings]) on failure.
    """
    errors = []
    params = {'url':url, 'method':method, 'headers':headers, 'payload':payload}
    logging.debug('urlfetch params %s', params)
    if password:
        params['password'] = password
    if common.FETCHMAX_SERVER:
        params['fetchmax'] = common.FETCHMAX_SERVER
    if dns:
        params['dns'] = dns
    # hex-encode every value so arbitrary bytes survive the form encoding
    params = '&'.join('%s=%s' % (k, binascii.b2a_hex(v)) for k, v in params.iteritems())
    for i in xrange(common.FETCHMAX_LOCAL):
        try:
            logging.debug('urlfetch %r by %r', url, fetchserver)
            request = urllib2.Request(fetchserver, zlib.compress(params, 9))
            request.add_header('Content-Type', '')
            if common.PROXY_ENABLE:
                # connecting to a bare IP: name the real host explicitly
                request.add_header('Host', fetchhost)
            response = urllib2.urlopen(request)
            compressed = response.read(1)
            data = {}
            if compressed == '0':
                # uncompressed framing: 3 big-endian uint32 (code, header
                # length, content length), then hex-encoded headers
                data['code'], hlen, clen = struct.unpack('>3I', response.read(12))
                data['headers'] = SimpleMessageClass((k, binascii.a2b_hex(v)) for k, _, v in (x.partition('=') for x in response.read(hlen).split('&')))
                data['response'] = response
            elif compressed == '1':
                # whole reply is a single zlib blob with the same framing
                rawdata = zlib.decompress(response.read())
                data['code'], hlen, clen = struct.unpack('>3I', rawdata[:12])
                data['headers'] = SimpleMessageClass((k, binascii.a2b_hex(v)) for k, _, v in (x.partition('=') for x in rawdata[12:12+hlen].split('&')))
                data['content'] = rawdata[12+hlen:12+hlen+clen]
                response.close()
            else:
                raise ValueError('Data format not match(%s)' % url)
            return (0, data)
        except Exception as e:
            if on_error:
                logging.info('urlfetch error=%s on_error=%s', str(e), str(on_error))
                data = on_error(e)
                if data:
                    newfetch = (data.get('fetchhost'), data.get('fetchserver'))
                    if newfetch != (fetchhost, fetchserver):
                        (fetchhost, fetchserver) = newfetch
                        sys.stdout.write(common.info())
            errors.append(str(e))
            # linear backoff before the next attempt
            time.sleep(i+1)
            continue
    return (-1, errors)
class GAEProxyHandler(BaseHTTPServer.BaseHTTPRequestHandler):
    # hop-by-hop / proxy-revealing headers never forwarded to the fetchserver
    skip_headers = frozenset(['Host', 'Vary', 'Via', 'X-Forwarded-For', 'Proxy-Authorization', 'Proxy-Connection', 'Upgrade', 'Keep-Alive'])
    # guards one-time per-process setup
    SetupLock = threading.Lock()
    # use the ordered header implementation for parsed requests
    MessageClass = SimpleMessageClass
    # opaque packed default host data (base64, presumably zlib-compressed)
    # -- contents not decodable from this file alone; TODO confirm
    DefaultHosts = 'eJxdztsNgDAMQ9GNIvIoSXZjeApSqc3nUVT3ZojakFTR47wSNEhB8qXhorXg+kMjckGtQM9efDKf\n91Km4W+N4M1CldNIYMu+qSVoTm7MsG5E4KPd8apInNUUMo4betRQjg=='
def handle_fetch_error(self, error):
logging.info('handle_fetch_error self.path=%r', self.path)
if isinstance(error, urllib2.HTTPError):
# http error 400/502/504, swith to https
if error.code in (400, 504) or (error.code==502 and common.GAE_PROFILE=='google_cn'):
common.GOOGLE_MODE = 'https'
logging.error('GAE Error(%s) switch to https', error)
# seems that current appid is overqouta, swith to next appid
if error.code == 503:
common.GAE_APPIDS.append(common.GAE_APPIDS.pop(0))
logging.error('GAE Error(%s) switch to appid(%r)', error, common.GAE_APPIDS[0])
# 405 method not allowed, disable CRLF
if error.code == 405:
httplib.HTTPConnection.putrequest = _httplib_HTTPConnection_putrequest
elif isinstance(error, urllib2.URLError):
if error.reason[0] in (11004, 10051, 10060, 'timed out', 10054):
# it seems that google.cn is reseted, switch to https
common.GOOGLE_MODE = 'https'
elif isinstance(error, httplib.HTTPException):
common.GOOGLE_MODE = 'https'
httplib.HTTPConnection.putrequest = _httplib_HTTPConnection_putrequest
else:
logging.warning('GAEProxyHandler.handle_fetch_error Exception %s', error)
return {}
common.build_gae_fetchserver()
return {'fetchhost':common.GAE_FETCHHOST, 'fetchserver':common.GAE_FETCHSERVER}
def fetch(self, url, payload, method, headers):
return urlfetch(url, payload, method, headers, common.GAE_FETCHHOST, common.GAE_FETCHSERVER, password=common.GAE_PASSWORD, on_error=self.handle_fetch_error)
def rangefetch(self, m, data):
m = map(int, m.groups())
if 'range' in self.headers:
content_range = 'bytes %d-%d/%d' % (m[0], m[1], m[2])
req_range = re.search(r'(\d+)?-(\d+)?', self.headers['range'])
if req_range:
req_range = [u and int(u) for u in req_range.groups()]
if req_range[0] is None:
if req_range[1] is not None:
if not (m[1]-m[0]+1==req_range[1] and m[1]+1==m[2]):
return False
if m[2] >= req_range[1]:
content_range = 'bytes %d-%d/%d' % (req_range[1], m[2]-1, m[2])
else:
if req_range[1] is not None:
if not (m[0]==req_range[0] and m[1]==req_range[1]):
return False
if m[2] - 1 > req_range[1]:
content_range = 'bytes %d-%d/%d' % (req_range[0], req_range[1], m[2])
data['headers']['Content-Range'] = content_range
data['headers']['Content-Length'] = m[2]-m[0]
elif m[0] == 0:
data['code'] = 200
data['headers']['Content-Length'] = m[2]
del data['headers']['Content-Range']
self.wfile.write('%s %d %s\r\n%s\r\n' % (self.protocol_version, data['code'], 'OK', data['headers']))
if 'response' in data:
response = data['response']
bufsize = common.AUTORANGE_BUFSIZE
if data['headers'].get('Content-Type', '').startswith('video/'):
bufsize = common.AUTORANGE_WAITSIZE
while 1:
content = response.read(bufsize)
if not content:
response.close()
break
self.wfile.write(content)
bufsize = common.AUTORANGE_BUFSIZE
else:
self.wfile.write(data['content'])
start = m[1] + 1
end = m[2] - 1
failed = 0
logging.info('>>>>>>>>>>>>>>> Range Fetch started(%r)', self.headers.get('Host'))
while start < end:
if failed > 16:
break
self.headers['Range'] = 'bytes=%d-%d' % (start, min(start+common.AUTORANGE_MAXSIZE-1, end))
retval, data = self.fetch(self.path, '', self.command, str(self.headers))
if retval != 0 or data['code'] >= 400:
failed += 1
seconds = random.randint(2*failed, 2*(failed+1))
logging.error('Range Fetch fail %d times, retry after %d secs!', failed, seconds)
time.sleep(seconds)
continue
if 'Location' in data['headers']:
logging.info('Range Fetch got a redirect location:%r', data['headers']['Location'])
self.path = data['headers']['Location']
failed += 1
continue
m = re.search(r'bytes\s+(\d+)-(\d+)/(\d+)', data['headers'].get('Content-Range',''))
if not m:
failed += 1
logging.error('Range Fetch fail %d times, data[\'headers\']=%s', failed, data['headers'])
continue
start = int(m.group(2)) + 1
logging.info('>>>>>>>>>>>>>>> %s %d' % (data['headers']['Content-Range'], end+1))
failed = 0
if 'response' in data:
response = data['response']
while 1:
content = response.read(common.AUTORANGE_BUFSIZE)
if not content:
response.close()
break
self.wfile.write(content)
else:
self.wfile.write(data['content'])
logging.info('>>>>>>>>>>>>>>> Range Fetch ended(%r)', self.headers.get('Host'))
return True
def log_message(self, fmt, *args):
host, port = self.client_address[:2]
sys.stdout.write("%s:%d - - [%s] %s\n" % (host, port, time.ctime()[4:-5], fmt%args))
def send_response(self, code, message=None):
self.log_request(code)
message = message or self.responses.get(code, ('GoAgent Notify',))[0]
self.connection.sendall('%s %d %s\r\n' % (self.protocol_version, code, message))
def end_error(self, code, message=None, data=None):
if not data:
self.send_error(code, message)
else:
self.send_response(code, message)
self.connection.sendall(data)
def setup(self):
if not common.PROXY_ENABLE and common.GAE_PROFILE != 'google_ipv6':
logging.info('resolve common.GOOGLE_HOSTS domian=%r to iplist', common.GOOGLE_HOSTS)
if any(not re.match(r'\d+\.\d+\.\d+\.\d+', x) for x in common.GOOGLE_HOSTS):
with GAEProxyHandler.SetupLock:
if any(not re.match(r'\d+\.\d+\.\d+\.\d+', x) for x in common.GOOGLE_HOSTS):
google_iplist = [host for host in common.GOOGLE_HOSTS if re.match(r'\d+\.\d+\.\d+\.\d+', host)]
google_hosts = [host for host in common.GOOGLE_HOSTS if not re.match(r'\d+\.\d+\.\d+\.\d+', host)]
try:
google_hosts_iplist = [[x[-1][0] for x in socket.getaddrinfo(host, 80)] for host in google_hosts]
need_remote_dns = google_hosts and any(len(iplist)==1 for iplist in google_hosts_iplist)
except socket.gaierror:
need_remote_dns = True
if need_remote_dns:
logging.warning('OOOPS, there are some mistake in socket.getaddrinfo, try remote dns_resolve')
google_hosts_iplist = [list(dns_resolve(host)) for host in google_hosts]
common.GOOGLE_HOSTS = tuple(set(sum(google_hosts_iplist, google_iplist)))
if len(common.GOOGLE_HOSTS) == 0:
logging.error('resolve common.GOOGLE_HOSTS domian to iplist return empty! use default iplist')
common.GOOGLE_HOSTS = zlib.decompress(base64.b64decode(self.DefaultHosts)).split('|')
common.GOOGLE_HOSTS = tuple(x for x in common.GOOGLE_HOSTS if ':' not in x)
logging.info('resolve common.GOOGLE_HOSTS domian to iplist=%r', common.GOOGLE_HOSTS)
if not common.GAE_MULCONN:
MultiplexConnection.connect = MultiplexConnection.connect_single
if not common.GAE_ENABLE:
GAEProxyHandler.do_CONNECT = GAEProxyHandler.do_CONNECT_Direct
GAEProxyHandler.do_METHOD = GAEProxyHandler.do_METHOD_Direct
GAEProxyHandler.do_GET = GAEProxyHandler.do_METHOD
GAEProxyHandler.do_POST = GAEProxyHandler.do_METHOD
GAEProxyHandler.do_PUT = GAEProxyHandler.do_METHOD
GAEProxyHandler.do_DELETE = GAEProxyHandler.do_METHOD
GAEProxyHandler.do_OPTIONS = GAEProxyHandler.do_METHOD
GAEProxyHandler.do_HEAD = GAEProxyHandler.do_METHOD
GAEProxyHandler.setup = BaseHTTPServer.BaseHTTPRequestHandler.setup
BaseHTTPServer.BaseHTTPRequestHandler.setup(self)
def do_CONNECT(self):
host, _, port = self.path.rpartition(':')
if host.endswith(common.GOOGLE_SITES) and host not in common.GOOGLE_WITHGAE:
common.HOSTS[host] = common.GOOGLE_HOSTS
return self.do_CONNECT_Direct()
elif host in common.HOSTS:
return self.do_CONNECT_Direct()
elif common.CRLF_ENABLE and host.endswith(common.CRLF_SITES):
if host not in common.HOSTS:
try:
cname = common.CRLF_CNAME[itertools.ifilter(host.endswith, common.CRLF_CNAME).next()]
except StopIteration:
cname = host
logging.info('crlf dns_resolve(host=%r, cname=%r dnsserver=%r)', host, cname, common.CRLF_DNS)
iplist = tuple(set(sum((dns_resolve(x, common.CRLF_DNS) if not re.match(r'\d+\.\d+\.\d+\.\d+', host) else (host,) for x in cname.split(',')), ())))
common.HOSTS[host] = iplist
return self.do_CONNECT_Direct()
else:
return self.do_CONNECT_Tunnel()
def do_CONNECT_Direct(self):
try:
logging.debug('GAEProxyHandler.do_CONNECT_Directt %s' % self.path)
host, _, port = self.path.rpartition(':')
port = int(port)
idlecall = None
if not common.PROXY_ENABLE:
if host in common.HOSTS:
iplist = common.HOSTS[host]
if not iplist:
common.HOSTS[host] = iplist = tuple(x[-1][0] for x in socket.getaddrinfo(host, 80))
conn = MultiplexConnection(iplist, port)
sock = conn.socket
idlecall=conn.close
else:
sock = socket.create_connection((host, port))
self.log_request(200)
self.connection.sendall('%s 200 Tunnel established\r\n\r\n' % self.protocol_version)
else:
sock = socket.create_connection((common.PROXY_HOST, common.PROXY_PORT))
if host in common.HOSTS:
iplist = common.HOSTS[host]
if not iplist:
common.HOSTS[host] = iplist = tuple(x[-1][0] for x in socket.getaddrinfo(host, 80))
conn = MultiplexConnection(iplist, port)
else:
iplist = (host,)
if 'Host' in self.headers:
del self.headers['Host']
if common.PROXY_USERNAME and 'Proxy-Authorization' not in self.headers:
self.headers['Proxy-Authorization'] = 'Basic %s' + base64.b64encode('%s:%s'%(common.PROXY_USERNAME, common.PROXY_PASSWROD))
data = '\r\n\r\n%s %s:%s %s\r\n%s\r\n' % (self.command, random.choice(iplist), port, self.protocol_version, self.headers)
sock.sendall(data)
socket_forward(self.connection, sock, idlecall=idlecall)
except Exception:
logging.exception('GAEProxyHandler.do_CONNECT_Direct Error')
finally:
try:
sock.close()
del sock
except:
pass
def do_CONNECT_Tunnel(self):
# for ssl proxy
host, _, port = self.path.rpartition(':')
p = "(?:\d{1,3}\.){3}\d{1,3}"
if re.match(p, host) is not None:
host = DNSCacheUtil.getHost(host)
keyFile, crtFile = CertUtil.getCertificate(host)
self.log_request(200)
self.connection.sendall('%s 200 OK\r\n\r\n' % self.protocol_version)
try:
self._realpath = self.path
self._realrfile = self.rfile
self._realwfile = self.wfile
self._realconnection = self.connection
self.connection = ssl.wrap_socket(self.connection, keyFile, crtFile, True)
self.rfile = self.connection.makefile('rb', self.rbufsize)
self.wfile = self.connection.makefile('wb', self.wbufsize)
self.raw_requestline = self.rfile.readline(8192)
if self.raw_requestline == '':
return
self.parse_request()
if self.path[0] == '/':
if 'Host' in self.headers:
self.path = 'https://%s:%s%s' % (self.headers['Host'].partition(':')[0], port or 443, self.path)
else:
self.path = 'https://%s%s' % (self._realpath, self.path)
self.requestline = '%s %s %s' % (self.command, self.path, self.protocol_version)
self.do_METHOD_Tunnel()
except socket.error:
logging.exception('do_CONNECT_Tunnel socket.error')
finally:
try:
self.connection.shutdown(socket.SHUT_WR)
except socket.error:
pass
self.rfile = self._realrfile
self.wfile = self._realwfile
self.connection = self._realconnection
def do_METHOD(self):
host = self.headers['Host']
if host.endswith(common.GOOGLE_SITES) and host not in common.GOOGLE_WITHGAE:
if host in common.GOOGLE_FORCEHTTPS:
self.send_response(301)
self.send_header('Location', self.path.replace('http://', 'https://'))
self.end_headers()
return
common.HOSTS[host] = common.GOOGLE_HOSTS
return self.do_METHOD_Direct()
elif host in common.HOSTS:
return self.do_METHOD_Direct()
elif common.CRLF_ENABLE and host.endswith(common.CRLF_SITES):
if host not in common.HOSTS:
try:
cname = common.CRLF_CNAME[itertools.ifilter(host.endswith, common.CRLF_CNAME).next()]
except StopIteration:
cname = host
logging.info('crlf dns_resolve(host=%r, cname=%r dnsserver=%r)', host, cname, common.CRLF_DNS)
iplist = tuple(set(sum((dns_resolve(x, common.CRLF_DNS) if re.match(r'\d+\.\d+\.\d+\.\d+', host) else (host,) for x in cname.split(',')), ())))
common.HOSTS[host] = iplist
return self.do_METHOD_Direct()
else:
return self.do_METHOD_Tunnel()
def do_METHOD_Direct(self):
scheme, netloc, path, params, query, fragment = urlparse.urlparse(self.path, 'http')
try:
host, _, port = netloc.rpartition(':')
port = int(port)
except ValueError:
host = netloc
port = 80
try:
self.log_request()
idlecall = None
if not common.PROXY_ENABLE:
if host in common.HOSTS:
iplist = common.HOSTS[host]
if not iplist:
common.HOSTS[host] = iplist = tuple(x[-1][0] for x in socket.getaddrinfo(host, 80))
conn = MultiplexConnection(iplist, port)
sock = conn.socket
idlecall = conn.close
else:
sock = socket.create_connection((host, port))
self.headers['Connection'] = 'close'
data = '\r\n\r\n%s %s %s\r\n%s\r\n' % (self.command, urlparse.urlunparse(('', '', path, params, query, '')), self.request_version, ''.join(line for line in self.headers.headers if not line.startswith('Proxy-')))
else:
sock = socket.create_connection((common.PROXY_HOST, common.PROXY_PORT))
if host in common.HOSTS:
host = random.choice(common.HOSTS[host])
else:
host = host
url = urlparse.urlunparse((scheme, host + ('' if port == 80 else ':%d' % port), path, params, query, ''))
self.headers['Host'] = netloc
self.headers['Proxy-Connection'] = 'close'
if common.PROXY_USERNAME and 'Proxy-Authorization' not in self.headers:
self.headers['Proxy-Authorization'] = 'Basic %s' + base64.b64encode('%s:%s'%(common.PROXY_USERNAME, common.PROXY_PASSWROD))
data ='\r\n\r\n%s %s %s\r\n%s\r\n' % (self.command, url, self.request_version, self.headers)
content_length = int(self.headers.get('Content-Length', 0))
if content_length > 0:
data += self.rfile.read(content_length)
sock.sendall(data)
socket_forward(self.connection, sock, idlecall=idlecall)
except Exception:
logging.exception('GAEProxyHandler.do_GET Error')
finally:
try:
sock.close()
del sock
except:
pass
def do_METHOD_Tunnel(self):
headers = self.headers
host = headers.get('Host') or urlparse.urlparse(self.path).netloc.partition(':')[0]
if self.path[0] == '/':
self.path = 'http://%s%s' % (host, self.path)
payload_len = int(headers.get('Content-Length', 0))
if payload_len:
payload = self.rfile.read(payload_len)
else:
payload = ''
if common.USERAGENT_ENABLE:
headers['User-Agent'] = common.USERAGENT_STRING
if 'Range' in headers.dict:
m = re.search('bytes=(\d+)-', headers.dict['Range'])
start = int(m.group(1) if m else 0)
headers['Range'] = 'bytes=%d-%d' % (start, start+common.AUTORANGE_MAXSIZE-1)
logging.info('autorange range=%r match url=%r', headers['Range'], self.path)
elif host.endswith(common.AUTORANGE_HOSTS_TAIL):
try:
pattern = (p for p in common.AUTORANGE_HOSTS if host.endswith(p) or fnmatch.fnmatch(host, p)).next()
logging.debug('autorange pattern=%r match url=%r', pattern, self.path)
m = re.search('bytes=(\d+)-', headers.get('Range', ''))
start = int(m.group(1) if m else 0)
headers['Range'] = 'bytes=%d-%d' % (start, start+common.AUTORANGE_MAXSIZE-1)
except StopIteration:
pass
skip_headers = self.skip_headers
strheaders = ''.join('%s: %s\r\n' % (k, v) for k, v in headers.iteritems() if k not in skip_headers)
retval, data = self.fetch(self.path, payload, self.command, strheaders)
try:
if retval == -1:
return self.end_error(502, str(data))
code = data['code']
headers = data['headers']
self.log_request(code)
if code == 206 and self.command=='GET':
content_range = headers.get('Content-Range') or headers.get('content-range') or ''
m = re.search(r'bytes\s+(\d+)-(\d+)/(\d+)', content_range)
if m and self.rangefetch(m, data):
return
content = '%s %d %s\r\n%s\r\n' % (self.protocol_version, code, self.responses.get(code, ('GoAgent Notify', ''))[0], headers)
self.connection.sendall(content)
try:
self.connection.sendall(data['content'])
except KeyError:
#logging.info('OOPS, KeyError! Content-Type=%r', headers.get('Content-Type'))
response = data['response']
while 1:
content = response.read(common.AUTORANGE_BUFSIZE)
if not content:
response.close()
break
self.connection.sendall(content)
if 'close' == headers.get('Connection',''):
self.close_connection = 1
except socket.error as e:
# Connection closed before proxy return
if e[0] in (10053, errno.EPIPE):
return
class PAASProxyHandler(GAEProxyHandler):
    """Variant of GAEProxyHandler that tunnels every request through a generic
    PAAS fetchserver instead of GAE."""
    # host -> iplist mapping loaded from the [hosts] config section in setup().
    HOSTS = {}
    def handle_fetch_error(self, error):
        # Any upstream failure: restore the stock putrequest (disables CRLF
        # injection) and let urlfetch retry with unchanged fetch info.
        logging.error('PAASProxyHandler handle_fetch_error %s', error)
        httplib.HTTPConnection.putrequest = _httplib_HTTPConnection_putrequest
    def fetch(self, url, payload, method, headers):
        # Pick the fetchserver bound to this listening address.
        fetchhost, fetchserver = common.PAAS_FETCH_INFO[self.server.server_address]
        dns = None
        host = self.headers.get('Host')
        if host in PAASProxyHandler.HOSTS:
            # NOTE(review): resolves via getaddrinfo and ignores the iplist
            # stored in HOSTS[host] - confirm this is intended.
            dns = random.choice(tuple(x[-1][0] for x in socket.getaddrinfo(host, 80)))
        return urlfetch(url, payload, method, headers, fetchhost, fetchserver, password=common.PAAS_PASSWORD, dns=dns, on_error=self.handle_fetch_error)
    def setup(self):
        # One-time bootstrap: load host mappings and pre-resolve fetchservers.
        PAASProxyHandler.HOSTS = dict((k, tuple(v.split('|')) if v else None) for k, v in common.CONFIG.items('hosts'))
        if common.PROXY_ENABLE:
            logging.info('Local Proxy is enable, PAASProxyHandler dont resole DNS')
        else:
            for fetchhost, _ in common.PAAS_FETCH_INFO.itervalues():
                logging.info('PAASProxyHandler.setup check %s is in common.HOSTS', fetchhost)
                if fetchhost not in common.HOSTS:
                    # Double-checked under the shared SetupLock.
                    with GAEProxyHandler.SetupLock:
                        if fetchhost not in common.HOSTS:
                            try:
                                logging.info('Resole PAAS fetchserver address.')
                                common.HOSTS[fetchhost] = tuple(x[-1][0] for x in socket.getaddrinfo(fetchhost, 80))
                                logging.info('Resole PAAS fetchserver address OK. %s', common.HOSTS[fetchhost])
                            except Exception:
                                logging.exception('PAASProxyHandler.setup resolve fail')
        # Route every verb through the tunnel handlers; later connections use
        # the stock BaseHTTPRequestHandler.setup.
        # NOTE(review): do_HEAD is bound to do_METHOD (the dispatcher) while
        # the other verbs go straight to do_METHOD_Tunnel - confirm intended.
        PAASProxyHandler.do_CONNECT = GAEProxyHandler.do_CONNECT_Tunnel
        PAASProxyHandler.do_GET = GAEProxyHandler.do_METHOD_Tunnel
        PAASProxyHandler.do_POST = GAEProxyHandler.do_METHOD_Tunnel
        PAASProxyHandler.do_PUT = GAEProxyHandler.do_METHOD_Tunnel
        PAASProxyHandler.do_DELETE = GAEProxyHandler.do_METHOD_Tunnel
        PAASProxyHandler.do_HEAD = PAASProxyHandler.do_METHOD
        PAASProxyHandler.setup = BaseHTTPServer.BaseHTTPRequestHandler.setup
        BaseHTTPServer.BaseHTTPRequestHandler.setup(self)
class PacServerHandler(BaseHTTPServer.BaseHTTPRequestHandler):
    """Serves the proxy auto-config (PAC) file, regenerating it daily from the
    APNIC delegation list so Chinese networks go DIRECT."""
    def _generate_pac(self):
        """Download the APNIC table and render a PAC script: CN IPv4 ranges go
        DIRECT, everything else goes through this proxy."""
        url = common.PAC_REMOTE
        logging.info('PacServerHandler._generate_pac url=%r, timeout=%r', url, common.PAC_TIMEOUT)
        content = urllib2.urlopen(url, timeout=common.PAC_TIMEOUT).read()
        cndatas = re.findall(r'(?i)apnic\|cn\|ipv4\|([0-9\.]+)\|([0-9]+)\|[0-9]+\|a.*', content)
        logging.info('PacServerHandler._generate_pac download %s bytes %s items', len(content), len(cndatas))
        assert len(cndatas) > 0
        # Turn the address count n into a netmask: mask = ~(n-1).
        cndatas = [(ip, socket.inet_ntoa(struct.pack('!I', (int(n)-1)^0xffffffff))) for ip, n in cndatas]
        # Bucket ranges by first octet so the PAC lookup scans one bucket only.
        cndataslist = [[] for i in xrange(256)]
        for ip, mask in cndatas:
            i = int(ip.partition('.')[0])
            cndataslist[i].append([ip, mask])
        if common.LISTEN_IP in ('', '0.0.0.0', '::'):
            proxy = 'PROXY %s:%d' % (socket.gethostbyname(socket.gethostname()), common.LISTEN_PORT)
        else:
            proxy = 'PROXY %s:%d' % (common.LISTEN_IP, common.LISTEN_PORT)
        PAC_TEMPLATE = '''\
//inspired from https://github.com/Leask/Flora_Pac
function FindProxyForURL(url, host)
{
    if (false %s) {
        return 'DIRECT';
    }
    var ip = dnsResolve(host);
    if (ip == null) {
        return '%s';
    }
    var lists = %s;
    var index = parseInt(ip.split('.', 1)[0], 10);
    var list = lists[index];
    for (var i in list) {
        if (isInNet(ip, list[i][0], list[i][1])) {
            return 'DIRECT';
        }
    }
    return '%s';
}'''
        directs = '||'.join(['dnsDomainIs(host, "%s")' % x for x in common.PAC_DIRECTS]) if common.PAC_DIRECTS else ''
        return PAC_TEMPLATE % (directs, proxy, repr(cndataslist), proxy)
    def do_GET(self):
        """Serve the PAC file, refreshing it when older than one day."""
        filename = os.path.join(os.path.dirname(__file__), common.PAC_FILE)
        if self.path != '/'+common.PAC_FILE or not os.path.isfile(filename):
            return self.send_error(404, 'Not Found')
        # BUGFIX: stat the resolved path; common.PAC_FILE is relative and
        # breaks when the process cwd differs (main() chdirs to '/').
        if common.PAC_UPDATE and time.time() - os.path.getmtime(filename) > 86400:
            try:
                logging.info('PacServerHandler begin sync remote pac')
                content = self._generate_pac()
                with open(filename, 'wb') as fp:
                    fp.write(content)
                logging.info('PacServerHandler end sync remote pac')
            except Exception:
                # Best effort: keep serving the stale file on failure.
                logging.exception('PacServerHandler sync remote pac failed')
        with open(filename, 'rb') as fp:
            data = fp.read()
        self.send_response(200)
        self.send_header('Content-Type', 'application/x-ns-proxy-autoconfig')
        self.end_headers()
        self.wfile.write(data)
        self.wfile.close()
class ProxyAndPacHandler(GAEProxyHandler, PacServerHandler):
    """Single-port handler: serves the PAC file itself, proxies everything else."""
    def do_GET(self):
        # Requests for the PAC file go to the PAC server; the rest is proxied.
        if self.path != '/' + common.PAC_FILE:
            GAEProxyHandler.do_METHOD(self)
        else:
            PacServerHandler.do_GET(self)
class LocalProxyServer(SocketServer.ThreadingMixIn, BaseHTTPServer.HTTPServer):
daemon_threads = True
allow_reuse_address = True
def try_show_love():
    '''If you hate this function, please go back to gappproxy/wallproxy'''
    # Windows-only easter egg: occasionally appends a "love tip" to the
    # console title and persists a timestamp into the config file.
    if ctypes and os.name == 'nt' and common.LOVE_ENABLE:
        SetConsoleTitleW = ctypes.windll.kernel32.SetConsoleTitleW
        GetConsoleTitleW = ctypes.windll.kernel32.GetConsoleTitleW
        if common.LOVE_TIMESTAMP.strip():
            common.LOVE_TIMESTAMP = int(common.LOVE_TIMESTAMP)
        else:
            # First run: record "now" so the tip only appears after a day.
            common.LOVE_TIMESTAMP = int(time.time())
            with open(__config__, 'w') as fp:
                common.CONFIG.set('love', 'timestamp', int(time.time()))
                common.CONFIG.write(fp)
        # Roughly 50% chance once more than a day has passed.
        if time.time() - common.LOVE_TIMESTAMP > 86400 and random.randint(1,10) > 5:
            title = ctypes.create_unicode_buffer(1024)
            GetConsoleTitleW(ctypes.byref(title), len(title)-1)
            SetConsoleTitleW(u'%s %s' % (title.value, random.choice(common.LOVE_TIP)))
            with open(__config__, 'w') as fp:
                common.CONFIG.set('love', 'timestamp', int(time.time()))
                common.CONFIG.write(fp)
def main():
    # Daemonize (GAEProxy/Android patch), configure logging, then start the
    # PAAS/PAC/GAE listeners and serve forever.
    # GAEProxy Patch
    # do the UNIX double-fork magic, see Stevens' "Advanced
    # Programming in the UNIX Environment" for details (ISBN 0201563177)
    try:
        pid = os.fork()
        if pid > 0:
            # exit first parent
            sys.exit(0)
    except OSError, e:
        print >>sys.stderr, "fork #1 failed: %d (%s)" % (e.errno, e.strerror)
        sys.exit(1)
    # decouple from parent environment
    os.chdir("/")
    os.setsid()
    os.umask(0)
    # do second fork
    try:
        pid = os.fork()
        if pid > 0:
            # exit second parent: the daemon is re-parented to init
            sys.exit(0)
    except OSError, e:
        print >>sys.stderr, "fork #2 failed: %d (%s)" % (e.errno, e.strerror)
        sys.exit(1)
    global logging
    if logging is None:
        # Fall back to the bundled shim when stdlib logging is unavailable.
        sys.modules['logging'] = logging = SimpleLogging()
    logging.basicConfig(level=logging.DEBUG if common.GAE_DEBUGLEVEL else logging.INFO, format='%(levelname)s - - %(asctime)s %(message)s', datefmt='[%b %d %H:%M:%S]')
    if ctypes and os.name == 'nt':
        # Windows console cosmetics and the optional love tip.
        ctypes.windll.kernel32.SetConsoleTitleW(u'GoAgent v%s' % __version__)
        if not common.LOVE_TIMESTAMP.strip():
            sys.stdout.write('Double click addto-startup.vbs could add goagent to autorun programs. :)\n')
        try_show_love()
        if not common.LISTEN_VISIBLE:
            ctypes.windll.user32.ShowWindow(ctypes.windll.kernel32.GetConsoleWindow(), 0)
    if common.GAE_APPIDS[0] == 'goagent' and not common.CRLF_ENABLE:
        # Refuse to run with the placeholder appid.
        logging.critical('please edit %s to add your appid to [gae] !', __config__)
        sys.exit(-1)
    CertUtil.checkCA()
    common.install_opener()
    sys.stdout.write(common.info())
    # Pick the socket family from the listen address (':' implies IPv6).
    LocalProxyServer.address_family = (socket.AF_INET, socket.AF_INET6)[':' in common.LISTEN_IP]
    # GAEProxy Patch
    # Append this pid so the Android wrapper can kill the daemon later.
    pid = str(os.getpid())
    f = open('/data/data/org.gaeproxy/python.pid','a')
    f.write(" ")
    f.write(pid)
    f.close()
    if common.PAAS_ENABLE:
        # One PAAS listener per configured address, each on its own thread.
        for address in common.PAAS_FETCH_INFO:
            httpd = LocalProxyServer(address, PAASProxyHandler)
            thread.start_new_thread(httpd.serve_forever, ())
    if common.PAC_ENABLE and common.PAC_PORT != common.LISTEN_PORT:
        httpd = LocalProxyServer((common.PAC_IP,common.PAC_PORT),PacServerHandler)
        thread.start_new_thread(httpd.serve_forever,())
    if common.PAC_ENABLE and common.PAC_PORT == common.LISTEN_PORT:
        # Same port: one handler serves both the PAC file and the proxy.
        httpd = LocalProxyServer((common.LISTEN_IP, common.LISTEN_PORT), ProxyAndPacHandler)
    else:
        httpd = LocalProxyServer((common.LISTEN_IP, common.LISTEN_PORT), GAEProxyHandler)
    httpd.serve_forever()
if __name__ == '__main__':
    # Run until interrupted; Ctrl-C exits quietly.
    try:
        main()
    except KeyboardInterrupt:
        pass
| Python |
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import wsgiref.handlers
import os
from google.appengine.ext import webapp
from google.appengine.ext import webapp
from google.appengine.api import users
from google.appengine.ext import db
from google.appengine.ext.webapp import template
from google.appengine.api import urlfetch
class Category(db.Model):
    """Book category lookup table."""
    # numeric id; presumably referenced by Books.category - confirm
    cid = db.IntegerProperty()
    name = db.StringProperty()
class Books(db.Model):
    """A book entry with tags, a category id and a download link."""
    name = db.StringProperty()
    # NOTE(review): property name is misspelled ('descrption');
    # AddBookHandler assigns entry.description, which this model does NOT
    # persist. Renaming would orphan stored values - fix callers or migrate.
    descrption = db.TextProperty()
    date = db.DateTimeProperty(auto_now_add=True)  # set once at creation
    tags = db.ListProperty(db.Key)  # keys of Tags entities
    category = db.IntegerProperty()
    visits = db.IntegerProperty()
    download = db.LinkProperty()
class Comments(db.Model):
    """A user comment attached to a book (reachable as book.comments)."""
    book = db.ReferenceProperty(Books,collection_name='comments')
    author = db.UserProperty()
    content = db.StringProperty(multiline=True)
    date = db.DateTimeProperty(auto_now_add=True)  # set once at creation
class Tags(db.Model):
    """A single tag; books reference it through Books.tags."""
    tag = db.StringProperty()
    @property
    def entrys(self):
        # NOTE(review): 'Entry' is not defined anywhere in this file, so this
        # property raises NameError if used; it probably should query Books
        # (which has a 'tags' list property) - confirm before relying on it.
        return Entry.gql("where tags = :1" ,self.key())
class SingleBlogHandler(webapp.RequestHandler):
    # NOTE(review): unfinished handler, apparently copied from a blog app:
    # 'Entry' is undefined in this file, the filter string is empty and no
    # response is ever written. It is also not routed in main().
    def get(self):
        query = db.Query(Entry)
        query.filter('')
class BookHandler(webapp.RequestHandler):
    """Front page: render the five newest books together with their tags."""
    def get(self):
        entries = Books.all().order('-date').fetch(limit=5)
        # Cache Tags entities so a tag shared by several books is fetched once.
        tag_cache = {}
        for entry in entries:
            entry.fetched_tags = []
            for tag_key in entry.tags:
                if tag_key not in tag_cache:
                    tag_cache[tag_key] = Tags.get(tag_key)
                entry.fetched_tags.append(tag_cache[tag_key])
        # Offer a login or logout link depending on the current user.
        if users.get_current_user():
            url = users.create_logout_url(self.request.uri)
            url_linktext = 'Logout'
        else:
            url = users.create_login_url(self.request.uri)
            url_linktext = 'Login'
        context = {
            'entries': entries,
            'url': url,
            'url_linktext': url_linktext,
        }
        page = os.path.join(os.path.dirname(__file__), 'book.html')
        self.response.out.write(template.render(page, context))
class AddBookPage(webapp.RequestHandler):
    """Render the empty 'add a book' form."""
    def get(self):
        form_path = os.path.join(os.path.dirname(__file__), 'addbook.html')
        self.response.out.write(template.render(form_path, {}))
class AddBookHandler(webapp.RequestHandler):
    """Create a Books entity from the posted form and return to the front page."""
    def post(self):
        entry = Books()
        entry.name = self.request.get('name')
        # BUGFIX: the model property is spelled 'descrption'; assigning to
        # 'description' only set a transient attribute, so the text was
        # silently dropped when the entity was saved.
        entry.descrption = self.request.get('description')
        entry.download = self.request.get('download')
        tag_string = self.request.get('tags')
        for tag_str in tag_string.split(','):
            # NOTE(review): a brand-new Tags entity is stored per submission
            # even when an identical tag exists - consider de-duplicating.
            tag = Tags()
            tag.tag = tag_str
            tag.put()
            if tag.key() not in entry.tags:
                entry.tags.append(tag.key())
        entry.put()
        self.redirect('/')
def main():
    """Map URL routes to their handlers and serve the app over CGI."""
    routes = [
        ('/', BookHandler),
        ('/write', AddBookPage),
        ('/add', AddBookHandler),
    ]
    app = webapp.WSGIApplication(routes, debug=True)
    wsgiref.handlers.CGIHandler().run(app)
if __name__ == '__main__':
    # Entry point when run as a CGI script under the App Engine runtime.
    main()
| Python |
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import wsgiref.handlers
import os
from google.appengine.ext import webapp
from google.appengine.ext import webapp
from google.appengine.api import users
from google.appengine.ext import db
from google.appengine.ext.webapp import template
from google.appengine.api import urlfetch
class Category(db.Model):
    """Book category lookup table."""
    # numeric id; presumably referenced by Books.category - confirm
    cid = db.IntegerProperty()
    name = db.StringProperty()
class Books(db.Model):
    """A book entry with tags, a category id and a download link."""
    name = db.StringProperty()
    # NOTE(review): property name is misspelled ('descrption');
    # AddBookHandler assigns entry.description, which this model does NOT
    # persist. Renaming would orphan stored values - fix callers or migrate.
    descrption = db.TextProperty()
    date = db.DateTimeProperty(auto_now_add=True)  # set once at creation
    tags = db.ListProperty(db.Key)  # keys of Tags entities
    category = db.IntegerProperty()
    visits = db.IntegerProperty()
    download = db.LinkProperty()
class Comments(db.Model):
    """A user comment attached to a book (reachable as book.comments)."""
    book = db.ReferenceProperty(Books,collection_name='comments')
    author = db.UserProperty()
    content = db.StringProperty(multiline=True)
    date = db.DateTimeProperty(auto_now_add=True)  # set once at creation
class Tags(db.Model):
    """A single tag; books reference it through Books.tags."""
    tag = db.StringProperty()
    @property
    def entrys(self):
        # NOTE(review): 'Entry' is not defined anywhere in this file, so this
        # property raises NameError if used; it probably should query Books
        # (which has a 'tags' list property) - confirm before relying on it.
        return Entry.gql("where tags = :1" ,self.key())
class SingleBlogHandler(webapp.RequestHandler):
    # NOTE(review): unfinished handler, apparently copied from a blog app:
    # 'Entry' is undefined in this file, the filter string is empty and no
    # response is ever written. It is also not routed in main().
    def get(self):
        query = db.Query(Entry)
        query.filter('')
class BookHandler(webapp.RequestHandler):
    """Front page: render the five newest books together with their tags."""
    def get(self):
        entries = Books.all().order('-date').fetch(limit=5)
        # Cache Tags entities so a tag shared by several books is fetched once.
        tag_cache = {}
        for entry in entries:
            entry.fetched_tags = []
            for tag_key in entry.tags:
                if tag_key not in tag_cache:
                    tag_cache[tag_key] = Tags.get(tag_key)
                entry.fetched_tags.append(tag_cache[tag_key])
        # Offer a login or logout link depending on the current user.
        if users.get_current_user():
            url = users.create_logout_url(self.request.uri)
            url_linktext = 'Logout'
        else:
            url = users.create_login_url(self.request.uri)
            url_linktext = 'Login'
        context = {
            'entries': entries,
            'url': url,
            'url_linktext': url_linktext,
        }
        page = os.path.join(os.path.dirname(__file__), 'book.html')
        self.response.out.write(template.render(page, context))
class AddBookPage(webapp.RequestHandler):
    """Render the empty 'add a book' form."""
    def get(self):
        form_path = os.path.join(os.path.dirname(__file__), 'addbook.html')
        self.response.out.write(template.render(form_path, {}))
class AddBookHandler(webapp.RequestHandler):
    """Create a Books entity from the posted form and return to the front page."""
    def post(self):
        entry = Books()
        entry.name = self.request.get('name')
        # BUGFIX: the model property is spelled 'descrption'; assigning to
        # 'description' only set a transient attribute, so the text was
        # silently dropped when the entity was saved.
        entry.descrption = self.request.get('description')
        entry.download = self.request.get('download')
        tag_string = self.request.get('tags')
        for tag_str in tag_string.split(','):
            # NOTE(review): a brand-new Tags entity is stored per submission
            # even when an identical tag exists - consider de-duplicating.
            tag = Tags()
            tag.tag = tag_str
            tag.put()
            if tag.key() not in entry.tags:
                entry.tags.append(tag.key())
        entry.put()
        self.redirect('/')
def main():
    """Map URL routes to their handlers and serve the app over CGI."""
    routes = [
        ('/', BookHandler),
        ('/write', AddBookPage),
        ('/add', AddBookHandler),
    ]
    app = webapp.WSGIApplication(routes, debug=True)
    wsgiref.handlers.CGIHandler().run(app)
if __name__ == '__main__':
    # Entry point when run as a CGI script under the App Engine runtime.
    main()
| Python |
#import sys,os
def main():
    """Entry point; currently a no-op placeholder."""
    # BUGFIX: the original read `def main:` which is a SyntaxError -
    # a parameter list `()` is required.
    return
if __name__ == '__main__':
    # Run only when executed as a script, not on import.
    main()
| Python |
import xmlrpclib
from SimpleXMLRPCServer import SimpleXMLRPCServer
def debug(message):
print "[Debug] " + message
return "OK"
def info(message):
print "[Infor] " + message
# Expose debug() over XML-RPC on localhost:8888 and block serving requests.
server = SimpleXMLRPCServer(("localhost",8888))
print "Listening on port 8888"
server.register_function(debug,"debug")
# NOTE(review): info() is defined above but never registered - confirm
# whether it should be exposed as well.
server.serve_forever()
import os
import sys
# Make the application package importable and keep setuptools egg extraction
# inside an app-local, writable cache directory.
sys.path.append('/var/www/python/application')
os.environ['PYTHON_EGG_CACHE'] = '/var/www/python/application/.python-egg'
def application(environ, start_response):
    """Minimal WSGI app: always answer 200 with a fixed plain-text body."""
    body = 'path /var/www/python/application'
    headers = [
        ('Content-type', 'text/plain'),
        ('Content-Length', str(len(body))),
    ]
    start_response('200 OK', headers)
    return [body]
| Python |
s = i = 0
while True:
term = 1.0 / (i*2+1)
s += term * ((-1)**i)
if term < 1e-6: break
i += 1
print "%.6lf" % s
| Python |
from sys import stdin
a = map(int, stdin.readline().strip().split())
print "%d %d %.3lf" % (min(a), max(a), float(sum(a)) / len(a))
| Python |
from sys import stdin
from math import *
n = int(stdin.readline().strip())
print sum(map(factorial, range(1,n+1))) % (10**6)
| Python |
from sys import stdin
def solve(a, b, c):
for i in range(10, 101):
if i % 3 == a and i % 5 == b and i % 7 == c:
print i
return
print 'No answer'
a, b, c = map(int, stdin.readline().strip().split())
solve(a, b, c)
| Python |
from itertools import product
sol = [a*100+b*10+c for a,b,c in product(range(1,10), range(10), range(10)) if a**3+b**3+c**3 == a*100+b*10+c]
print '\n'.join(map(str, sol))
| Python |
from sys import stdin
n = int(stdin.readline().strip())
count = n*2-1
for i in range(n):
print ' '*i + '#'*count
count -= 2
| Python |
from sys import stdin
n, m = map(int, stdin.readline().strip().split())
print "%.5lf" % sum([1.0/i/i for i in range(n,m+1)])
| Python |
from sys import stdin
print len(stdin.readline().strip())
| Python |
from sys import stdin
from decimal import *
a, b, c = map(int, stdin.readline().strip().split())
getcontext().prec = c
print Decimal(a) / Decimal(b)
| Python |
from itertools import product
from math import *
def issqrt(n):
s = int(floor(sqrt(n)))
return s*s == n
aabb = [a*1100+b*11 for a,b in product(range(1,10),range(10))]
print ' '.join(map(str, filter(issqrt, aabb)))
| Python |
from sys import stdin
data = map(int, stdin.readline().strip().split())
n, m = data[0], data[-1]
data = data[1:-1]
print len(filter(lambda x: x < m, data))
| Python |
for abc in range(123, 329):
big = str(abc) + str(abc*2) + str(abc*3)
if(''.join(sorted(big)) == '123456789'): print abc, abc*2, abc*3
| Python |
# Print the n-th harmonic number 1 + 1/2 + ... + 1/n (Python 2).
from sys import stdin
n = int(stdin.readline().strip())
print "%.3lf" % sum([1.0/x for x in range(1,n+1)])
| Python |
from sys import stdin
def cycle(n):
    """Return the number of Collatz (3n+1) steps needed to reach 1 from n."""
    # NOTE(review): recursive -- a very long chain could exceed the
    # interpreter's recursion limit for large inputs.
    if n == 1: return 0
    elif n % 2 == 1: return cycle(n*3+1) + 1
    else: return cycle(n/2) + 1
n = int(stdin.readline().strip())
print cycle(n)
| Python |
# Price n items at 95 each; totals of 300 or more get a 15% discount (Python 2).
from sys import stdin
n, = map(int, stdin.readline().strip().split())
money = n * 95
if money >= 300: money *= 0.85
print "%.2lf" % money
| Python |
# Print the Euclidean distance between points (x1, y1) and (x2, y2) (Python 2).
from sys import stdin
from math import *
x1, y1, x2, y2 = map(float, stdin.readline().strip().split())
print "%.3lf" % hypot((x1-x2), (y1-y2))
| Python |
# Print "yes" when n is even, "no" when odd (n % 2 indexes the pair) (Python 2).
from sys import stdin
n, = map(int, stdin.readline().strip().split())
print ["yes", "no"][n % 2]
| Python |
# Chickens-and-rabbits puzzle: n heads and m legs. Chickens a = (4n - m)/2,
# rabbits b = n - a; reject odd leg totals or negative counts (Python 2).
from sys import stdin
n, m = map(int, stdin.readline().strip().split())
a = (4*n-m)/2
b = n-a
if m % 2 == 1 or a < 0 or b < 0: print "No answer"
else: print a, b
| Python |
# Print sin and cos of an angle given in degrees (Python 2).
from sys import stdin
from math import *
n, = map(int, stdin.readline().strip().split())
rad = radians(n)
print "%.3lf %.3lf" % (sin(rad), cos(rad))
| Python |
# Print "yes" if the given year is a leap year, otherwise "no" (Python 2).
from sys import stdin
from calendar import isleap
year, = map(int, stdin.readline().strip().split())
if isleap(year): print "yes"
else: print "no"
| Python |
# Read two integers and print them in swapped order (Python 2).
from sys import stdin
a, b = map(int, stdin.readline().strip().split())
print b, a
| Python |
# Convert a Fahrenheit temperature to Celsius (Python 2).
from sys import stdin
f, = map(float, stdin.readline().strip().split())
print "%.3lf" % (5*(f-32)/9)
| Python |
# Classify three side lengths: right triangle -> "yes",
# triangle inequality fails -> "not a triangle", otherwise -> "no" (Python 2).
from sys import stdin
a, b, c = map(int, stdin.readline().strip().split())
if a*a + b*b == c*c or a*a + c*c == b*b or b*b + c*c == a*a: print "yes"
elif a + b <= c or a + c <= b or b + c <= a: print "not a triangle"
else: print "no"
| Python |
# Print three integers in ascending order (Python 2).
from sys import stdin
a = map(int, stdin.readline().strip().split())
a.sort()
print a[0], a[1], a[2]
| Python |
# Print 1 + 2 + ... + n via the closed form n(n+1)/2 (Python 2).
from sys import stdin
n, = map(int, stdin.readline().strip().split())
print n*(n+1)/2
| Python |
# Print the absolute value of a real number (Python 2).
from sys import stdin
x, = map(float, stdin.readline().strip().split())
print abs(x)
| Python |
# Print the arithmetic mean of three integers to three decimals (Python 2).
from sys import stdin
a, b, c = map(int, stdin.readline().strip().split())
print "%.3lf" % ((a+b+c)/3.0)
| Python |
# Print the total surface area of a cylinder: two end discs plus the side (Python 2).
from sys import stdin
from math import *
r, h = map(float, stdin.readline().strip().split())
print "Area = %.3lf" % (pi*r*r*2 + 2*pi*r*h)
| Python |
# Print the first whitespace-separated token reversed (Python 2).
# NOTE(review): assumes the token has at least three characters and uses
# exactly three -- confirm against the problem statement.
from sys import stdin
n = stdin.readline().strip().split()[0]
print '%c%c%c' % (n[2], n[1], n[0])
| Python |
#!/usr/bin/env python
# vim: set filetype=python expandtab tabstop=2 shiftwidth=2 autoindent smartindent:
# -*- coding: utf-8 -*-
#
from google.appengine.ext import db
from google.appengine.api import users
class Categories(db.Expando):
  """Category taxonomy for places: name, sub-category name, description."""
  name = db.StringProperty(required=True)
  subname = db.StringProperty(required=True)
  description = db.StringProperty(required=True, multiline=True)
class Place(db.Expando):
  """A single place: name, category reference, geo point and description."""
  name = db.StringProperty(required=True)
  category = db.ReferenceProperty(Categories)
  # Superseded by the single GeoPtProperty below.
  #latitude = db.StringProperty(required=True)
  #longitude = db.StringProperty(required=True)
  place = db.GeoPtProperty(required=True)
  description = db.StringProperty(required=True, multiline=True)
class Event(db.Expando):
  """An event that happens at a place during a particular time window."""
  name = db.StringProperty(required=True)
  place = db.ReferenceProperty(Place)
  startdate = db.DateTimeProperty(required=True)
  stopdate = db.DateTimeProperty(required=True)
  description = db.StringProperty(required=True, multiline=True)
def main():
  """Seed the datastore with one sample category, place and event."""
  # Fix: the original used datetime.datetime(...) below without ever
  # importing datetime, which raised NameError at runtime.
  import datetime
  c = Categories(key_name='hotel', name=u'宾馆', subname=u'五星级宾馆', description=u'价格最低500起')
  c.put()
  # NOTE(review): place is passed as a "lat, lon" string -- verify that
  # GeoPtProperty coerces it; otherwise construct db.GeoPt explicitly.
  p = Place(key_name='dachong', name=u'大冲宾馆', category=c, place="22, 114", description=u'公司对面')
  p.put()
  e = Event(key_name='chistmas', name=u'圣诞打折', place=p, startdate=datetime.datetime(2007, 8, 20, 10, 10), stopdate=datetime.datetime(2007, 9, 20, 10, 10), description=u'圣诞旅游打折')
  e.put()
if __name__ == '__main__':
  main()
| Python |
# Minimal CGI response: a text/plain header, the mandatory blank
# separator line, then the body (Python 2).
print 'Content-Type: text/plain'
print ''
print 'Hello, world!'
| Python |
#!/usr/bin/env python
# vim: set filetype=python expandtab tabstop=2 shiftwidth=2 autoindent smartindent:
# -*- coding: utf-8 -*-
#
from google.appengine.ext import webapp
import os
# test print envs
class PrintEnvironment(webapp.RequestHandler):
  """Debug handler: dump every process environment variable as HTML lines."""
  def get(self):
    for name in os.environ.keys():
      self.response.out.write("%s = %s<br />\n" % (name, os.environ[name]))
| Python |
#!/usr/bin/env python
# vim: set filetype=python expandtab tabstop=2 shiftwidth=2 autoindent smartindent:
# -*- coding: utf-8 -*-
#
import cgi
import datetime
import logging
from google.appengine.ext import db
from google.appengine.api import users
from google.appengine.ext import webapp
from google.appengine.ext.webapp.util import run_wsgi_app
from google.appengine.api import images
#logging.getLogger().setLevel(logging.DEBUG)
class Greeting(db.Model):
  """One guestbook entry: optional author, message text, avatar blob, timestamp."""
  author = db.UserProperty()
  content = db.StringProperty(multiline=True)
  # Raw image bytes uploaded with the entry.
  avatar = db.BlobProperty()
  date = db.DateTimeProperty(auto_now_add=True)
class MainPage(webapp.RequestHandler):
  """Render the ten most recent guestbook entries plus the signing form."""
  def get(self):
    self.response.out.write('<html><body>')
    # Newest entries first, capped at ten.
    query_str = "SELECT * FROM Greeting ORDER BY date DESC LIMIT 10"
    greetings = db.GqlQuery (query_str)
    for greeting in greetings:
      if greeting.author:
        self.response.out.write('<b>%s</b> wrote:' % greeting.author.nickname())
      else:
        self.response.out.write('An anonymous person wrote:')
      # The entry's datastore key doubles as the image id served by /img.
      self.response.out.write("<div><img src='img?img_id=%s'></img>" %
          greeting.key())
      # Message text is HTML-escaped before being echoed back.
      self.response.out.write(' %s</div>' %
          cgi.escape(greeting.content))
    self.response.out.write("""
          <form action="/sign" enctype="multipart/form-data" method="post">
            <div><label>Message:</label></div>
            <div><textarea name="content" rows="3" cols="60"></textarea></div>
            <div><label>Avatar:</label></div>
            <div><input type="file" name="img"/></div>
            <div><input type="submit" value="Sign Guestbook"></div>
          </form>
        </body>
      </html>""")
class Image (webapp.RequestHandler):
  """Serve a stored avatar; img_id is the datastore key of a Greeting."""
  def get(self):
    greeting = db.get(self.request.get("img_id"))
    if greeting.avatar:
      # NOTE(review): content type is hard-coded to PNG -- confirm the
      # format actually produced by the upload/resize path.
      self.response.headers['Content-Type'] = "image/png"
      self.response.out.write(greeting.avatar)
    else:
      self.response.out.write("No image")
class Guestbook(webapp.RequestHandler):
  """Handle the signing form POST: store message plus a 32x32 resized avatar."""
  def post(self):
    greeting = Greeting()
    # Record the author only when a user is signed in.
    if users.get_current_user():
      greeting.author = users.get_current_user()
    greeting.content = self.request.get("content")
    # Normalize every upload to a 32x32 thumbnail before storing.
    avatar = images.resize(self.request.get("img"), 32, 32)
    greeting.avatar = db.Blob(avatar)
    greeting.put()
    self.redirect('/')
| Python |
#!/usr/bin/env python
# vim: set filetype=python expandtab tabstop=2 shiftwidth=2 autoindent smartindent:
# -*- coding: utf-8 -*-
#
from google.appengine.ext import webapp
from google.appengine.ext import db
import os
class Point(db.Expando):
  """ save a point with a comment """
  # Latitude/longitude are stored as raw strings, exactly as received.
  lat = db.StringProperty(required=True)
  lng = db.StringProperty(required=True)
  comment = db.StringProperty(required=True)
# test print envs
class SavePoint(webapp.RequestHandler):
  """Store a lat/lng/comment triple passed as GET query parameters."""
  def get(self):
    lat = self.request.get('lat')
    lng = self.request.get('lng')
    comment = self.request.get('comment')
    p = Point(lat=lat, lng=lng, comment=comment)
    key = p.put()
    # NOTE(review): user input is echoed back unescaped (XSS risk), and
    # %d assumes the key has a numeric id rather than a string key_name.
    self.response.out.write('Point (%s,%s [%s]) Saved with key (%d) Succ!' % (lat, lng, comment, key.id_or_name()))
| Python |
#!/usr/bin/env python
# vim: set filetype=python expandtab tabstop=2 shiftwidth=2 autoindent smartindent:
# -*- coding: utf-8 -*-
#
from google.appengine.ext import db
from google.appengine.api import users
class Categories(db.Expando):
  """Category taxonomy for places: name, sub-category name, description."""
  name = db.StringProperty(required=True)
  subname = db.StringProperty(required=True)
  description = db.StringProperty(required=True, multiline=True)
class Place(db.Expando):
  """A single place: name, category reference, geo point and description."""
  name = db.StringProperty(required=True)
  category = db.ReferenceProperty(Categories)
  # Superseded by the single GeoPtProperty below.
  #latitude = db.StringProperty(required=True)
  #longitude = db.StringProperty(required=True)
  place = db.GeoPtProperty(required=True)
  description = db.StringProperty(required=True, multiline=True)
class Event(db.Expando):
  """An event that happens at a place during a particular time window."""
  name = db.StringProperty(required=True)
  place = db.ReferenceProperty(Place)
  startdate = db.DateTimeProperty(required=True)
  stopdate = db.DateTimeProperty(required=True)
  description = db.StringProperty(required=True, multiline=True)
def main():
  """Seed the datastore with one sample category, place and event."""
  # Fix: the original used datetime.datetime(...) below without ever
  # importing datetime, which raised NameError at runtime.
  import datetime
  c = Categories(key_name='hotel', name=u'宾馆', subname=u'五星级宾馆', description=u'价格最低500起')
  c.put()
  # NOTE(review): place is passed as a "lat, lon" string -- verify that
  # GeoPtProperty coerces it; otherwise construct db.GeoPt explicitly.
  p = Place(key_name='dachong', name=u'大冲宾馆', category=c, place="22, 114", description=u'公司对面')
  p.put()
  e = Event(key_name='chistmas', name=u'圣诞打折', place=p, startdate=datetime.datetime(2007, 8, 20, 10, 10), stopdate=datetime.datetime(2007, 9, 20, 10, 10), description=u'圣诞旅游打折')
  e.put()
if __name__ == '__main__':
  main()
| Python |
#!/usr/bin/env python
# vim: set filetype=python expandtab tabstop=2 shiftwidth=2 autoindent smartindent:
# -*- coding: utf-8 -*-
#
from google.appengine.api import urlfetch
from google.appengine.ext import webapp
# test fetch url
class Fetch(webapp.RequestHandler):
  """Show a form asking the user for a URL to fetch server-side."""
  def get(self):
    self.response.out.write("""
    <html>
    <head>Fetch A Url</head>
    <body>
    <form action="/fetchme" enctype="multipart/form-data" method="post">
    <div><label>Plese input a valid url(begin with http):</label></div>
    <div><input type="text" name="url"/></div>
    <div><input type="submit" value="Fetch me!"></div>
    </body>
    </html>""")
class Fetchme(webapp.RequestHandler):
  """Fetch the posted URL; echo its body on 200, its headers otherwise."""
  def post(self):
    url = self.request.get("url")
    # NOTE(review): url comes straight from the client -- this is an open
    # server-side fetch proxy; confirm that is intended.
    result = urlfetch.fetch(url)
    if result.status_code == 200:
      self.response.out.write(result.content)
    else :
      self.response.out.write(str(result.headers))
| Python |
#!/usr/bin/env python
# vim: set filetype=python expandtab tabstop=2 shiftwidth=2 autoindent smartindent:
# -*- coding: utf-8 -*-
#
import os
import wsgiref.handlers
#import cgi
import datetime
import logging
from google.appengine.api import mail
from google.appengine.api import memcache
from google.appengine.api import urlfetch
from google.appengine.api import users
from google.appengine.api import images
from google.appengine.ext import db
#from google.appengine.ext.db import djangoforms
from google.appengine.ext import webapp
from google.appengine.ext.webapp import template
from google.appengine.ext.webapp import util
from google.appengine.ext.webapp.util import run_wsgi_app
from django.utils import simplejson
from printenv import PrintEnvironment
from savePoint import SavePoint
from guestbook import *
from fetchurl import *
from jsonrpc import *
logging.getLogger().setLevel(logging.DEBUG)
# path router
# URL -> handler routing table for the whole application.
application = webapp.WSGIApplication([
  ('/', MainPage),
  ('/img', Image),
  ('/sign', Guestbook),
  ('/printenv', PrintEnvironment),
  ('/savepoint', SavePoint),
  ('/fetch', Fetch),
  ('/fetchme', Fetchme),
  ('/rpc', RPCHandler)
  ], debug=True)
def main():
  """Run the WSGI application via App Engine's request adapter."""
  run_wsgi_app(application)
if __name__ == '__main__':
  main()
| Python |
#!/usr/bin/env python
# vim: set filetype=python expandtab tabstop=2 shiftwidth=2 autoindent smartindent:
# -*- coding: utf-8 -*-
#
from google.appengine.ext import webapp
import os
# test print envs
class PrintEnvironment(webapp.RequestHandler):
  """Debug handler: dump every process environment variable as HTML lines."""
  def get(self):
    for name in os.environ.keys():
      self.response.out.write("%s = %s<br />\n" % (name, os.environ[name]))
| Python |
Subsets and Splits
SQL Console for ajibawa-2023/Python-Code-Large
Provides a useful breakdown of language distribution in the training data, showing which languages have the most samples and helping identify potential imbalances across different language groups.