code stringlengths 1 1.72M | language stringclasses 1 value |
|---|---|
#!/usr/bin/env python
import os
import optparse
import subprocess
import sys
here = os.path.dirname(__file__)
def main():
    """
    Compress the admin app's jQuery-based JavaScript files with the
    Google Closure Compiler.

    With no file arguments, defaults to the bundled admin scripts.
    Requires the Closure Compiler jar file and Java version 6 or later.
    """
    usage = "usage: %prog [file1..fileN]"
    description = """With no file paths given this script will automatically
compress all jQuery-based files of the admin app. Requires the Google Closure
Compiler library and Java version 6 or later."""
    parser = optparse.OptionParser(usage, description=description)
    parser.add_option("-c", dest="compiler", default="~/bin/compiler.jar",
                      help="path to Closure Compiler jar file")
    parser.add_option("-v", "--verbose",
                      action="store_true", dest="verbose")
    parser.add_option("-q", "--quiet",
                      action="store_false", dest="verbose")
    (options, args) = parser.parse_args()
    compiler = os.path.expanduser(options.compiler)
    if not os.path.exists(compiler):
        sys.exit("Google Closure compiler jar file %s not found. Please use the -c option to specify the path." % compiler)
    if not args:
        if options.verbose:
            sys.stdout.write("No filenames given; defaulting to admin scripts\n")
        args = [os.path.join(here, f) for f in [
            "actions.js", "collapse.js", "inlines.js", "prepopulate.js"]]
    for arg in args:
        if not arg.endswith(".js"):
            arg = arg + ".js"
        to_compress = os.path.expanduser(arg)
        if os.path.exists(to_compress):
            to_compress_min = "%s.min.js" % "".join(arg.rsplit(".js"))
            # Bug fix: build the command as an argument list. The old code
            # formatted a single string and ran cmd.split(), which breaks
            # for any jar/file path containing spaces.
            cmd = ["java", "-jar", compiler, "--js", to_compress,
                   "--js_output_file", to_compress_min]
            if options.verbose:
                sys.stdout.write("Running: %s\n" % " ".join(cmd))
            subprocess.call(cmd)
        else:
            sys.stdout.write("File %s not found. Sure it exists?\n" % to_compress)

if __name__ == '__main__':
    main()
| Python |
from django.db import models
from django.conf import settings
class Language(models.Model):
    """A site language selectable in the admin (one row per culture)."""
    culture = models.CharField(max_length=5)
    name = models.CharField(max_length=30)
    image = models.ImageField(upload_to='core/language')
    default = models.BooleanField(default=False)
    active = models.BooleanField(default=True)

    def __unicode__(self):
        """Display the language by its human-readable name."""
        return self.name

    def list_image(self):
        """Render the flag image as an <img> tag for admin list columns."""
        url = settings.UPLOADS_URL
        return '<img src="%s%s"/>' % (url, self.image)
    list_image.allow_tags = True
| Python |
"""
This file demonstrates two different styles of tests (one doctest and one
unittest). These will both pass when you run "manage.py test".
Replace these with more appropriate tests for your application.
"""
from django.test import TestCase
class SimpleTest(TestCase):
    def test_basic_addition(self):
        """
        Tests that 1 + 1 always equals 2.
        """
        # failUnlessEqual is a long-deprecated alias (removed in newer
        # Python); assertEqual is the canonical spelling.
        self.assertEqual(1 + 1, 2)

__test__ = {"doctest": """
Another way to test that 1 + 1 is equal to 2.
>>> 1 + 1 == 2
True
"""}
| Python |
# Create your views here.
| Python |
from core.language.models import Language
from django.contrib import admin
from functions.admin import AdminFunctions
class LanguageAdmin(admin.ModelAdmin, AdminFunctions):
    """Admin configuration for the Language model."""
    list_display = ('name', 'culture', 'list_image', 'default', 'active',
                    'actions_button')
    ordering = ('-name',)
    search_fields = ('name',)

admin.site.register(Language, LanguageAdmin)
from django.db import models
# Create your models here.
| Python |
"""
This file demonstrates two different styles of tests (one doctest and one
unittest). These will both pass when you run "manage.py test".
Replace these with more appropriate tests for your application.
"""
from django.test import TestCase
class SimpleTest(TestCase):
    def test_basic_addition(self):
        """
        Tests that 1 + 1 always equals 2.
        """
        # failUnlessEqual is a long-deprecated alias (removed in newer
        # Python); assertEqual is the canonical spelling.
        self.assertEqual(1 + 1, 2)

__test__ = {"doctest": """
Another way to test that 1 + 1 is equal to 2.
>>> 1 + 1 == 2
True
"""}
| Python |
# Create your views here.
| Python |
from datetime import datetime
from django.db import models
from django.conf import settings
import mptt
from multilingual.translation import TranslationModel
from multilingual.manager import MultilingualManager
class Tree(models.Model):
    """
    A hierarchical (MPTT) content node with a translated name.

    The translated ``name`` lives on the inner ``Translation`` model; the
    multilingual machinery turns it into proxy attributes on Tree.
    """
    slug = models.SlugField()
    image = models.ImageField(upload_to=settings.UPLOADS_DIR + '/core/tree', blank=True, null=True)
    parent = models.ForeignKey('self', null=True, blank=True)
    link = models.BooleanField(default=True)
    # datetime.now passed as a callable -- evaluated per save, not at import.
    created = models.DateTimeField(default=datetime.now)
    updated = models.DateTimeField(default=datetime.now)
    active = models.BooleanField(default=True)

    class Translation(TranslationModel):
        # One row per language; see multilingual.translation.
        name = models.CharField(max_length=255)

    class Meta:
        ordering = ['tree_id', 'lft']

    def __unicode__(self):
        # ``name`` is a translated proxy attribute; reading it can raise
        # when no translation exists, so fall back to a placeholder.
        # Bug fix: the bare ``except:`` also swallowed KeyboardInterrupt
        # and SystemExit -- narrow it to Exception.
        try:
            return u"%s" % (self.name)
        except Exception:
            return u"-not-available-"

mptt.register(Tree, order_insertion_by=['slug'])
"""
This file demonstrates two different styles of tests (one doctest and one
unittest). These will both pass when you run "manage.py test".
Replace these with more appropriate tests for your application.
"""
from django.test import TestCase
class SimpleTest(TestCase):
    def test_basic_addition(self):
        """
        Tests that 1 + 1 always equals 2.
        """
        # failUnlessEqual is a long-deprecated alias (removed in newer
        # Python); assertEqual is the canonical spelling.
        self.assertEqual(1 + 1, 2)

__test__ = {"doctest": """
Another way to test that 1 + 1 is equal to 2.
>>> 1 + 1 == 2
True
"""}
| Python |
from django.db import models
from django.template import Library
from core.tree.models import Tree
register = Library()
@register.inclusion_tag("admin/tree/tree/change_list_results.html")
def core_result_list(cl):
    """
    Inclusion tag feeding the admin change-list template with all Tree
    nodes (the template handles the hierarchy rendering).
    """
    nodes = Tree.objects.all()
    return {'cl': cl,
            'nodes': nodes}
def buildTree(array, tree):
    """
    Walk *tree* once: each direct child of *array* (matched via
    ``parent_id``) is replaced by its recursively built sub-list, while
    every other node contributes *array* itself to the result.
    """
    return [buildTree(node, tree) if node.parent_id == array.id else array
            for node in tree]
# Create your views here.
| Python |
from django.contrib import admin
from django.utils.safestring import mark_safe
from django.utils.translation import ugettext as _
from mptt.admin import MPTTModelAdmin
from mptt.forms import MPTTAdminForm, TreeNodeChoiceField
from multilingual.admin import MultilingualModelAdmin
from core.tree.models import Tree
from functions.admin import AdminFunctions
class CustomMPTTModelAdmin(MultilingualModelAdmin, MPTTModelAdmin, AdminFunctions):
    """
    Tree admin combining multilingual fields with MPTT tree editing.
    """
    use_prepopulated_fields = {'slug': ('name',)}
    list_display = ('name', 'slug', 'created', 'updated', 'link', 'active', 'actions_button')
    use_fieldsets = (
        (_('Name'), {'fields': ('name',)}),
        (_('Options'), {'fields': ('slug', 'image', 'parent',)}),
        (_('Advanced options'), {'classes': ('collapse',), 'fields': ('link', 'created', 'active',)}),
    )
    list_filter = ('link', 'active')

    def formfield_for_foreignkey(self, db_field, request, **kwargs):
        # Bug fix: the original tested ``issubclass(db_field.rel.to,
        # MPTTModel)`` only to run the exact same call in both branches;
        # the dead check and its local import are removed.
        # ``super(admin.ModelAdmin, self)`` is kept unchanged -- it starts
        # the MRO lookup past ModelAdmin, presumably to bypass the
        # multilingual/MPTT overrides (TODO confirm intent).
        return super(admin.ModelAdmin, self).formfield_for_foreignkey(db_field, request, **kwargs)

admin.site.register(Tree, CustomMPTTModelAdmin)
"""
This file was generated with the customdashboard management command and
contains the class for the main dashboard.
To activate your index dashboard add the following to your settings.py::
GRAPPELLI_INDEX_DASHBOARD = 'cms.dashboard.CustomIndexDashboard'
"""
from django.utils.translation import ugettext_lazy as _
from django.core.urlresolvers import reverse
from grappelli.dashboard import modules, Dashboard
from grappelli.dashboard.utils import get_admin_site_name
class CustomIndexDashboard(Dashboard):
    """
    Custom index dashboard for www.
    """
    def init_with_context(self, context):
        """
        Build the dashboard layout: grouped/plain app lists in column 1,
        media/support links plus a news feed in column 2, and recent
        actions in column 3.
        """
        # NOTE(review): site_name is currently unused in this method.
        site_name = get_admin_site_name(context)
        # append a group for "Administration" & "Applications"
        self.children.append(modules.Group(
            _('Group: Administration & Applications'),
            column=1,
            collapsible=True,
            children = [
                modules.AppList(
                    _('Administration'),
                    column=1,
                    collapsible=False,
                    models=('django.contrib.*',),
                ),
                modules.AppList(
                    _('Applications'),
                    column=1,
                    css_classes=('collapse closed',),
                    exclude=('django.contrib.*',),
                )
            ]
        ))
        # append an app list module for "Applications"
        self.children.append(modules.AppList(
            _('AppList: Applications'),
            collapsible=True,
            column=1,
            css_classes=('collapse closed',),
            exclude=('django.contrib.*',),
        ))
        # append an app list module for "Administration"
        self.children.append(modules.ModelList(
            _('ModelList: Administration'),
            column=1,
            collapsible=False,
            models=('django.contrib.*',),
        ))
        # append another link list module for "support".
        self.children.append(modules.LinkList(
            _('Media Management'),
            column=2,
            children=[
                {
                    'title': _('FileBrowser'),
                    'url': '/admin/filebrowser/browse/',
                    'external': False,
                },
            ]
        ))
        # append another link list module for "support".
        self.children.append(modules.LinkList(
            _('Support'),
            column=2,
            children=[
                {
                    'title': _('Django Documentation'),
                    'url': 'http://docs.djangoproject.com/',
                    'external': True,
                },
                {
                    'title': _('Grappelli Documentation'),
                    'url': 'http://packages.python.org/django-grappelli/',
                    'external': True,
                },
                {
                    'title': _('Grappelli Google-Code'),
                    'url': 'http://code.google.com/p/django-grappelli/',
                    'external': True,
                },
            ]
        ))
        # append a feed module
        self.children.append(modules.Feed(
            _('Latest Django News'),
            column=2,
            feed_url='http://www.djangoproject.com/rss/weblog/',
            limit=5
        ))
        # append a recent actions module
        self.children.append(modules.RecentActions(
            _('Recent Actions'),
            limit=5,
            collapsible=False,
            column=3,
        ))
| Python |
from django.db import models
# Create your models here.
| Python |
"""
This file demonstrates two different styles of tests (one doctest and one
unittest). These will both pass when you run "manage.py test".
Replace these with more appropriate tests for your application.
"""
from django.test import TestCase
class SimpleTest(TestCase):
    def test_basic_addition(self):
        """
        Tests that 1 + 1 always equals 2.
        """
        # failUnlessEqual is a long-deprecated alias (removed in newer
        # Python); assertEqual is the canonical spelling.
        self.assertEqual(1 + 1, 2)

__test__ = {"doctest": """
Another way to test that 1 + 1 is equal to 2.
>>> 1 + 1 == 2
True
"""}
| Python |
from django import template
register = template.Library()
class SetVarNode(template.Node):
    """
    Node for the ``{% set %}`` tag: resolves a value and stores it in the
    rendering context under the given name.
    """
    def __init__(self, var_name, var_value):
        self.var_name = var_name
        self.var_value = var_value

    def render(self, context):
        """Resolve the value (empty string when missing) and stash it."""
        try:
            resolved = template.Variable(self.var_value).resolve(context)
        except template.VariableDoesNotExist:
            resolved = ""
        context[self.var_name] = resolved
        return u""
def set_var(parser, token):
    """
    {% set <var_name> = <var_value> %}
    """
    parts = token.split_contents()
    # Robustness fix: the original only checked ``len(parts) < 4``, so it
    # silently accepted extra tokens and never verified the '=' separator.
    if len(parts) != 4 or parts[2] != '=':
        raise template.TemplateSyntaxError("'set' tag must be of the form: {% set <var_name> = <var_value> %}")
    return SetVarNode(parts[1], parts[3])

register.tag('set', set_var)
| Python |
# Create your views here.
| Python |
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
LANGUAGES = settings.LANGUAGES
LANG_DICT = dict(LANGUAGES)
def get_fallback_languages():
    """
    Build the default fallback map: each language falls back to itself
    first, then to every other configured language (in LANG_DICT order).
    """
    fallbacks = {}
    for lang in LANG_DICT:
        fallbacks[lang] = [lang] + [other for other in LANG_DICT
                                    if other != lang]
    return fallbacks
# Optional settings, with defaults derived from LANGUAGES above.
FALLBACK_LANGUAGES = getattr(settings, 'MULTILINGUAL_FALLBACK_LANGUAGES',
                             get_fallback_languages())
IMPLICIT_FALLBACK = getattr(settings, 'MULTILINGUAL_IMPLICIT_FALLBACK', True)
# Default language is the first entry of settings.LANGUAGES unless overridden.
DEFAULT_LANGUAGE = getattr(settings, 'MULTILINGUAL_DEFAULT_LANGUAGE', LANGUAGES[0][0])
# Fail fast at import time if the required context processor is missing.
mcp = "multilingual.context_processors.multilingual"
if mcp not in settings.TEMPLATE_CONTEXT_PROCESSORS:
    found = ','.join(settings.TEMPLATE_CONTEXT_PROCESSORS)
    raise ImproperlyConfigured(
        "django-multilingual-ng requires the '%s' context processor. "
        "Only found: %s" % (mcp, found)
    )
from django.db import models
from multilingual.query import MultilingualModelQuerySet
from multilingual.languages import *
class MultilingualManager(models.Manager):
    """
    Default manager for multilingual models.

    TODO: turn this into a proxy manager so developers can use any base
    manager they need; extending and additionally filtering/ordering the
    querysets returned by that manager should be sufficient.
    """
    def get_query_set(self):
        """Return a queryset that understands translated-field lookups."""
        return MultilingualModelQuerySet(self.model)

# Backwards-compatible alias; will be deprecated.
Manager = MultilingualManager
"""
Multilingual model support.
This code is put in multilingual.models to make Django execute it
during application initialization.
TO DO: remove it. Right now multilingual must be imported directly
into any file that defines translatable models, so it will be
installed anyway.
This module is here only to make it easier to upgrade from versions
that did not require TranslatableModel.Translation classes to subclass
multilingual.Translation to versions that do.
"""
from translation import install_translation_library
install_translation_library() | Python |
"""
Django-multilingual: a QuerySet subclass for models with translatable
fields.
This file contains the implementation for QSRF Django.
Huge thanks to hubscher.remy for writing this!
"""
from django.db.models.sql.compiler import SQLCompiler
from multilingual.languages import (
get_translation_table_alias,
get_language_code_list,
get_default_language,
get_translated_field_alias)
__ALL__ = ['MultilingualSQLCompiler']
class MultilingualSQLCompiler(SQLCompiler):
    """
    SQL compiler that attaches one LEFT JOIN per configured language so a
    single query fetches every translation row alongside the master row.
    """
    def pre_sql_setup(self):
        """
        Adds the JOINS and SELECTS for fetching multilingual data.
        """
        super(MultilingualSQLCompiler, self).pre_sql_setup()
        if not self.query.include_translation_data:
            return
        opts = self.query.model._meta
        qn = self.quote_name_unless_alias
        qn2 = self.connection.ops.quote_name
        if hasattr(opts, 'translation_model'):
            master_table_name = self.query.join((None, opts.db_table, None, None))
            translation_opts = opts.translation_model._meta
            trans_table_name = translation_opts.db_table
            # One aliased LEFT JOIN per language; missing translations
            # simply yield NULL columns.
            for language_code in get_language_code_list():
                table_alias = get_translation_table_alias(trans_table_name,
                                                          language_code)
                trans_join = ("LEFT JOIN %s AS %s ON ((%s.master_id = %s.%s) AND (%s.language_code = '%s'))"
                              % (qn2(translation_opts.db_table),
                                 qn2(table_alias),
                                 qn2(table_alias),
                                 qn(master_table_name),
                                 qn2(opts.pk.column),
                                 qn2(table_alias),
                                 language_code))
                self.query.extra_join[table_alias] = trans_join

    def get_from_clause(self):
        """
        Add the JOINS for related multilingual fields filtering.
        """
        result = super(MultilingualSQLCompiler, self).get_from_clause()
        if not self.query.include_translation_data:
            return result
        from_ = result[0]
        # Append the language joins built in pre_sql_setup.
        for join in self.query.extra_join.values():
            from_.append(join)
        return (from_, result[1])
| Python |
from multilingual.languages import get_default_language
try:
from django.utils.decorators import auto_adapt_to_methods as method_decorator
except ImportError:
from django.utils.decorators import method_decorator
try:
from threading import local
except ImportError:
from django.utils._threading_local import local
_thread_locals = local()
_thread_locals.gll_language_code = None
def is_multilingual_model(model):
    """
    Return True if `model` is a multilingual model.

    The multilingual machinery marks such models by attaching a
    ``translation_model`` attribute to their ``_meta``.
    """
    meta = model._meta
    return hasattr(meta, 'translation_model')
def _get_language_code():
    # Thread-local language override set by the Global Language Lock;
    # None means the lock is not active on this thread.
    return getattr(_thread_locals, 'gll_language_code', None)

def _set_language_code(lang):
    # Store (or clear, when lang is None) the thread-local override.
    setattr(_thread_locals, 'gll_language_code', lang)

# Raised when the Global Language Lock is queried while inactive.
class GLLError(Exception): pass
class GlobalLanguageLock(object):
    """
    The Global Language Lock forces django-multilingual-ng to use one
    specific language (per thread) and not attempt any fallback, until
    the lock is released again.
    """
    def lock(self, language_code):
        """Activate the lock for *language_code* on the current thread."""
        _set_language_code(language_code)

    def release(self):
        """Deactivate the lock on the current thread."""
        _set_language_code(None)

    @property
    def language_code(self):
        """The locked language code; raises GLLError when not locked."""
        current = _get_language_code()
        if current is None:
            raise GLLError("The Global Language Lock is not active")
        return current

    @property
    def is_active(self):
        """True when a language is currently locked on this thread."""
        return _get_language_code() is not None

GLL = GlobalLanguageLock()
def gll_unlock_decorator(func):
    """
    Run *func* with the Global Language Lock temporarily released, then
    restore the previous lock state.

    Bug fix: the original re-locked only after a successful call, so an
    exception inside *func* left the lock permanently released for the
    thread; the re-lock now happens in a ``finally`` block.
    """
    import functools

    @functools.wraps(func)
    def _decorated(*args, **kwargs):
        if not GLL.is_active:
            return func(*args, **kwargs)
        language_code = GLL.language_code
        GLL.release()
        try:
            return func(*args, **kwargs)
        finally:
            GLL.lock(language_code)
    return _decorated

gll_unlock = method_decorator(gll_unlock_decorator)
from django.core.exceptions import ImproperlyConfigured
from django.db import models
from multilingual.utils import is_multilingual_model
def get_field(cls, model, opts, label, field):
    """
    Just like django.contrib.admin.validation.get_field, but knows
    about translation models: translated fields are looked up first,
    then the plain model options.
    """
    trans_meta = model._meta.translation_model._meta
    try:
        # translated_fields maps name -> (field, lang_id)
        return trans_meta.translated_fields[field][0]
    except KeyError:
        # Not a translated field -- fall through to the direct lookup.
        pass
    try:
        return opts.get_field(field)
    except models.FieldDoesNotExist:
        raise ImproperlyConfigured("'%s.%s' refers to field '%s' that is " \
                                   "missing from model '%s'." \
                                   % (cls.__name__, label, field, model.__name__))
def validate_admin_registration(cls, model):
    """
    Validates a class specified as a model admin.

    Right now this means validating prepopulated_fields, as for
    multilingual models DM handles them by itself.
    """
    if not is_multilingual_model(model):
        return
    from django.contrib.admin.validation import check_isdict, check_isseq
    opts = model._meta
    # this is heavily based on django.contrib.admin.validation.
    if hasattr(cls, '_dm_prepopulated_fields'):
        # Bug fix: the original passed ``cls.prepopulated_fields`` here even
        # though the attribute being validated (and iterated below) is
        # ``_dm_prepopulated_fields``.
        check_isdict(cls, '_dm_prepopulated_fields', cls._dm_prepopulated_fields)
        for field, val in cls._dm_prepopulated_fields.items():
            f = get_field(cls, model, opts, 'prepopulated_fields', field)
            if isinstance(f, (models.DateTimeField, models.ForeignKey,
                              models.ManyToManyField)):
                raise ImproperlyConfigured("'%s.prepopulated_fields['%s']' "
                                           "is either a DateTimeField, ForeignKey or "
                                           "ManyToManyField. This isn't allowed."
                                           % (cls.__name__, field))
            check_isseq(cls, "prepopulated_fields['%s']" % field, val)
            for idx, f in enumerate(val):
                get_field(cls, model,
                          opts, "prepopulated_fields['%s'][%d]"
                          % (f, idx), f)
| Python |
"""
Support for models' internal Translation class.
"""
##TODO: this is messy and needs to be cleaned up
from django.core.exceptions import ObjectDoesNotExist
from django.db import models
from django.db.models import signals
from django.db.models.base import ModelBase
from django.utils.translation import get_language
from multilingual.languages import *
from multilingual.exceptions import TranslationDoesNotExist
from multilingual.fields import TranslationForeignKey
from multilingual import manager
from multilingual.utils import GLL
# To Be Depricated
#from multilingual.utils import install_multilingual_modeladmin_new
from new import instancemethod
def translation_save_translated_fields(instance, **kwargs):
    """
    Save all the translations of instance in post_save signal handler.

    No-op for instances that never had their translation cache filled.
    """
    if not hasattr(instance, '_translation_cache'):
        return
    # ``items()`` instead of the Python-2-only ``iteritems()``: behaviour
    # is identical, the cache is tiny, and the handler stays portable.
    for l_id, translation in instance._translation_cache.items():
        # set the translation ID just in case the translation was
        # created while instance was not stored in the DB yet
        # note: we're using _get_pk_val here even though it is
        # private, since that's the most reliable way to get the value
        # on older Django (pk property did not exist yet)
        translation.master_id = instance._get_pk_val()
        translation.save()
def fill_translation_cache(instance):
    """
    Fill the translation cache using information received in the
    instance objects as extra fields.

    You can not do this in post_init because the extra fields are
    assigned by QuerySet.iterator after model initialization.
    """
    if hasattr(instance, '_translation_cache'):
        # do not refill the cache
        return
    instance._translation_cache = {}
    # unsaved instances cannot have translations
    if not instance.pk:
        return
    for language_code in get_language_code_list():
        # see if translation for language_code was in the query
        field_alias = get_translated_field_alias('code', language_code)
        if getattr(instance, field_alias, None) is not None:
            field_names = [f.attname for f in instance._meta.translation_model._meta.fields]
            # if so, create a translation object and put it in the cache
            field_data = {}
            for fname in field_names:
                field_data[fname] = getattr(instance,
                                            get_translated_field_alias(fname, language_code))
            translation = instance._meta.translation_model(**field_data)
            instance._translation_cache[language_code] = translation
    # In some situations an (existing in the DB) object is loaded
    # without using the normal QuerySet. In such case fallback to
    # loading the translations using a separate query.
    # Unfortunately, this is indistinguishable from the situation when
    # an object does not have any translations. Oh well, we'll have
    # to live with this for the time being.
    if len(instance._translation_cache.keys()) == 0:
        for translation in instance.translations.all():
            instance._translation_cache[translation.language_code] = translation
class TranslatedFieldProxy(property):
    """
    Proxy set on the main model class that forwards attribute access for
    one translated field to the appropriate translation (optionally pinned
    to a single language).
    """
    def __init__(self, field_name, alias, field, language_code=None,
                 fallback=False):
        self.field_name = field_name
        self.field = field
        # lets the admin order by the aliased column
        self.admin_order_field = alias
        self._language_code = language_code
        self.fallback = fallback

    @property
    def language_code(self):
        """
        The pinned language if one was given; otherwise this proxy acts
        as the "current" field and uses the active language.
        """
        return self._language_code or get_language()

    def __get__(self, obj, objtype=None):
        if obj is None:
            return self
        getter = getattr(obj, 'get_' + self.field_name)
        return getter(self.language_code, self.fallback)

    def __set__(self, obj, value):
        setter = getattr(obj, 'set_' + self.field_name)
        return setter(value, self.language_code)

    short_description = property(lambda self: self.field.short_description)
def getter_generator(field_name, short_description):
    """
    Build the ``get_<field_name>`` accessor installed on the main model.
    """
    def get_translation_field(cls, language_code=None, fallback=False):
        # A missing translation surfaces as None rather than an exception.
        try:
            return cls.get_translation(language_code,
                                       fallback=fallback,
                                       field=field_name)
        except TranslationDoesNotExist:
            return None
    get_translation_field.short_description = short_description
    return get_translation_field
def setter_generator(field_name):
    """
    Build the ``set_<field_name>`` mutator installed on the main model.
    """
    def set_translation_field(cls, value, language_code=None):
        # create_if_necessary=True: a write must not fail on a missing row.
        translation = cls.get_translation(language_code, True)
        setattr(translation, field_name, value)
    set_translation_field.short_description = "set " + field_name
    return set_translation_field
def get_translation(cls, language_code, create_if_necessary=False,
                    fallback=False, field=None):
    """
    Get a translation instance for the given `language_id_or_code`.

    If the translation does not exist:

    1. if `create_if_necessary` is True, this function will create one
    2. otherwise, if `fallback` is True, this function will search the
       list of languages looking for the first existing translation
    3. if all of the above fails to find a translation, raise the
       TranslationDoesNotExist exception
    """
    # fill the cache if necessary
    cls.fill_translation_cache()
    if language_code is None:
        language_code = getattr(cls, '_default_language', None)
    if language_code is None:
        language_code = get_default_language()
    force = False
    if GLL.is_active:
        # The Global Language Lock overrides the requested language and
        # disables any fallback.
        language_code = GLL.language_code
        force = True
    if language_code in cls._translation_cache:
        transobj = cls._translation_cache.get(language_code, None)
        if field is None:
            return transobj
        value = getattr(transobj, field)
        # An empty field value still falls through to fallback handling
        # below, unless forced or fallback is off.
        if value or force or (not fallback):
            return value
    if create_if_necessary:
        new_translation = cls._meta.translation_model(master=cls,
                                                      language_code=language_code)
        cls._translation_cache[language_code] = new_translation
        return new_translation
    # only fall back if we're not in 'force' mode (GLL)
    elif (not force) and fallback:
        for fb_lang_code in get_fallbacks(language_code):
            transobj = cls._translation_cache.get(fb_lang_code, None)
            if transobj:
                if field is None:
                    return transobj
                else:
                    value = getattr(transobj, field)
                    if value:
                        return value
    raise TranslationDoesNotExist(language_code)
class TranslationModel(object):
    """
    A superclass for translatablemodel.Translation inner classes.
    """
    def contribute_to_class(cls, main_cls, name):
        """
        Handle the inner 'Translation' class.
        """
        # delay the creation of the *Translation until the master model is
        # fully created
        signals.class_prepared.connect(cls.finish_multilingual_class,
                                       sender=main_cls, weak=False)
        # connect the post_save signal on master class to a handler
        # that saves translations
        signals.post_save.connect(translation_save_translated_fields,
                                  sender=main_cls)
    contribute_to_class = classmethod(contribute_to_class)

    def create_translation_attrs(cls, main_cls):
        """
        Creates get_'field name'(language_code) and set_'field
        name'(language_id) methods for all the translation fields.
        Adds the 'field name' properties too.

        Returns the translated_fields hash used in field lookups, see
        multilingual.query. It maps field names to (field,
        language_id) tuples.
        """
        translated_fields = {}
        for fname, field in cls.__dict__.items():
            if isinstance(field, models.fields.Field):
                translated_fields[fname] = (field, None)
                # add get_'fname' and set_'fname' methods to main_cls
                getter = getter_generator(fname, getattr(field, 'verbose_name', fname))
                setattr(main_cls, 'get_' + fname, getter)
                setter = setter_generator(fname)
                setattr(main_cls, 'set_' + fname, setter)
                # add the 'fname' proxy property that allows reads
                # from and writing to the appropriate translation
                setattr(main_cls, fname,
                        TranslatedFieldProxy(fname, fname, field, fallback=True))
                # add the 'fname'_any fallback
                setattr(main_cls, fname + FALLBACK_FIELD_SUFFIX,
                        TranslatedFieldProxy(fname, fname, field, fallback=True))
                # create the 'fname'_'language_code' proxy properties
                for language_code in get_language_code_list():
                    fname_lng = fname + '_' + language_code.replace('-', '_')
                    translated_fields[fname_lng] = (field, language_code)
                    setattr(main_cls, fname_lng,
                            TranslatedFieldProxy(fname, fname_lng, field,
                                                 language_code))
                    # add the 'fname'_'language_code'_any fallback proxy
                    setattr(main_cls, fname_lng + FALLBACK_FIELD_SUFFIX,
                            TranslatedFieldProxy(fname, fname_lng, field,
                                                 language_code, fallback=True))
                # 'fname'_current always tracks the active language.
                fname_current = fname + '_current'
                setattr(main_cls, fname_current,
                        TranslatedFieldProxy(fname, fname_current, field, None))
                setattr(main_cls, fname_current + FALLBACK_FIELD_SUFFIX,
                        TranslatedFieldProxy(fname, fname_current, field, None, fallback=True))
        return translated_fields
    create_translation_attrs = classmethod(create_translation_attrs)

    def get_unique_fields(cls):
        """
        Return a list of fields with "unique" attribute, which needs to
        be augmented by the language.
        """
        unique_fields = []
        for fname, field in cls.__dict__.items():
            if isinstance(field, models.fields.Field):
                if getattr(field,'unique',False):
                    try:
                        field.unique = False
                    except AttributeError:
                        # newer Django defines unique as a property
                        # that uses _unique to store data. We're
                        # jumping over the fence by setting _unique,
                        # so this sucks, but this happens early enough
                        # to be safe.
                        field._unique = False
                    unique_fields.append(fname)
        return unique_fields
    get_unique_fields = classmethod(get_unique_fields)

    def finish_multilingual_class(cls, *args, **kwargs):
        """
        Create a model with translations of a multilingual class.

        Runs on class_prepared of the master model; builds the
        <Master>Translation model, wires the proxy attributes onto the
        master class, and records everything on master._meta.
        """
        main_cls = kwargs['sender']
        translation_model_name = main_cls.__name__ + "Translation"
        # create the model with all the translatable fields
        unique = [('language_code', 'master')]
        for f in cls.get_unique_fields():
            unique.append(('language_code',f))
        class TransMeta:
            pass
        try:
            meta = cls.Meta
        except AttributeError:
            meta = TransMeta
        meta.ordering = ('language_code',)
        meta.unique_together = tuple(unique)
        meta.app_label = main_cls._meta.app_label
        if not hasattr(meta, 'db_table'):
            meta.db_table = main_cls._meta.db_table + '_translation'
        trans_attrs = cls.__dict__.copy()
        trans_attrs['Meta'] = meta
        # TODO: increase the length of this field, but to what???
        trans_attrs['language_code'] = models.CharField(max_length=15, blank=True,
                                                        choices=get_language_choices(),
                                                        db_index=True)
        related_name = getattr(meta, 'related_name', 'translations')
        if hasattr(meta, 'related_name'):
            delattr(meta, 'related_name')
        # NOTE(review): edit_inline is assigned but never used here.
        edit_inline = True
        trans_attrs['master'] = TranslationForeignKey(main_cls, blank=False, null=False,
                                                      related_name=related_name,)
        trans_attrs['__str__'] = lambda self: ("%s object, language_code=%s"
                                               % (translation_model_name,
                                                  self.language_code))
        trans_model = ModelBase(translation_model_name, (models.Model,), trans_attrs)
        trans_model._meta.translated_fields = cls.create_translation_attrs(main_cls)
        trans_model._meta.related_name = related_name
        # Patch init_name_map so translated field names resolve in lookups.
        _old_init_name_map = main_cls._meta.__class__.init_name_map
        def init_name_map(self):
            cache = _old_init_name_map(self)
            for name, field_and_lang_id in trans_model._meta.translated_fields.items():
                #import sys; sys.stderr.write('TM %r\n' % trans_model)
                cache[name] = (field_and_lang_id[0], trans_model, True, False)
            return cache
        main_cls._meta.init_name_map = instancemethod(init_name_map,
                                                      main_cls._meta,
                                                      main_cls._meta.__class__)
        main_cls._meta.translation_model = trans_model
        main_cls._meta.force_language = None
        main_cls.Translation = trans_model
        main_cls.get_translation = get_translation
        main_cls.fill_translation_cache = fill_translation_cache
        # Note: don't fill the translation cache in post_init, as all
        # the extra values selected by QAddTranslationData will be
        # assigned AFTER init()
        # signals.post_init.connect(fill_translation_cache,
        #                           sender=main_cls)
    finish_multilingual_class = classmethod(finish_multilingual_class)

# The following will be deprecated:
Translation = TranslationModel
def install_translation_library():
    # modify ModelBase.__new__ so that it understands how to handle the
    # 'Translation' inner class
    if getattr(ModelBase, '_multilingual_installed', False):
        # don't install it twice
        return
    _old_new = ModelBase.__new__
    def multilingual_modelbase_new(cls, name, bases, attrs):
        if 'Translation' in attrs:
            # NOTE: the statement-form raises below are Python-2-only
            # syntax; this module predates Python 3.
            if not issubclass(attrs['Translation'], Translation):
                raise ValueError, ("%s.Translation must be a subclass "
                                   + " of multilingual.Translation.") % (name,)
            # Make sure that if the class specifies objects then it is
            # a subclass of our Manager.
            #
            # Don't check other managers since someone might want to
            # have a non-multilingual manager, but assigning a
            # non-multilingual manager to objects would be a common
            # mistake.
            if ('objects' in attrs) and (not isinstance(attrs['objects'], manager.Manager)):
                raise ValueError, ("Model %s specifies translations, " +
                                   "so its 'objects' manager must be " +
                                   "a subclass of multilingual.Manager.") % (name,)
            # Change the default manager to multilingual.Manager.
            if not 'objects' in attrs:
                attrs['objects'] = manager.Manager()
        return _old_new(cls, name, bases, attrs)
    ModelBase.__new__ = staticmethod(multilingual_modelbase_new)
    ModelBase._multilingual_installed = True
    # To Be Deprecated
    #install_multilingual_modeladmin_new()

# install the library
install_translation_library()
| Python |
import math
import StringIO
import tokenize
from django import template
from django import forms
from django.template import Node, NodeList, Template, Context, resolve_variable
from django.template.loader import get_template, render_to_string
from django.conf import settings
from django.utils.html import escape
from multilingual.languages import (
get_default_language,
get_language_code_list,
get_language_name,
get_language_bidi,
get_language_idx
)
from multilingual.utils import GLL
register = template.Library()
def language_name(language_code):
    """
    Template filter: human-readable name of the language identified by
    ``language_code``.
    """
    return get_language_name(language_code)
def language_bidi(language_code):
    """
    Template filter: whether the language identified by ``language_code``
    is written right-to-left.
    """
    return get_language_bidi(language_code)
def language_for_id(language_id):
    """
    Template filter: look up the language for ``language_id``.

    Bug fix: the original passed the function object itself
    (``language_for_id``) to ``get_language_idx`` instead of the argument.
    """
    return get_language_idx(language_id)
class EditTranslationNode(template.Node):
    """
    Renders the bound form field of one translated attribute, optionally
    for an explicit language (defaults to the project default language).
    """
    def __init__(self, form_name, field_name, language=None):
        self.form_name = form_name
        self.field_name = field_name
        self.language = language

    def render(self, context):
        form = resolve_variable(self.form_name, context)
        trans_model = form._meta.model._meta.translation_model
        if self.language:
            language_code = self.language.resolve(context)
        else:
            language_code = get_default_language()
        # Variable path: <form>.<translation model name>.<language idx>.<field>
        lookup = "%s.%s.%s.%s" % (self.form_name,
                                  trans_model._meta.object_name.lower(),
                                  get_language_idx(language_code),
                                  self.field_name)
        return str(resolve_variable(lookup, context))
def do_edit_translation(parser, token):
    """
    Parse the ``{% edit_translation form field [language] %}`` tag.

    Requires the form variable name and the field name; the optional third
    argument is a filter expression selecting the language.
    """
    bits = token.split_contents()
    if len(bits) not in [3, 4]:
        # The parenthesized raise form is valid on both Python 2 and 3;
        # the old ``raise Exc, msg`` statement was Python-2-only syntax.
        raise template.TemplateSyntaxError(
            "%r tag requires 3 or 4 arguments" % bits[0])
    if len(bits) == 4:
        language = parser.compile_filter(bits[3])
    else:
        language = None
    return EditTranslationNode(bits[1], bits[2], language)
def reorder_translation_formset_by_language_code(inline_admin_form):
    """
    Return the forms of a multilingual inline formset rearranged to follow
    the configured language-code order.
    """
    by_language = {}
    for form in inline_admin_form:
        by_language[form.form.initial['language_id']] = form
    return [by_language[code] for code in get_language_code_list()]
class GLLNode(template.Node):
    """Render the wrapped node list while holding the global language lock."""

    def __init__(self, language_code, nodelist):
        self.language_code = language_code
        self.nodelist = nodelist

    def render(self, context):
        if self.language_code[0] == self.language_code[-1] and self.language_code[0] in ('"', "'"):
            # Quoted literal: strip the matching quotes.
            language_code = self.language_code[1:-1]
        else:
            # Otherwise resolve it as a template variable.
            language_code = template.Variable(self.language_code).resolve(context)
        GLL.lock(language_code)
        # Bug fix: the original skipped GLL.release() when rendering raised,
        # leaving the global language lock held for the rest of the thread.
        try:
            output = self.nodelist.render(context)
        finally:
            GLL.release()
        return output
def gll(parser, token):
    """Parse {% gll lang %}...{% endgll %}: render the block under the GLL."""
    try:
        tag_name, language_code = token.split_contents()
    except ValueError:
        raise template.TemplateSyntaxError("gll takes exactly one argument")
    nodelist = parser.parse(('endgll',))
    parser.delete_first_token()
    return GLLNode(language_code, nodelist)
# Register the filters and tags defined above with the template library.
register.filter(language_for_id)
register.filter(language_name)
register.filter(language_bidi)
register.tag('edit_translation', do_edit_translation)
register.filter(reorder_translation_formset_by_language_code)
register.tag('gll', gll)
"""
Django-multilingual: a QuerySet subclass for models with translatable
fields.
This file contains the implementation for QSRF Django.
"""
import datetime
from copy import deepcopy
from django.core.exceptions import FieldError
from django.db import connection
from django.db.models.fields import FieldDoesNotExist
from django.db.models.query import QuerySet, Q
from django.db.models.sql.query import Query
from django.db import connections, DEFAULT_DB_ALIAS
from django.db.models.sql.datastructures import (
EmptyResultSet,
Empty,
MultiJoin)
from django.db.models.sql.constants import *
from django.db.models.sql.where import WhereNode, EverythingNode, AND, OR
try:
    # handle internal API changes in Django rev. 9700
    from django.db.models.sql.where import Constraint
    def constraint_tuple(alias, col, field, lookup_type, value):
        # Newer Django: WhereNode expects a (Constraint, lookup_type, value) triple.
        return (Constraint(alias, col, field), lookup_type, value)
except ImportError:
    # backwards compatibility, for Django versions 1.0 to rev. 9699
    def constraint_tuple(alias, col, field, lookup_type, value):
        # Older Django: WhereNode takes the flat 5-tuple directly.
        return (alias, col, field, lookup_type, value)
from multilingual.languages import (
get_translation_table_alias,
get_language_code_list,
get_default_language,
get_translated_field_alias)
from compiler import MultilingualSQLCompiler
__ALL__ = ['MultilingualModelQuerySet']
class MultilingualQuery(Query):
    """
    Query subclass that joins each translation table (once per configured
    language) and exposes the translated columns through extra SELECT
    aliases produced by get_translated_field_alias().
    """
    def __init__(self, model, where=WhereNode):
        # extra_join maps a translation-table alias to its raw SQL JOIN clause
        # (consumed by MultilingualSQLCompiler); include_translation_data lets
        # get_count() switch the multilingual machinery off entirely.
        self.extra_join = {}
        self.include_translation_data = True
        extra_select = {}
        super(MultilingualQuery, self).__init__(model, where=where)
        opts = self.model._meta
        qn = self.get_compiler(DEFAULT_DB_ALIAS).quote_name_unless_alias
        qn2 = self.get_compiler(DEFAULT_DB_ALIAS).connection.ops.quote_name
        master_table_name = opts.db_table
        translation_opts = opts.translation_model._meta
        trans_table_name = translation_opts.db_table
        if hasattr(opts, 'translation_model'):
            master_table_name = opts.db_table
            # One aliased SELECT column per (language, translated field) pair.
            for language_code in get_language_code_list():
                for fname in [f.attname for f in translation_opts.fields]:
                    table_alias = get_translation_table_alias(trans_table_name,
                                                              language_code)
                    field_alias = get_translated_field_alias(fname,
                                                             language_code)
                    extra_select[field_alias] = qn2(table_alias) + '.' + qn2(fname)
            self.add_extra(extra_select, None, None, None, None, None)
            self._trans_extra_select_count = len(self.extra_select)
    def clone(self, klass=None, **kwargs):
        """Preserve the multilingual bookkeeping when Django clones the query."""
        defaults = {
            'extra_join': self.extra_join,
            'include_translation_data': self.include_translation_data,
        }
        defaults.update(kwargs)
        return super(MultilingualQuery, self).clone(klass=klass, **defaults)
    def add_filter(self, filter_expr, connector=AND, negate=False, trim=False,
                   can_reuse=None, process_extras=True, force_having=False):
        """
        Copied from add_filter to generate WHERES for translation fields.
        """
        if force_having:
            import warnings
            warnings.warn("multilingual-ng doesn't support force_having (see Django ticket #11293)")
        arg, value = filter_expr
        parts = arg.split(LOOKUP_SEP)
        if not parts:
            raise FieldError("Cannot parse keyword query %r" % arg)
        # Work out the lookup type and remove it from 'parts', if necessary.
        if len(parts) == 1 or parts[-1] not in self.query_terms:
            lookup_type = 'exact'
        else:
            lookup_type = parts.pop()
        # Interpret '__exact=None' as the sql 'is NULL'; otherwise, reject all
        # uses of None as a query value.
        if value is None:
            if lookup_type != 'exact':
                raise ValueError("Cannot use None as a query value")
            lookup_type = 'isnull'
            value = True
        elif (value == '' and lookup_type == 'exact' and
              self.get_compiler(DEFAULT_DB_ALIAS).connection.features.interprets_empty_strings_as_nulls):
            lookup_type = 'isnull'
            value = True
        elif callable(value):
            value = value()
        opts = self.get_meta()
        alias = self.get_initial_alias()
        allow_many = trim or not negate
        try:
            field, target, opts, join_list, last, extra_filters = self.setup_joins(
                parts, opts, alias, True, allow_many, can_reuse=can_reuse,
                negate=negate, process_extras=process_extras)
        except MultiJoin, e:
            self.split_exclude(filter_expr, LOOKUP_SEP.join(parts[:e.level]),
                               can_reuse)
            return
        #=======================================================================
        # Django Mulitlingual NG Specific Code START
        #=======================================================================
        # Translated fields: add the WHERE condition against the aliased
        # per-language translation table and return early -- the normal join
        # bookkeeping below does not apply to these synthetic joins.
        if hasattr(opts, 'translation_model'):
            field_name = parts[-1]
            if field_name == 'pk':
                field_name = opts.pk.name
            translation_opts = opts.translation_model._meta
            if field_name in translation_opts.translated_fields.keys():
                field, model, direct, m2m = opts.get_field_by_name(field_name)
                if model == opts.translation_model:
                    language_code = translation_opts.translated_fields[field_name][1]
                    if language_code is None:
                        language_code = get_default_language()
                    master_table_name = opts.db_table
                    trans_table_alias = get_translation_table_alias(
                        model._meta.db_table, language_code)
                    new_table = (master_table_name + "__" + trans_table_alias)
                    self.where.add(constraint_tuple(new_table, field.column, field, lookup_type, value), connector)
                    return
        #=======================================================================
        # Django Mulitlingual NG Specific Code END
        #=======================================================================
        final = len(join_list)
        penultimate = last.pop()
        if penultimate == final:
            penultimate = last.pop()
        if trim and len(join_list) > 1:
            extra = join_list[penultimate:]
            join_list = join_list[:penultimate]
            final = penultimate
            penultimate = last.pop()
            col = self.alias_map[extra[0]][LHS_JOIN_COL]
            for alias in extra:
                self.unref_alias(alias)
        else:
            col = target.column
            alias = join_list[-1]
        while final > 1:
            # An optimization: if the final join is against the same column as
            # we are comparing against, we can go back one step in the join
            # chain and compare against the lhs of the join instead (and then
            # repeat the optimization). The result, potentially, involves less
            # table joins.
            join = self.alias_map[alias]
            if col != join[RHS_JOIN_COL]:
                break
            self.unref_alias(alias)
            alias = join[LHS_ALIAS]
            col = join[LHS_JOIN_COL]
            join_list = join_list[:-1]
            final -= 1
            if final == penultimate:
                penultimate = last.pop()
        if (lookup_type == 'isnull' and value is True and not negate and
            final > 1):
            # If the comparison is against NULL, we need to use a left outer
            # join when connecting to the previous model. We make that
            # adjustment here. We don't do this unless needed as it's less
            # efficient at the database level.
            self.promote_alias(join_list[penultimate])
        if connector == OR:
            # Some joins may need to be promoted when adding a new filter to a
            # disjunction. We walk the list of new joins and where it diverges
            # from any previous joins (ref count is 1 in the table list), we
            # make the new additions (and any existing ones not used in the new
            # join list) an outer join.
            join_it = iter(join_list)
            table_it = iter(self.tables)
            join_it.next(), table_it.next()
            table_promote = False
            join_promote = False
            for join in join_it:
                table = table_it.next()
                if join == table and self.alias_refcount[join] > 1:
                    continue
                join_promote = self.promote_alias(join)
                if table != join:
                    table_promote = self.promote_alias(table)
                break
            self.promote_alias_chain(join_it, join_promote)
            self.promote_alias_chain(table_it, table_promote)
        self.where.add(constraint_tuple(alias, col, field, lookup_type, value), connector)
        if negate:
            self.promote_alias_chain(join_list)
            if lookup_type != 'isnull':
                if final > 1:
                    for alias in join_list:
                        if self.alias_map[alias][JOIN_TYPE] == self.LOUTER:
                            j_col = self.alias_map[alias][RHS_JOIN_COL]
                            entry = self.where_class()
                            entry.add(constraint_tuple(alias, j_col, None, 'isnull', True), AND)
                            entry.negate()
                            self.where.add(entry, AND)
                            break
                elif not (lookup_type == 'in' and not value) and field.null:
                    # Leaky abstraction artifact: We have to specifically
                    # exclude the "foo__in=[]" case from this handling, because
                    # it's short-circuited in the Where class.
                    entry = self.where_class()
                    entry.add(constraint_tuple(alias, col, None, 'isnull', True), AND)
                    entry.negate()
                    self.where.add(entry, AND)
        if can_reuse is not None:
            can_reuse.update(join_list)
        if process_extras:
            for filter in extra_filters:
                self.add_filter(filter, negate=negate, can_reuse=can_reuse,
                                process_extras=False)
    def _setup_joins_with_translation(self, names, opts, alias,
                                      dupe_multis, allow_many=True,
                                      allow_explicit_fk=False, can_reuse=None,
                                      negate=False, process_extras=True):
        """
        This is based on a full copy of Query.setup_joins because
        currently I see no way to handle it differently.
        TO DO: there might actually be a way, by splitting a single
        multi-name setup_joins call into separate calls. Check it.
        -- marcin@elksoft.pl
        Compute the necessary table joins for the passage through the fields
        given in 'names'. 'opts' is the Options class for the current model
        (which gives the table we are joining to), 'alias' is the alias for the
        table we are joining to. If dupe_multis is True, any many-to-many or
        many-to-one joins will always create a new alias (necessary for
        disjunctive filters).
        Returns the final field involved in the join, the target database
        column (used for any 'where' constraint), the final 'opts' value and the
        list of tables joined.
        """
        joins = [alias]
        last = [0]
        dupe_set = set()
        exclusions = set()
        extra_filters = []
        for pos, name in enumerate(names):
            # int_alias is only bound on m2m branches of earlier iterations;
            # the NameError guard handles the first pass.
            try:
                exclusions.add(int_alias)
            except NameError:
                pass
            exclusions.add(alias)
            last.append(len(joins))
            if name == 'pk':
                name = opts.pk.name
            try:
                field, model, direct, m2m = opts.get_field_by_name(name)
            except FieldDoesNotExist:
                for f in opts.fields:
                    if allow_explicit_fk and name == f.attname:
                        # XXX: A hack to allow foo_id to work in values() for
                        # backwards compatibility purposes. If we dropped that
                        # feature, this could be removed.
                        field, model, direct, m2m = opts.get_field_by_name(f.name)
                        break
                else:
                    names = opts.get_all_field_names() + self.aggregate_select.keys()
                    raise FieldError("Cannot resolve keyword %r into field. "
                                     "Choices are: %s" % (name, ", ".join(names)))
            if not allow_many and (m2m or not direct):
                for alias in joins:
                    self.unref_alias(alias)
                raise MultiJoin(pos + 1)
            #===================================================================
            # Django Multilingual NG Specific Code START
            #===================================================================
            # Translated fields do not go through Query.join(): instead a raw
            # JOIN clause against the per-language translation table is stored
            # in self.extra_join for MultilingualSQLCompiler to emit.
            if hasattr(opts, 'translation_model'):
                translation_opts = opts.translation_model._meta
                if model == opts.translation_model:
                    language_code = translation_opts.translated_fields[name][1]
                    if language_code is None:
                        language_code = get_default_language()
                    #TODO: check alias
                    master_table_name = opts.db_table
                    trans_table_alias = get_translation_table_alias(
                        model._meta.db_table, language_code)
                    new_table = (master_table_name + "__" + trans_table_alias)
                    qn = self.get_compiler(DEFAULT_DB_ALIAS).quote_name_unless_alias
                    qn2 = self.get_compiler(DEFAULT_DB_ALIAS).connection.ops.quote_name
                    trans_join = ("JOIN %s AS %s ON ((%s.master_id = %s.%s) AND (%s.language_code = '%s'))"
                                  % (qn2(model._meta.db_table),
                                     qn2(new_table),
                                     qn2(new_table),
                                     qn(master_table_name),
                                     qn2(model._meta.pk.column),
                                     qn2(new_table),
                                     language_code))
                    self.extra_join[new_table] = trans_join
                    target = field
                    continue
            #===================================================================
            # Django Multilingual NG Specific Code END
            #===================================================================
            elif model:
                # The field lives on a base class of the current model.
                # Skip the chain of proxy to the concrete proxied model
                proxied_model = get_proxied_model(opts)
                for int_model in opts.get_base_chain(model):
                    if int_model is proxied_model:
                        opts = int_model._meta
                    else:
                        lhs_col = opts.parents[int_model].column
                        dedupe = lhs_col in opts.duplicate_targets
                        if dedupe:
                            exclusions.update(self.dupe_avoidance.get(
                                (id(opts), lhs_col), ()))
                            dupe_set.add((opts, lhs_col))
                        opts = int_model._meta
                        alias = self.join((alias, opts.db_table, lhs_col,
                                           opts.pk.column), exclusions=exclusions)
                        joins.append(alias)
                        exclusions.add(alias)
                        for (dupe_opts, dupe_col) in dupe_set:
                            self.update_dupe_avoidance(dupe_opts, dupe_col,
                                                       alias)
            cached_data = opts._join_cache.get(name)
            orig_opts = opts
            dupe_col = direct and field.column or field.field.column
            dedupe = dupe_col in opts.duplicate_targets
            if dupe_set or dedupe:
                if dedupe:
                    dupe_set.add((opts, dupe_col))
                exclusions.update(self.dupe_avoidance.get((id(opts), dupe_col),
                                                          ()))
            if process_extras and hasattr(field, 'extra_filters'):
                extra_filters.extend(field.extra_filters(names, pos, negate))
            if direct:
                if m2m:
                    # Many-to-many field defined on the current model.
                    if cached_data:
                        (table1, from_col1, to_col1, table2, from_col2,
                         to_col2, opts, target) = cached_data
                    else:
                        table1 = field.m2m_db_table()
                        from_col1 = opts.pk.column
                        to_col1 = field.m2m_column_name()
                        opts = field.rel.to._meta
                        table2 = opts.db_table
                        from_col2 = field.m2m_reverse_name()
                        to_col2 = opts.pk.column
                        target = opts.pk
                        orig_opts._join_cache[name] = (table1, from_col1,
                                                       to_col1, table2, from_col2, to_col2, opts,
                                                       target)
                    int_alias = self.join((alias, table1, from_col1, to_col1),
                                          dupe_multis, exclusions, nullable=True,
                                          reuse=can_reuse)
                    if int_alias == table2 and from_col2 == to_col2:
                        joins.append(int_alias)
                        alias = int_alias
                    else:
                        alias = self.join(
                            (int_alias, table2, from_col2, to_col2),
                            dupe_multis, exclusions, nullable=True,
                            reuse=can_reuse)
                        joins.extend([int_alias, alias])
                elif field.rel:
                    # One-to-one or many-to-one field
                    if cached_data:
                        (table, from_col, to_col, opts, target) = cached_data
                    else:
                        opts = field.rel.to._meta
                        target = field.rel.get_related_field()
                        table = opts.db_table
                        from_col = field.column
                        to_col = target.column
                        orig_opts._join_cache[name] = (table, from_col, to_col,
                                                       opts, target)
                    alias = self.join((alias, table, from_col, to_col),
                                      exclusions=exclusions, nullable=field.null)
                    joins.append(alias)
                else:
                    # Non-relation fields.
                    target = field
                    break
            else:
                orig_field = field
                field = field.field
                if m2m:
                    # Many-to-many field defined on the target model.
                    if cached_data:
                        (table1, from_col1, to_col1, table2, from_col2,
                         to_col2, opts, target) = cached_data
                    else:
                        table1 = field.m2m_db_table()
                        from_col1 = opts.pk.column
                        to_col1 = field.m2m_reverse_name()
                        opts = orig_field.opts
                        table2 = opts.db_table
                        from_col2 = field.m2m_column_name()
                        to_col2 = opts.pk.column
                        target = opts.pk
                        orig_opts._join_cache[name] = (table1, from_col1,
                                                       to_col1, table2, from_col2, to_col2, opts,
                                                       target)
                    int_alias = self.join((alias, table1, from_col1, to_col1),
                                          dupe_multis, exclusions, nullable=True,
                                          reuse=can_reuse)
                    alias = self.join((int_alias, table2, from_col2, to_col2),
                                      dupe_multis, exclusions, nullable=True,
                                      reuse=can_reuse)
                    joins.extend([int_alias, alias])
                else:
                    # One-to-many field (ForeignKey defined on the target model)
                    if cached_data:
                        (table, from_col, to_col, opts, target) = cached_data
                    else:
                        local_field = opts.get_field_by_name(
                            field.rel.field_name)[0]
                        opts = orig_field.opts
                        table = opts.db_table
                        from_col = local_field.column
                        to_col = field.column
                        target = opts.pk
                        orig_opts._join_cache[name] = (table, from_col, to_col,
                                                       opts, target)
                    alias = self.join((alias, table, from_col, to_col),
                                      dupe_multis, exclusions, nullable=True,
                                      reuse=can_reuse)
                    joins.append(alias)
            for (dupe_opts, dupe_col) in dupe_set:
                try:
                    self.update_dupe_avoidance(dupe_opts, dupe_col, int_alias)
                except NameError:
                    self.update_dupe_avoidance(dupe_opts, dupe_col, alias)
        if pos != len(names) - 1:
            if pos == len(names) - 2:
                raise FieldError("Join on field %r not permitted. Did you misspell %r for the lookup type?" % (name, names[pos + 1]))
            else:
                raise FieldError("Join on field %r not permitted." % name)
        return field, target, opts, joins, last, extra_filters
    def setup_joins(self, names, opts, alias, dupe_multis, allow_many=True,
                    allow_explicit_fk=False, can_reuse=None, negate=False,
                    process_extras=True):
        """
        Dispatch to the translation-aware join setup unless translation
        handling has been switched off for this query.
        """
        if not self.include_translation_data:
            return super(MultilingualQuery, self).setup_joins(names, opts, alias,
                                                              dupe_multis, allow_many,
                                                              allow_explicit_fk,
                                                              can_reuse, negate,
                                                              process_extras)
        else:
            return self._setup_joins_with_translation(names, opts, alias, dupe_multis,
                                                      allow_many, allow_explicit_fk,
                                                      can_reuse, negate, process_extras)
    def get_count(self, using=None):
        """Row count; drops the translation joins when there are no filters."""
        # optimize for the common special case: count without any
        # filters
        if ((not (self.select or self.where ))#or self.extra_where))
            and self.include_translation_data):
            obj = self.clone(extra_select = {},
                             extra_join = {},
                             include_translation_data = False)
            return obj.get_count(using)
        else:
            return super(MultilingualQuery, self).get_count(using)
    def get_compiler(self, using=None, connection=None):
        """Always compile via MultilingualSQLCompiler (emits extra_join SQL)."""
        if using is None and connection is None:
            raise ValueError("Need either using or connection")
        if using:
            connection = connections[using]
        return MultilingualSQLCompiler(self, connection, using)
class MultilingualModelQuerySet(QuerySet):
    """
    A specialized QuerySet that knows how to handle translatable
    fields in ordering and filtering methods.
    """

    def __init__(self, model=None, query=None, using=None):
        # Default to MultilingualQuery so translated columns get selected.
        query = query or MultilingualQuery(model)
        super(MultilingualModelQuerySet, self).__init__(model, query, using)
        self._field_name_cache = None

    def __deepcopy__(self, memo):
        """
        Deep copy of a QuerySet doesn't populate the cache
        """
        obj_dict = deepcopy(self.__dict__, memo)
        obj_dict['_iter'] = None
        # Pass self.model so __init__ can build a MultilingualQuery for it.
        obj = self.__class__(self.model)
        obj.__dict__.update(obj_dict)
        return obj

    def for_language(self, language_code):
        """
        Set the default language for all objects returned with this
        query.
        """
        clone = self._clone()
        clone._default_language = language_code
        return clone

    def iterator(self):
        """
        Add the default language information to all returned objects.
        """
        default_language = getattr(self, '_default_language', None)
        for obj in super(MultilingualModelQuerySet, self).iterator():
            obj._default_language = default_language
            yield obj

    def _clone(self, klass=None, **kwargs):
        """
        Override _clone to preserve additional information needed by
        MultilingualModelQuerySet.
        """
        clone = super(MultilingualModelQuerySet, self)._clone(klass, **kwargs)
        clone._default_language = getattr(self, '_default_language', None)
        return clone

    def order_by(self, *field_names):
        """
        Order the queryset; translated field names are mapped to their
        per-language SELECT aliases and applied via extra(order_by=...).
        """
        if hasattr(self.model._meta, 'translation_model'):
            trans_opts = self.model._meta.translation_model._meta
            new_field_names = []
            for field_name in field_names:
                prefix = ''
                if field_name[0] == '-':
                    prefix = '-'
                    field_name = field_name[1:]
                field_and_lang = trans_opts.translated_fields.get(field_name)
                if field_and_lang:
                    field, language_code = field_and_lang
                    if language_code is None:
                        language_code = getattr(self, '_default_language', None)
                    real_name = get_translated_field_alias(field.attname,
                                                           language_code)
                    new_field_names.append(prefix + real_name)
                else:
                    new_field_names.append(prefix + field_name)
            return super(MultilingualModelQuerySet, self).extra(order_by=new_field_names)
        else:
            return super(MultilingualModelQuerySet, self).order_by(*field_names)

    def _get_all_field_names(self):
        # Cache the model's field names (plus 'pk') for the values()/
        # values_list() validation below.
        if self._field_name_cache is None:
            self._field_name_cache = self.model._meta.get_all_field_names() + ['pk']
        return self._field_name_cache

    def values(self, *fields):
        """Reject translated fields in values(); delegate the rest."""
        allowed = self._get_all_field_names()  # hoisted out of the loop
        for field in fields:
            if field not in allowed:
                raise NotImplementedError("Multilingual fields cannot be queried using queryset.values(...)")
        return super(MultilingualModelQuerySet, self).values(*fields)

    def values_list(self, *fields, **kwargs):
        """Reject translated fields in values_list(); delegate the rest."""
        allowed = self._get_all_field_names()
        for field in fields:
            if field not in allowed:
                # Fixed message: it previously referred to values(...) here.
                raise NotImplementedError("Multilingual fields cannot be queried using queryset.values_list(...)")
        return super(MultilingualModelQuerySet, self).values_list(*fields, **kwargs)
| Python |
from django.db import models
class TranslationForeignKey(models.ForeignKey):
    """
    ForeignKey that tells South migrations to introspect it as a plain
    ``django.db.models.fields.related.ForeignKey``.
    """
    def south_field_triple(self):
        # Returns the (field class path, args, kwargs) triple South expects.
        from south.modelsinspector import introspector
        field_class = "django.db.models.fields.related.ForeignKey"
        args, kwargs = introspector(self)
        return (field_class, args, kwargs)
from multilingual.languages import get_language_code_list, get_default_language_code
from multilingual.settings import LANG_DICT
from django.conf import settings
def multilingual(request):
    """
    Returns context variables containing information about available languages.
    """
    codes = sorted(get_language_code_list())
    codes_and_names = [(code, LANG_DICT.get(code, code)) for code in codes]
    return {
        'LANGUAGE_CODES': codes,
        'LANGUAGE_CODES_AND_NAMES': codes_and_names,
        'DEFAULT_LANGUAGE_CODE': get_default_language_code(),
        'ADMIN_MEDIA_URL': settings.ADMIN_MEDIA_PREFIX,
    }
| Python |
class TranslationDoesNotExist(Exception):
    """Raised when the requested translation is missing."""
class LanguageDoesNotExist(Exception):
    """Raised when the requested language is not configured."""
| Python |
from django.core.management.base import AppCommand
from django.db import models
from django.utils.importlib import import_module
from django.conf import settings
from django.db import connection
from django.core.management import call_command
from multilingual.utils import is_multilingual_model
from multilingual.languages import get_language_choices
from inspect import isclass
from south.db import db
def get_code_by_id(lid):
    """Map a 1-based language id onto its code from settings.LANGUAGES."""
    entry = settings.LANGUAGES[lid - 1]
    return entry[0]
class Command(AppCommand):
    """
    Migrate the data from an id base translation table to a code based table.
    """
    def handle(self, *args, **kwargs):
        # Ask for confirmation before destructively altering any tables.
        if self.are_you_sure():
            super(Command, self).handle(*args, **kwargs)
            print self.style.HTTP_SUCCESS('Done.')
        else:
            print self.style.NOTICE('Aborted.')
    def are_you_sure(self):
        # Interactive yes/no prompt; loops until an unambiguous answer is given.
        n = self.style.NOTICE
        e = self.style.ERROR
        print e("WARNING!") + n(" This command will ") + e("delete") + n(""" data from your database! All language_id
columns in all multilingual tables of the apps you specified will be deleted.
Their values will be converted to the new language_code format. Please make a
backup of your database before running this command.""")
        answer = raw_input("Are you sure you want to continue? [yes/no]\n")
        if answer.lower() == 'yes':
            return True
        elif answer.lower() == 'no':
            return False
        while True:
            answer = raw_input("Please answer with either 'yes' or 'no'\n")
            if answer.lower() == 'yes':
                return True
            elif answer.lower() == 'no':
                return False
    def handle_app(self, app, **options):
        # For each multilingual model in the app, convert its translation
        # table from an integer language_id column to a language_code string.
        appname = app.__name__
        print 'handling app %s' % appname
        for obj in [getattr(app, name) for name in dir(app)]:
            if not isclass(obj):
                continue
            if not issubclass(obj, models.Model):
                continue
            if not is_multilingual_model(obj):
                continue
            print 'altering model %s' % obj
            table = obj._meta.translation_model._meta.db_table
            db.debug = True
            # do this in a transaction
            db.start_transaction()
            # first add the column with nullable values, and no index
            lc_field = models.CharField(max_length=15, blank=True, null=True)
            db.add_column(table, 'language_code', lc_field)
            # migrate the model
            print 'migrating data'
            # do the conversion server-side
            # all modern RDBMSs support the case statement
            update_sql = "UPDATE %s SET language_code = (CASE language_id %s END)" % (table,
                ' '.join(
                    "WHEN %d THEN '%s'" % (lid, get_code_by_id(lid))
                    for lid in range(1, len(settings.LANGUAGES) + 1)
                )
            )
            db.execute(update_sql)
            print 'deleting language_id column'
            db.delete_unique(table, ['language_id', 'master_id'])
            db.delete_column(table, 'language_id')
            print 'setting up constraints and indices'
            # alter the column to set not null
            lc_field.null = False
            db.alter_column(table, 'language_code', lc_field)
            ## we don't really need this indexed. all queries should hit the unique index
            #db.create_index(table, ['language_code'])
            # and create a unique index for master & language
            db.create_unique(table, ['language_code', 'master_id'])
            # south might fail to commit if we don't do it explicitly
            db.commit_transaction()
| Python |
"""
Django-multilingual: language-related settings and functions.
"""
# Note: this file did become a mess and will have to be refactored
# after the configuration changes get in place.
#retrieve language settings from settings.py
from multilingual import settings
from django.utils.translation import ugettext_lazy as _
from multilingual.exceptions import LanguageDoesNotExist
try:
from threading import local
except ImportError:
from django.utils._threading_local import local
thread_locals = local()
def get_language_count():
    """Return how many languages are configured."""
    return len(settings.LANGUAGES)
def get_language_name(language_code):
    """Return the display name configured for language_code."""
    return settings.LANG_DICT[language_code]
def get_language_bidi(language_code):
    """Return True when language_code is configured as right-to-left."""
    return language_code in settings.LANGUAGES_BIDI
def get_language_code_list():
    """Return the configured language codes (keys of LANG_DICT)."""
    return settings.LANG_DICT.keys()
def get_language_choices():
    """Return the (code, name) language choices straight from settings."""
    return settings.LANGUAGES
def set_default_language(language_code):
    """Store language_code as the thread-local default translation language."""
    thread_locals.DEFAULT_LANGUAGE = language_code
def get_default_language():
    """Return the thread-local default language, or the configured default."""
    return getattr(thread_locals, 'DEFAULT_LANGUAGE', settings.DEFAULT_LANGUAGE)

# Backwards-compatible alias.
get_default_language_code = get_default_language
def _to_db_identifier(name):
"""
Convert name to something that is usable as a field name or table
alias in SQL.
For the time being assume that the only possible problem with name
is the presence of dashes.
"""
return name.replace('-', '_')
def get_translation_table_alias(translation_table_name, language_code):
    """
    Return the SQL alias of the translation table for language_code.
    """
    return '%s_%s' % (translation_table_name, _to_db_identifier(language_code))
def get_language_idx(language_code):
    """Return the position of language_code within the configured code list."""
    codes = get_language_code_list()
    return codes.index(language_code)
def get_translated_field_alias(field_name, language_code):
    """
    Return the SQL alias used for field_name in language_code.
    """
    return '_trans_%s_%s' % (field_name, _to_db_identifier(language_code))
def get_fallbacks(language_code):
    """
    Return the fallback language codes for language_code, most specific
    first: the code itself, then (for codes like 'en-us', when
    IMPLICIT_FALLBACK is enabled) the bare two-letter code, then any
    explicitly configured fallbacks.
    """
    # Copy the configured list: the original code mutated the list object
    # stored in settings.FALLBACK_LANGUAGES, so fallbacks silently
    # accumulated in the settings across repeated calls.
    fallbacks = list(settings.FALLBACK_LANGUAGES.get(language_code, []))
    # Guard before len(): the original crashed on language_code=None even
    # though the later "is not None" check shows None is an expected input.
    if language_code is not None:
        if len(language_code) != 2 and settings.IMPLICIT_FALLBACK:
            if not language_code[:2] in fallbacks:
                fallbacks.insert(0, language_code[:2])
        if language_code not in fallbacks:
            fallbacks.insert(0, language_code)
    return fallbacks
FALLBACK_FIELD_SUFFIX = '_any' | Python |
"""
Django-multilingual-ng: multilingual model support for Django 1.2.
Note about version numbers:
- uneven minor versions are considered unstable releases
- even minor versions are considered stable releases
"""
#VERSION = ('0', '1', '44')
#__version__ = '.'.join(VERSION)
import warnings
class LazyInit(object):
    """
    Proxy that replaces the ``multilingual`` module object so deprecated
    attribute names keep working: on first access they are lazily imported,
    a DeprecationWarning is emitted, and the result is cached.
    """
    VERSION = ('0', '1', '45')
    __version__ = '.'.join(VERSION)
    # Deprecated attribute -> (module path, object name or None for the module).
    __deprecated__ = {
        'models': ('multilingual.models', None),
        'TranslationDoesNotExist': ('multilingual.exceptions', 'TranslationDoesNotExist'),
        'LanguageDoesNotExist': ('multilingual.exceptions', 'LanguageDoesNotExist'),
        'set_default_language': ('multilingual.languages', 'set_default_language'),
        'get_default_language': ('multilingual.languages', 'get_default_language'),
        'get_language_code_list': ('multilingual.languages', 'get_language_code_list'),
        'FALLBACK_LANGUAGES': ('multilingual.settings', 'FALLBACK_LANGUAGES'),
        'Translation': ('multilingual.translation', 'TranslationModel'),
        'MultilingualModelAdmin': ('multilingual.admin', 'MultilingualModelAdmin'),
        'MultilingualInlineAdmin': ('multilingual.admin', 'MultilingualInlineAdmin'),
        'ModelAdmin': ('multilingual.admin', 'MultilingualModelAdmin'),
        'Manager': ('multilingual.manager', 'MultilingualManager'),
    }
    # Old name -> preferred new name (triggers an extra rename warning).
    __newnames__ = {
        'Translation': 'TranslationModel',
        'ModelAdmin': 'MultilingualModelAdmin',
        'Manager': 'MultilingualManager',
    }
    # Class-level caches, shared by design (one proxy instance per process).
    __modules_cache__ = {}
    __objects_cache__ = {}
    def __init__(self, real):
        # ``real`` is the original module object being wrapped.
        self.__real__ = real
    def __getattr__(self, attr):
        # Non-deprecated names fall straight through to the real module.
        if not attr in self.__deprecated__:
            return getattr(self.__real__, attr)
        if attr in self.__objects_cache__:
            return self.__objects_cache__[attr]
        return self._load(attr)
    def _import(self, modname):
        # Lazily import django's importlib helper and cache it on the instance.
        if not hasattr(self, '_importlib'):
            mod = __import__('django.utils.importlib', fromlist=['django', 'utils'])
            self._importlib = mod
        return self._importlib.import_module(modname)
    def _load(self, attr):
        # Import the replacement object, warn, cache it, and return it.
        modname, objname = self.__deprecated__[attr]
        if not modname in self.__modules_cache__:
            self.__modules_cache__[modname] = self._import(modname)
        obj = self.__modules_cache__[modname]
        if objname is not None:
            obj = getattr(obj, objname)
        if attr in self.__newnames__:
            self._warn_newname(attr)
        self._warn_deprecated(attr, modname, objname)
        self.__objects_cache__[attr] = obj
        return obj
    def _warn_newname(self, attr):
        # Extra warning for names that were renamed, not just moved.
        new = self.__newnames__[attr]
        warnings.warn("The name '%s' is deprecated in favor of '%s'" % (attr, new), DeprecationWarning)
    def _warn_deprecated(self, attr, modname, objname):
        # Point users at the new import location.
        if objname:
            msg = "'multilingual.%s' is deprecated in favor of '%s.%s'" % (attr, modname, objname)
        else:
            msg = "'multilingual.%s' is deprecated in favor of '%s'" % (attr, modname)
        warnings.warn(msg, DeprecationWarning)
import sys
# Replace this module object with the LazyInit proxy so the deprecated
# attribute names above keep working (with warnings) on first access.
sys.modules[__name__] = LazyInit(sys.modules[__name__])
#
#try:
# """
# WARNING: All these names imported here WILL BE DEPRECATED!
# """
# from multilingual import models
# from multilingual.exceptions import TranslationDoesNotExist, LanguageDoesNotExist
# from multilingual.languages import (set_default_language, get_default_language,
# get_language_code_list)
# from multilingual.settings import FALLBACK_LANGUAGES
# from multilingual.translation import Translation
# from multilingual.admin import MultilingualModelAdmin, MultilingualInlineAdmin
# from multilingual.manager import Manager
# ModelAdmin = MultilingualModelAdmin
#except ImportError:
# pass
| Python |
from django.utils.translation import get_language
from multilingual.exceptions import LanguageDoesNotExist
from multilingual.languages import set_default_language
class DefaultLanguageMiddleware(object):
    """
    Binds DEFAULT_LANGUAGE_CODE to django's currently selected language.
    The effect of enabling this middleware is that translated fields can be
    accessed by their name; i.e. model.field instead of model.field_en.
    """
    def process_request(self, request):
        # NOTE(review): nothing below actually reads request.session --
        # confirm whether the session requirement is still needed.
        assert hasattr(request, 'session'), "The DefaultLanguageMiddleware \
middleware requires session middleware to be installed. Edit your \
MIDDLEWARE_CLASSES setting to insert \
'django.contrib.sessions.middleware.SessionMiddleware'."
        try:
            set_default_language(get_language())
        except LanguageDoesNotExist:
            # Try without the territory suffix
            set_default_language(get_language()[:2])
| Python |
from django.db import models
from django.contrib.sites.models import Site
from django.utils.translation import ugettext_lazy as _
from multilingual.translation import Translation as TranslationBase
from multilingual.exceptions import TranslationDoesNotExist
from multilingual.manager import MultilingualManager
class MultilingualFlatPage(models.Model):
    """Translatable variant of django.contrib.flatpages' FlatPage.

    Non-translatable fields are declared directly on the model; the
    translatable ``title`` and ``content`` live on the inner ``Translation``
    class and are materialised per language by the multilingual machinery.
    """
    # non-translatable fields first
    url = models.CharField(_('URL'), max_length=100, db_index=True)
    enable_comments = models.BooleanField(_('enable comments'))
    template_name = models.CharField(_('template name'), max_length=70, blank=True,
        help_text=_("Example: 'flatpages/contact_page.html'. If this isn't provided, the system will use 'flatpages/default.html'."))
    registration_required = models.BooleanField(_('registration required'), help_text=_("If this is checked, only logged-in users will be able to view the page."))
    sites = models.ManyToManyField(Site)
    # language-aware manager so lookups can filter on translated fields
    objects = MultilingualManager()
    # And now the translatable fields
    class Translation(TranslationBase):
        """
        The definition of translation model.
        The multilingual machinery will automatically add these to the
        MultilingualFlatPage class:
        * get_title(language_id=None)
        * set_title(value, language_id=None)
        * get_content(language_id=None)
        * set_content(value, language_id=None)
        * title and content properties using the methods above
        """
        title = models.CharField(_('title'), max_length=200)
        content = models.TextField(_('content'), blank=True)
    class Meta:
        db_table = 'multilingual_flatpage'
        verbose_name = _('multilingual flat page')
        verbose_name_plural = _('multilingual flat pages')
        ordering = ('url',)
    def __unicode__(self):
        # note that you can use name and description fields as usual
        try:
            return u"%s -- %s" % (self.url, self.title)
        except TranslationDoesNotExist:
            # No translation available in the active language; show a
            # placeholder instead of raising (keeps admin listings working).
            return u"-not-available-"
    def get_absolute_url(self):
        # URLs are stored absolute (with leading slash), so the stored value
        # is already the canonical URL.
        return self.url
| Python |
from django.conf.urls.defaults import *
# Catch-all route: the whole remaining path is captured as the ``url``
# keyword argument of the multilingual_flatpage view.
urlpatterns = patterns('multilingual.flatpages.views',
    (r'^(?P<url>.*)$', 'multilingual_flatpage'),
)
| Python |
from multilingual.flatpages.models import MultilingualFlatPage
from django.template import loader, RequestContext
from django.shortcuts import get_object_or_404
from django.http import HttpResponse, HttpResponseRedirect
from django.conf import settings
from django.core.xheaders import populate_xheaders
from django.utils.safestring import mark_safe
from django.utils.translation import get_language
import multilingual
# Fallback template used when a flatpage has no template_name set.
DEFAULT_TEMPLATE = 'flatpages/default.html'
def multilingual_flatpage(request, url):
    """
    Serve the MultilingualFlatPage matching ``url`` on the current site.

    Mirrors django.contrib.flatpages' flatpage view, except that title and
    content come from the translation for Django's currently active
    language (with the multilingual machinery's usual fallback).
    Context:
        flatpage
            the MultilingualFlatPage instance
    """
    if not url.endswith('/') and settings.APPEND_SLASH:
        return HttpResponseRedirect("%s/" % request.path)
    if not url.startswith('/'):
        url = "/" + url
    flatpage = get_object_or_404(MultilingualFlatPage, url__exact=url, sites__id__exact=settings.SITE_ID)
    # Pages flagged registration_required are only visible to logged-in
    # users; everyone else is bounced to the login page.
    if flatpage.registration_required and not request.user.is_authenticated():
        from django.contrib.auth.views import redirect_to_login
        return redirect_to_login(request.path)
    # Serve the content in the language selected by Django's translation
    # machinery when available, else the default language.
    flatpage._default_language = get_language()
    if flatpage.template_name:
        template = loader.select_template((flatpage.template_name, DEFAULT_TEMPLATE))
    else:
        template = loader.get_template(DEFAULT_TEMPLATE)
    # Title and content are raw HTML by design; mark them safe up front so
    # templates do not need the "|safe" filter.
    flatpage.title = mark_safe(flatpage.title)
    flatpage.content = mark_safe(flatpage.content)
    context = RequestContext(request, {'flatpage': flatpage})
    response = HttpResponse(template.render(context))
    populate_xheaders(request, response, MultilingualFlatPage, flatpage.id)
    return response
| Python |
from multilingual.flatpages.views import multilingual_flatpage
from django.http import Http404
from django.conf import settings
class FlatpageFallbackMiddleware(object):
    """
    On a 404 response, try to serve a MultilingualFlatPage for the requested
    path; all other responses pass through untouched.
    """
    def process_response(self, request, response):
        if response.status_code != 404:
            return response # No need to check for a flatpage for non-404 responses.
        try:
            return multilingual_flatpage(request, request.path_info)
        except Http404:
            return response
        except Exception:
            # Return the original response if any error happened. Because
            # this is a middleware, we can't assume the errors will be
            # caught elsewhere. A bare ``except:`` would also swallow
            # SystemExit/KeyboardInterrupt, so catch Exception only.
            if settings.DEBUG:
                raise
            return response
| Python |
from django import forms
from django.contrib import admin
from multilingual.flatpages.models import MultilingualFlatPage
from django.utils.translation import ugettext_lazy as _
from multilingual.admin import MultilingualModelAdmin, MultilingualModelAdminForm
class MultilingualFlatpageForm(MultilingualModelAdminForm):
    """Admin form for MultilingualFlatPage; validates the URL format."""
    # NOTE(review): ``error_message`` is the pre-Django-1.2 spelling; newer
    # Django expects ``error_messages={'invalid': ...}`` -- confirm the
    # targeted Django version before upgrading.
    url = forms.RegexField(label=_("URL"), max_length=100, regex=r'^[-\w/]+$',
        help_text = _("Example: '/about/contact/'. Make sure to have leading"
                      " and trailing slashes."),
        error_message = _("This value must contain only letters, numbers,"
                          " underscores, dashes or slashes."))
    class Meta:
        model = MultilingualFlatPage
class MultilingualFlatPageAdmin(MultilingualModelAdmin):
    """Admin options for MultilingualFlatPage (language-aware change form)."""
    form = MultilingualFlatpageForm
    # ``use_fieldsets`` is copied onto ``fieldsets`` by
    # MultilingualModelAdmin.__init__, bypassing the admin's field-name
    # validation for the translated fields.
    use_fieldsets = (
        (None, {'fields': ('title', 'url', 'sites', 'content')}),
        (_('Advanced options'), {'classes': ('collapse',), 'fields': ('enable_comments', 'registration_required', 'template_name')}),
    )
    list_display = ('url', 'title')
    list_filter = ('sites', 'enable_comments', 'registration_required')
    search_fields = ('url', 'title')
admin.site.register(MultilingualFlatPage, MultilingualFlatPageAdmin)
| Python |
"""Admin support for inlines
Peter Cicman, Divio GmbH, 2008
"""
from django.utils.text import capfirst, get_text_list
from django.contrib.admin.util import flatten_fieldsets
from django.http import HttpResponseRedirect
from django.utils.encoding import force_unicode
import re
from copy import deepcopy
from django.conf import settings
from django import forms
from django.contrib import admin
from django.db.models import Model
from django.forms.util import ErrorList, ValidationError
from django.forms.models import BaseInlineFormSet, ModelFormMetaclass
from django.utils.translation import ugettext as _
from django.template.loader import find_template
from django.template import TemplateDoesNotExist
from multilingual.languages import get_default_language
from multilingual.utils import GLL
# Prefixes used to namespace the generated translated form fields so they
# cannot collide with regular model form fields.
MULTILINGUAL_PREFIX = '_ml__trans_'
MULTILINGUAL_INLINE_PREFIX = '_ml__inline_trans_'
def gll(func):
    """
    Decorator for ModelAdmin views: resolves the language being edited from
    the request (``?lang=`` / ``?language=``, falling back to the site
    default), holds the global language lock (GLL) around the wrapped call,
    and always releases it afterwards.
    """
    import functools
    @functools.wraps(func)
    def wrapped(cls, request, *args, **kwargs):
        cls.use_language = request.GET.get('lang', request.GET.get('language', get_default_language()))
        GLL.lock(cls.use_language)
        try:
            return func(cls, request, *args, **kwargs)
        finally:
            # Release even when the view raises; otherwise the lock would
            # leak into subsequent requests.
            GLL.release()
    return wrapped
def standard_get_fill_check_field(stdopts):
    """
    Pick a field on the translation model usable as an "is this language
    filled in?" probe: the first required field that is not part of the
    translation bookkeeping (``language_code``/``master``).

    Returns the field name, or None when there is no translation model or
    no suitable field.
    """
    if not hasattr(stdopts, 'translation_model'):
        return None
    bookkeeping = ('language_code', 'master')
    for field in stdopts.translation_model._meta.fields:
        if field.name in bookkeeping:
            continue
        if not field.blank and not field.null:
            return field.name
    return None
def relation_hack(form, fields, prefix=''):
    """
    Copy the instance's current values into the form's initial data for the
    given (name, db_field) pairs, normalising related values: ForeignKey
    instances become pk values, and any m2m / reverse relation manager is
    turned into its queryset via ``.all()``.
    """
    opts = form.instance._meta
    local_m2m_names = [m2m.attname for m2m in opts.local_many_to_many]
    external_fk_names = [obj.field.related_query_name() for obj in opts.get_all_related_objects()]
    external_m2m_names = [m2m.get_accessor_name() for m2m in opts.get_all_related_many_to_many_objects()]
    for name, db_field in fields:
        form_field_name = '%s%s' % (prefix, name)
        if form_field_name not in form.fields:
            continue
        value = getattr(form.instance, name, '')
        if isinstance(value, Model):
            # local ForeignKey: initialise with the primary key
            value = value.pk
        elif name in local_m2m_names or name in external_fk_names or name in external_m2m_names:
            # any relation manager (local m2m, external FK accessor or
            # external m2m accessor): initialise with its queryset
            value = value.all()
        form.fields[form_field_name].initial = value
class MultilingualInlineModelForm(forms.ModelForm):
    """Inline model form that seeds the prefixed translated form fields
    with the instance's current values when editing an existing object."""
    def __init__(self, data=None, files=None, auto_id='id_%s', prefix=None,
                 initial=None, error_class=ErrorList, label_suffix=':',
                 empty_permitted=False, instance=None):
        """
        Fill initial ML Fields
        """
        super(MultilingualInlineModelForm, self).__init__(data, files, auto_id,
            prefix, initial, error_class, label_suffix, empty_permitted, instance)
        # only read initial data if the object already exists, not if we're adding it!
        if self.instance.pk:
            relation_hack(self, get_translated_fields(self.instance), MULTILINGUAL_INLINE_PREFIX)
class MultilingualInlineFormSet(BaseInlineFormSet):
    """Inline formset that writes translated field data directly onto the
    saved objects, avoiding a second save of the translation rows.

    ``ml_fields`` ({real field name -> prefixed form field name}) is
    attached by MultilingualInlineAdmin.get_formset.
    """
    def get_queryset(self):
        # Same as the stock implementation, but guarantees a deterministic
        # ordering (by pk) and honours max_num.
        if self.queryset is not None:
            qs = self.queryset
        else:
            qs = self.model._default_manager.get_query_set()
        if not qs.ordered:
            qs = qs.order_by(self.model._meta.pk.name)
        if self.max_num > 0:
            _queryset = qs[:self.max_num]
        else:
            _queryset = qs
        return _queryset
    def save_new(self, form, commit=True):
        """
        NOTE: save_new method is completely overridden here, there's no
        other way to prevent a double save otherwise. Just assign translated
        data to the object.
        """
        kwargs = {self.fk.get_attname(): self.instance.pk}
        new_obj = self.model(**kwargs)
        self._prepare_multilingual_object(new_obj, form)
        return forms.save_instance(form, new_obj, exclude=[self._pk_field.name], commit=commit)
    def save_existing(self, form, instance, commit=True):
        """
        NOTE: save_existing method is completely overridden here, there's no
        other way to prevent a double save otherwise. Just assign translated
        data to the object.
        """
        self._prepare_multilingual_object(instance, form)
        return forms.save_instance(form, instance, exclude=[self._pk_field.name], commit=commit)
    def _prepare_multilingual_object(self, obj, form):
        # For each translated form field: let the field store the cleaned
        # value on self.instance, then copy it to the language-suffixed
        # attribute on the object being saved.
        opts = obj._meta
        for realname, fieldname in self.ml_fields.items():
            field = opts.get_field_by_name(realname)[0]
            m = re.match(r'^%s(?P<field_name>.*)$' % MULTILINGUAL_INLINE_PREFIX, fieldname)
            if m:
                field.save_form_data(self.instance, form.cleaned_data[fieldname])
                # realname is '<base>_<lang>'; read the freshly stored value
                # back from the unsuffixed attribute on self.instance.
                setattr(obj, realname, getattr(self.instance, realname.rsplit('_', 1)[0]))
class MultilingualInlineAdmin(admin.TabularInline):
    """TabularInline that edits the translated fields for the language
    currently held by the global language lock (GLL)."""
    formset = MultilingualInlineFormSet
    form = MultilingualInlineModelForm
    template = 'admin/multilingual/edit_inline/tabular.html'
    # css class added to inline box
    inline_css_class = None
    use_language = None
    # name of the probe field used to decide whether a language is filled
    # in; resolved lazily in get_fill_check_field()
    fill_check_field = None
    #TODO: add some nice template
    def __init__(self, parent_model, admin_site):
        super(MultilingualInlineAdmin, self).__init__(parent_model, admin_site)
        if hasattr(self, 'use_fields'):
            # go around admin fields structure validation
            self.fields = self.use_fields
    def get_formset(self, request, obj=None, **kwargs):
        # Augment the formset's form with one form field per translated
        # model field (namespaced with the inline prefix) and remember the
        # mapping so the formset can write the values back on save.
        FormSet = super(MultilingualInlineAdmin, self).get_formset(request, obj, **kwargs)
        FormSet.use_language = GLL.language_code
        FormSet.ml_fields = {}
        for name, field in get_translated_fields(self.model, GLL.language_code):
            fieldname = '%s%s' % (MULTILINGUAL_INLINE_PREFIX, name)
            FormSet.form.base_fields[fieldname] = self.formfield_for_dbfield(field, request=request)
            FormSet.ml_fields[name] = fieldname
        return FormSet
    def queryset(self, request):
        """
        Filter objects which don't have a value in this language
        """
        qs = super(MultilingualInlineAdmin, self).queryset(request)
        # Don't know what the hell I was thinking here, but this code breaks stuff:
        #
        # checkfield = self.get_fill_check_field()
        # if checkfield is not None:
        #     kwargs = {str('%s_%s__isnull' % (checkfield, GLL.language_code)): False}
        #     from django.db.models.fields import CharField
        #     if isinstance(self.model._meta.translation_model._meta.get_field_by_name(checkfield)[0], CharField):
        #         kwargs[str('%s_%s__gt' % (checkfield, GLL.language_code))] = ''
        #     return qs.filter(**kwargs)
        return qs.filter(translations__language_code=GLL.language_code).distinct()
    def get_fill_check_field(self):
        # lazily resolve and cache the probe field
        if self.fill_check_field is None:
            self.fill_check_field = standard_get_fill_check_field(self.model._meta)
        return self.fill_check_field
class MultilingualModelAdminForm(forms.ModelForm):
    """Admin form that renders / saves the multilingual fields connected to
    the model when the per-language admin is used.

    ``ml_fields`` (iterable of translated field base names) is attached by
    MultilingualModelAdmin.get_form.
    """
    # for rendering / saving multilingual fields connected to model, takes
    # place when admin per language is used
    def __init__(self, data=None, files=None, auto_id='id_%s', prefix=None,
                 initial=None, error_class=ErrorList, label_suffix=':',
                 empty_permitted=False, instance=None):
        """
        Fill up initial ML Fields
        """
        super(MultilingualModelAdminForm, self).__init__(data, files, auto_id, prefix,
                                                         initial, error_class, label_suffix,
                                                         empty_permitted, instance)
        # only try to fill initial data if we are not adding an object!
        if self.instance.pk:
            # read the current language-suffixed values off the instance
            fields = [(f, getattr(self.instance, "%s_%s" % (f, GLL.language_code), '')) for f in self.ml_fields]
            relation_hack(self, fields)
    def clean(self):
        cleaned_data = super(MultilingualModelAdminForm, self).clean()
        self.validate_ml_unique()
        return cleaned_data
    def validate_ml_unique(self):
        # Enforce the translation model's unique_together constraints for
        # the language currently being edited.
        form_errors = []
        if not hasattr(self.instance._meta, 'translation_model'):
            return
        for check in self.instance._meta.translation_model._meta.unique_together[:]:
            lookup_kwargs = {'language_code': GLL.language_code}
            for field_name in check:
                #local_name = "%s_%s" % (field_name, self.use_language)
                if self.cleaned_data.get(field_name) is not None:
                    lookup_kwargs[field_name] = self.cleaned_data.get(field_name)
            # skip the implicit (master, language_code) bookkeeping pair
            if len(check) == 2 and 'master' in check and 'language_code' in check:
                continue
            qs = self.instance._meta.translation_model.objects.filter(**lookup_kwargs)
            if self.instance.pk is not None:
                # exclude our own translation rows when editing
                qs = qs.exclude(master=self.instance.pk)
            if qs.count():
                model_name = capfirst(self.instance._meta.verbose_name)
                field_labels = []
                for field_name in check:
                    if field_name == "language_code":
                        field_labels.append(_("language"))
                    elif field_name == "master":
                        continue
                    else:
                        field_labels.append(self.instance._meta.translation_model._meta.get_field_by_name(field_name)[0].verbose_name)
                field_labels = get_text_list(field_labels, _('and'))
                form_errors.append(
                    _(u"%(model_name)s with this %(field_label)s already exists.") % \
                    {'model_name': unicode(model_name),
                     'field_label': unicode(field_labels)}
                )
        if form_errors:
            # Raise the unique together errors since they are considered
            # form-wide.
            raise ValidationError(form_errors)
    def save(self, commit=True):
        self._prepare_multilingual_object(self.instance, self)
        return super(MultilingualModelAdminForm, self).save(commit)
    def _prepare_multilingual_object(self, obj, form):
        # Push each translated cleaned value onto the object: first via the
        # field's save_form_data, then copied to the language-suffixed
        # attribute for the locked language.
        opts = self.instance._meta
        for name in self.ml_fields:
            field = opts.get_field_by_name(name)[0]
            # respect save_form_data
            field.save_form_data(self.instance, form.cleaned_data[name])
            setattr(obj, "%s_%s" % (name, GLL.language_code), getattr(self.instance, name))
class MultilingualModelAdmin(admin.ModelAdmin):
    """ModelAdmin that edits one translation language at a time; the active
    language is taken from the global language lock (GLL), which the
    ``gll`` decorator sets per view from the request."""
    # use special template to render tabs for languages on top
    change_form_template = "admin/multilingual/change_form.html"
    form = MultilingualModelAdminForm
    _multilingual_model_admin = True
    use_language = None
    # probe field used to decide which languages are "filled in"
    fill_check_field = None
    # 'use_<attr>' class attributes are copied over the real admin
    # attributes in __init__ (bypasses the admin's validation of
    # translated field names)
    _use_hacks = ['fieldsets', 'prepopulated_fields', 'readonly_fields']
    class Media:
        css = {
            'all': ('%smultilingual/admin/css/style.css' % settings.MEDIA_URL,)
        }
    def __init__(self, model, admin_site):
        for attr in self._use_hacks:
            if hasattr(self, 'use_%s' % attr):
                setattr(self, attr, getattr(self, 'use_%s' % attr))
        super(MultilingualModelAdmin, self).__init__(model, admin_site)
    def get_fill_check_field(self):
        # lazily resolve and cache the probe field
        if self.fill_check_field is None:
            self.fill_check_field = standard_get_fill_check_field(self.model._meta)
        return self.fill_check_field
    def get_form(self, request, obj=None, **kwargs):
        # assign language to inlines, so they know how to render
        for inline in self.inline_instances:
            if isinstance(inline, MultilingualInlineAdmin):
                inline.use_language = GLL.language_code
        Form = super(MultilingualModelAdmin, self).get_form(request, obj, **kwargs)
        Form.ml_fields = {}
        for name, field in get_default_translated_fields(self.model):
            if not field.editable:
                continue
            form_field = self.formfield_for_dbfield(field, request=request)
            local_name = "%s_%s" % (name, GLL.language_code)
            # NOTE(review): ``local_name`` is computed but never used and the
            # field is registered under the bare name -- confirm whether the
            # language-suffixed name was intended here.
            Form.ml_fields[name] = form_field
            Form.base_fields[name] = form_field
        Form.use_language = GLL.language_code
        return Form
    def placeholder_plugin_filter(self, request, queryset):
        """
        This is only used on models which use placeholders from the django-cms
        """
        if not request:
            return queryset
        if GLL.is_active:
            return queryset.filter(language=GLL.language_code)
        return queryset
    @gll
    def change_view(self, *args, **kwargs):
        return super(MultilingualModelAdmin, self).change_view(*args, **kwargs)
    @gll
    def add_view(self, *args, **kwargs):
        return super(MultilingualModelAdmin, self).add_view(*args, **kwargs)
    @gll
    def delete_view(self, *args, **kwargs):
        return super(MultilingualModelAdmin, self).delete_view(*args, **kwargs)
    def render_change_form(self, request, context, add=False, change=False, form_url='', obj=None):
        # add context variables (language tabs need to know which languages
        # already have content)
        filled_languages = []
        fill_check_field = self.get_fill_check_field()
        if obj and fill_check_field is not None:
            from django.db.models.fields import CharField
            kwargs = {'%s__isnull' % fill_check_field:False}
            # for char fields an empty string also counts as "not filled"
            if isinstance(self.model._meta.translation_model._meta.get_field_by_name(fill_check_field)[0], CharField):
                kwargs['%s__gt' % fill_check_field] = ''
            filled_languages = [t[0] for t in obj.translations.filter(**kwargs).values_list('language_code')]
        context.update({
            'current_language_index': GLL.language_code,
            'current_language_code': GLL.language_code,
            'filled_languages': filled_languages,
            'old_template': self.get_old_template(),
        })
        return super(MultilingualModelAdmin, self).render_change_form(request, context, add, change, form_url, obj)
    def get_old_template(self):
        # Return the most specific change_form template the project
        # provides; implicitly None when no candidate exists.
        opts = self.model._meta
        app_label = opts.app_label
        search_templates = [
            "admin/%s/%s/change_form.html" % (app_label, opts.object_name.lower()),
            "admin/%s/change_form.html" % app_label,
            "admin/change_form.html"
        ]
        for template in search_templates:
            try:
                find_template(template)
                return template
            except TemplateDoesNotExist:
                pass
    def response_change(self, request, obj):
        # because save & continue - so it shows the same language
        if request.POST.has_key("_continue"):
            opts = obj._meta
            msg = _('The %(name)s "%(obj)s" was changed successfully.') % {'name': force_unicode(opts.verbose_name), 'obj': force_unicode(obj)}
            self.message_user(request, msg + ' ' + _("You may edit it again below."))
            lang, path = request.GET.get('language', get_default_language()), request.path
            if lang:
                lang = "language=%s" % lang
            # keep the popup flag when redirecting back to the change form
            if request.REQUEST.has_key('_popup'):
                path += "?_popup=1" + "&%s" % lang
            else:
                path += "?%s" % lang
            return HttpResponseRedirect(path)
        return super(MultilingualModelAdmin, self).response_change(request, obj)
def get_translated_fields(model, language=None):
    """
    Yield (name, field) pairs for the translatable fields of ``model``.

    Without ``language``: every non-default translated field registered in
    the translation meta's ``translated_fields``. With ``language``: the
    fields suffixed for that language, in the order they are declared on
    the translation model (primary key excluded).
    Yields nothing when the model has no translation machinery.
    """
    meta = model._meta
    if not hasattr(meta, 'translated_fields'):
        if not hasattr(meta, 'translation_model'):
            return
        meta = meta.translation_model._meta
    if not language:
        # all the translatable fields, except the default-language aliases
        for name, (field, non_default) in meta.translated_fields.items():
            if non_default:
                yield name, field
        return
    # language given: preserve the declaration order of the translation model
    for declared in meta.fields:
        if declared.primary_key:
            continue
        suffixed = "%s_%s" % (declared.name, language)
        entry = meta.translated_fields.get(suffixed, None)
        if entry:
            yield suffixed, entry[0]
def get_default_translated_fields(model):
    """Yield (name, field) pairs for the default-language translated fields
    of ``model``; yields nothing when there is no translation model."""
    if not hasattr(model._meta, 'translation_model'):
        return
    translated = model._meta.translation_model._meta.translated_fields
    for name, (field, non_default) in translated.items():
        if not non_default:
            yield name, field
| Python |
#!/usr/bin/env python
# Standard pre-Django-1.4 manage.py bootstrap: import the project settings
# module from the current directory and hand control to Django.
from django.core.management import execute_manager
try:
    import settings # Assumed to be in the same directory.
except ImportError:
    import sys
    sys.stderr.write("Error: Can't find the file 'settings.py' in the directory containing %r. It appears you've customized things.\nYou'll have to run django-admin.py, passing it your settings module.\n(If the file settings.py does indeed exist, it's causing an ImportError somehow.)\n" % __file__)
    sys.exit(1)
if __name__ == "__main__":
    execute_manager(settings)
| Python |
import os
# Connection presets for each supported backend; select one by assigning it
# to ``config`` at the bottom.
config_1 = {
    'database': {
        'type': 'postgresql',
        'user': 'postgres',
        'passwd': 'postgres',
        'db': 'test',
        'port': 5433,
        'host': 'localhost',
        'charset': 'utf8'
    }
}
config_2 = {
    'database': {
        'type': 'sqlite3',
        'user': '',
        'passwd': '',
        # for sqlite3 'db' is the database file path
        'db': 'model.db',
        'port': '',
        'host': 'localhost',
        'charset': 'utf8'
    }
}
config_3 = {
    'database': {
        'type': 'mysql',
        'user': 'root',
        'passwd': 'root',
        'db': 'test',
        'port': 3306,
        'host': 'localhost',
        'charset': 'utf8'
    }
}
config_4 = {
    'database': {
        'type': 'mongodb',
        'user': '',
        'passwd': '',
        'db': 'model',
        'port': '',
        'host': 'localhost',
        'charset': 'utf8'
    }
}
# Active configuration (MySQL); consulted by Field and Model at runtime.
config = config_3
class ForeignKey(object):
    """
    Declarative foreign-key column: stores the target as 'table.column' and,
    when called, resolves the related rows through the model class that
    ``register()`` placed in this module's globals under the table name.
    """
    def __init__(self, model, *args, **kwargs):
        table, column = model.split('.')
        self.pk = None
        self.foreignModel = model
        self.table = table
        self.column = column
        # defaults to the referenced column name
        self.primary_key = kwargs.get('primary_key', column)
    def format(self):
        # foreign keys are stored as plain integer columns
        return ' int'
    def get_key(self):
        return self.primary_key
    def __set__(self, key, value):
        setattr(self, key, value)
    def __call__(self):
        related_model = globals()[self.table]
        return related_model().filter(**{self.column: self.pk})
class Relation(object):
    """
    Declarative reverse relation: stores the target as 'table.column' and,
    when called, filters the registered model class of that table by the
    remembered key value.
    """
    def __init__(self, model, *args, **kwargs):
        self.pk = None
        self.relationModel = model
        # relations key off 'id' on the local side unless told otherwise
        self.primary_key = kwargs.get('primary_key', 'id')
        self.table, self.column = model.split('.')
    def get_key(self):
        return self.primary_key
    def __set__(self, key, value):
        setattr(self, key, value)
    def __call__(self):
        related_model = globals()[self.table]
        return related_model().filter(**{self.column: self.pk})
class MysqlBase(object):
    """
    MySQL backend: thin wrapper over MySQLdb using dict-row cursors.

    WARNING(review): every query builder below interpolates values straight
    into the SQL string -- vulnerable to SQL injection for untrusted input;
    should use parameterized queries (cursor.execute(sql, params)).
    """
    def __init__(self, *args, **kwargs):
        import MySQLdb
        import MySQLdb.cursors
        self.__tablename__ = None
        # staged column->value data for the next save()
        self.objectManager = {}
        self.connection = MySQLdb.connect(
            host = config['database']['host'],
            user = config['database']['user'],
            passwd = config['database']['passwd'],
            db = config['database']['db'],
            port = config['database']['port'],
            charset = config['database']['charset'],
            cursorclass = MySQLdb.cursors.DictCursor
        )
        self.cursor = self.connection.cursor()
    def save(self):
        # INSERT the staged objectManager values; returns the new row id
        sql = "insert into "+self.__tablename__+" ("+",".join(self.objectManager.keys())+") values ("+",".join(["'%s'" % i for i in self.objectManager.values()])+")"
        self.execute(sql)
        return self.cursor.lastrowid
    def filter(self, *args, **kwargs):
        # positional args are raw SQL fragments; kwargs become equality tests
        query = ''
        if args and len(args) > 0:
            query += "and".join([i for i in args]) if len(args) > 1 else " ".join([i for i in args])
        if kwargs and len(kwargs) > 0:
            if query:
                query += ' and '
            query += " and".join(['%s=%s' % (key, value) for key, value in kwargs.iteritems()]) if len(kwargs) > 1 \
                else " ".join(['%s=%s' % (key, value) for key, value in kwargs.iteritems()])
        if query:
            print "select * from "+self.__tablename__+" where "+ query +""
            self.cursor.execute("select * from "+self.__tablename__+" where "+ query +"")
        return self.cursor.fetchall()
    def get(self, id):
        # fetch a single row by its ``id`` column
        self.cursor.execute("select * from "+self.__tablename__+" where id=%s" % id)
        return self.cursor.fetchone()
    def all(self):
        self.cursor.execute("select * from "+self.__tablename__+"")
        return self.cursor.fetchall()
    def create(self):
        # CREATE TABLE from the declared Field/ForeignKey attributes
        sql = []
        for key in dir(self):
            if isinstance(getattr(self, key), (Field, ForeignKey)):
                sql.append(key + getattr(self, key).format())
        sql = ",".join(sql)
        sql = "CREATE TABLE %s (%s)" % (self.__tablename__, sql)
        self.execute(sql)
    def delete(self, *args, **kwargs):
        sql = "delete from "+self.__tablename__+" where "+" ".join(['%s=%s' % (key, value) for key, value in kwargs.iteritems()])+""
        self.execute(sql)
    def execute(self, sql):
        # NOTE(review): no commit here, unlike the sqlite/postgres backends
        # -- presumably relies on autocommit; confirm.
        self.cursor.execute(sql)
    def primary_key(self, sql):
        # MySQL spelling of an auto-increment primary key column suffix
        return sql +" auto_increment PRIMARY KEY"
class SqliteBase(object):
    """
    SQLite backend via the stdlib sqlite3 module; rows are returned as
    dicts through a custom row_factory.

    WARNING(review): same string-interpolated SQL as the other backends --
    injection-prone for untrusted input; should use parameterized queries.
    """
    def __init__(self, *args, **kwargs):
        import sqlite3 as sqlite
        self.__tablename__ = None
        # staged column->value data for the next save()
        self.objectManager = {}
        # for sqlite, config 'db' is the database file path
        self.connection = sqlite.connect(config['database']['db'])
        self.connection.row_factory = self.dict_factory
        self.cursor = self.connection.cursor()
    def dict_factory(self, cursor, row):
        # sqlite3 row_factory: turn each result tuple into {column: value}
        d = {}
        for idx, col in enumerate(cursor.description):
            d[col[0]] = row[idx]
        return d
    def save(self):
        # INSERT the staged objectManager values; returns the new row id
        sql = "insert into "+self.__tablename__+" ("+",".join(self.objectManager.keys())+") values ("+",".join(["'%s'" % i for i in self.objectManager.values()])+")"
        self.execute(sql)
        return self.cursor.lastrowid
    def filter(self, *args, **kwargs):
        # kwargs become equality tests joined with AND
        self.cursor.execute("select * from "+self.__tablename__+" where "+" and".join(['%s=%s' % (key, value) for key, value in kwargs.iteritems()])+"")
        return self.cursor.fetchall()
    def get(self, id):
        self.cursor.execute("select * from "+self.__tablename__+" where id=%s" % id)
        return self.cursor.fetchone()
    def all(self):
        self.cursor.execute("select * from "+self.__tablename__+"")
        return self.cursor.fetchall()
    def delete(self, *args, **kwargs):
        sql = "delete from "+self.__tablename__+" where "+" ".join(['%s=%s' % (key, value) for key, value in kwargs.iteritems()])+""
        self.execute(sql)
    def execute(self, sql):
        # commit after each statement so writes are durable immediately
        self.cursor.execute(sql)
        self.connection.commit()
    def primary_key(self, sql):
        # SQLite spelling of an auto-increment primary key column suffix
        return sql + " PRIMARY KEY autoincrement"
class PostgresqlBase(object):
    """
    PostgreSQL backend via psycopg2 with RealDictCursor rows.

    WARNING(review): same string-interpolated SQL as the other backends --
    injection-prone for untrusted input; should use parameterized queries.
    """
    def __init__(self, *args, **kwargs):
        import psycopg2
        import psycopg2.extras
        self.__tablename__ = None
        # staged column->value data for the next save()
        self.objectManager = {}
        self.connection = psycopg2.connect(
            "dbname='%s' user='%s' host='%s' password='%s' port='%s' " % (
                config['database']['db'],
                config['database']['user'],
                config['database']['host'],
                config['database']['passwd'],
                config['database']['port']
            )
        );
        self.cursor = self.connection.cursor(cursor_factory = psycopg2.extras.RealDictCursor)
    def save(self):
        # INSERT with RETURNING id (postgres has no lastrowid); returns the
        # new primary key value
        sql = "insert into "+self.__tablename__+" ("+",".join(self.objectManager.keys())+") values ("+",".join(["'%s'" % i for i in self.objectManager.values()])+") RETURNING id"
        self.execute(sql)
        one = self.cursor.fetchone()
        if len(one) > 0:
            return one.get('id')
    def filter(self, *args, **kwargs):
        # kwargs become equality tests joined with AND
        self.cursor.execute("select * from "+self.__tablename__+" where "+" and".join(['%s=%s' % (key, value) for key, value in kwargs.iteritems()])+"")
        return self.cursor.fetchall()
    def get(self, id):
        self.cursor.execute("select * from "+self.__tablename__+" where id=%s" % id)
        return self.cursor.fetchone()
    def all(self):
        self.cursor.execute("select * from "+self.__tablename__+"")
        return self.cursor.fetchall()
    def create(self):
        # CREATE TABLE from the declared Field/ForeignKey attributes
        sql = []
        for key in dir(self):
            if isinstance(getattr(self, key), (Field, ForeignKey)):
                sql.append(key + getattr(self, key).format())
        sql = ",".join(sql)
        sql = "CREATE TABLE %s (%s)" % (self.__tablename__, sql)
        self.execute(sql)
    def delete(self, *args, **kwargs):
        sql = "delete from "+self.__tablename__+" where "+" ".join(['%s=%s' % (key, value) for key, value in kwargs.iteritems()])+""
        self.execute(sql)
    def execute(self, sql):
        # commit after each statement so writes are durable immediately
        self.cursor.execute(sql)
        self.connection.commit()
    def primary_key(self, sql):
        # Unlike the other backends this discards ``sql`` (the column type):
        # 'serial' already implies an integer column in PostgreSQL.
        return " serial PRIMARY KEY"
class MongoBase(object):
    """MongoDB backend via pymongo; collections are keyed by the model's
    __tablename__."""
    def __init__(self, *args, **kwargs):
        from pymongo import Connection
        self.__tablename__ = None
        # staged field->value data for the next save()
        self.objectManager = {}
        # default host/port; config host/port are not used here
        self.connection = Connection()
        self.db = self.connection[config['database']['db']]
    def save(self):
        # insert the staged document; returns the generated _id
        return self.db[self.__tablename__].insert(self.objectManager)
    def filter(self, *args, **kwargs):
        return self.db[self.__tablename__].find(kwargs)
    def get(self, id):
        return self.db[self.__tablename__].find_one({'_id':id})
    def all(self):
        return self.db[self.__tablename__].find()
    def delete(self):
        # NOTE(review): unlike the SQL backends this removes EVERY document
        # in the collection (no filter arguments) -- confirm intent.
        self.db[self.__tablename__].remove()
class ObjectManager(dict):
    """
    dict subclass with attribute-style access: reading a missing attribute
    yields None instead of raising, and assigning None is a no-op (so
    absent values never create keys).
    """
    def __getattr__(self, key):
        if key in self:
            return self[key]
        else:
            return None
    def __setattr__(self, key, value):
        # ``is not None`` instead of the original ``!= None``: an identity
        # test cannot be fooled by values whose __eq__ compares equal to
        # None, and is the idiomatic None check.
        if value is not None:
            self[key] = value
    def __delattr__(self, key):
        if key in self:
            del self[key]
class Field(object):
    """
    Declarative column definition: records the declared type, length and
    constraints, picks a matching validator, and renders itself as a column
    clause via format() using the backend selected in ``config``.
    """
    def __init__(self, *args, **kwargs):
        self.head_type = args[0]
        self.length = kwargs.get('length')
        self.not_null = kwargs.get('not_null')
        self.default = kwargs.get('default')
        self.primary_key = kwargs.get('primary_key')
        self.callable = None
        if self.head_type == 'int':
            self.callable = self.valid_int
        elif self.head_type == 'str':
            self.callable = self.valid_str
        elif self.head_type == 'datetime':
            self.callable = self.valid_datetime
        # NOTE(review): each Field opens its own backend connection here;
        # consider sharing a single connection per model instead.
        if config['database']['type'] == 'mysql':
            self.base = MysqlBase()
        elif config['database']['type'] == 'sqlite3':
            self.base = SqliteBase()
        elif config['database']['type'] == 'postgresql':
            self.base = PostgresqlBase()
        elif config['database']['type'] == 'mongodb':
            self.base = MongoBase()
    def valid_int(self, value):
        # returns True for ints, None otherwise (matching the other validators)
        if isinstance(value, int):
            return True
    def valid_str(self, value):
        if isinstance(value, str):
            return True
    def valid_datetime(self, value):
        import datetime
        # Fixed: previously this tested ``isinstance(self, datetime)``,
        # comparing the Field itself against the datetime *module*, which
        # raises TypeError. Validate the supplied value instead.
        if isinstance(value, datetime.datetime):
            return True
    def format(self):
        # render the column clause (type, length, constraints) for CREATE TABLE
        sql = None
        if self.head_type == 'int':
            if config['database']['type'] == 'sqlite3':
                sql = ' INTEGER '
            else:
                sql = ' int '
        elif self.head_type == 'str':
            sql = ' char'
        elif self.head_type == 'datetime':
            sql = ' timestamp'
        elif self.head_type == 'text':
            sql = ' text'
        if self.length and self.head_type == 'str':
            sql += " ("+str(self.length)+")"
        if self.not_null:
            sql += " NOT NULL"
        if self.default:
            sql += " default '"+self.default+"'"
        if self.primary_key:
            # backend-specific auto-increment primary key spelling
            sql = self.base.primary_key(sql)
        return sql
    def __call__(self, value):
        return self.callable(value)
class Model(object):
    """
    Base class for user models: dispatches storage operations to the
    backend selected in ``config`` and wraps result rows in ObjectManager
    instances with the declared relation descriptors attached.
    """
    def __init__(self, *args, **kwargs):
        # pick the backend implementation matching the active config
        if config['database']['type'] == 'mysql':
            self.base = MysqlBase()
        elif config['database']['type'] == 'sqlite3':
            self.base = SqliteBase()
        elif config['database']['type'] == 'postgresql':
            self.base = PostgresqlBase()
        elif config['database']['type'] == 'mongodb':
            self.base = MongoBase()
        self.objectManager = ObjectManager()
        # accept only kwargs matching declared Field/ForeignKey attributes
        for key, value in kwargs.iteritems():
            if isinstance(getattr(self, key), (Field, ForeignKey)):
                #if getattr(self, key)(value):
                self.objectManager[key] = value
        # hand the staged data and table name to the backend
        self.base.objectManager = self.objectManager
        self.base.__tablename__ = self.__tablename__
    def all(self):
        return self._format_key_object(self.base.all())
    def filter(self, *args, **kwargs):
        return self._format_key_object(self.base.filter(*args, **kwargs))
    def get(self, id):
        return self._format_key_object(self.base.get(id))
    def save(self):
        # returns the new row's primary key (backend-dependent)
        return self.base.save()
    def delete(self, *args, **kwargs):
        self.base.delete(*args, **kwargs)
    def create(self):
        # build and execute a CREATE TABLE from the declared fields
        sql = []
        for key in dir(self):
            if isinstance(getattr(self, key), (Field, ForeignKey)):
                sql.append(key + getattr(self, key).format())
        sql = ",".join(sql)
        sql = "CREATE TABLE %s (%s)" % (self.__tablename__, sql)
        self.base.execute(sql)
    def _format_key_object(self, data):
        # Wrap a row dict (or a sequence of row dicts) in ObjectManager,
        # attaching declared Field/ForeignKey/Relation descriptors that the
        # row does not shadow, and seeding relations with the row's key value.
        if isinstance(data, dict):
            obj = ObjectManager()
            for modu in dir(self):
                if modu not in data.keys() and not modu.startswith("_"):
                    get_modu = getattr(self, modu)
                    if isinstance(get_modu, (ForeignKey, Relation, Field)):
                        obj[modu] = get_modu
                        if isinstance(get_modu, (ForeignKey, Relation)):
                            obj[modu].pk = data[obj[modu].get_key()]
            for key, value in data.iteritems():
                obj[key] = value
            return obj
        else:
            result = []
            for i in data:
                obj = ObjectManager()
                for modu in dir(self):
                    if modu not in i.keys() and not modu.startswith("_"):
                        get_modu = getattr(self, modu)
                        if isinstance(get_modu, (ForeignKey, Relation, Field)):
                            obj[modu] = get_modu
                            if isinstance(get_modu, (ForeignKey, Relation)):
                                obj[modu].pk = i[obj[modu].get_key()]
                for key, value in i.iteritems():
                    obj[key] = value
                result.append(obj)
            return result
def register(*classs):
    """Expose each given model class in this module's globals (keyed by its
    class name) so relation descriptors can look related models up."""
    for model_cls in classs:
        globals()[model_cls.__name__] = model_cls
def ilike_(kwargs):
    """Render a dict of column -> substring pairs as an SQL LIKE condition
    string ("col like '%val%' and ..."); returns None for an empty dict.

    The original special-cased single-entry dicts with a " ".join, but
    joining a one-element list is separator-independent, so a single
    " and ".join is exactly equivalent.
    """
    if not kwargs:
        return None
    clauses = ["%s like %s" % (key, "'%" + value + "%'") for key, value in kwargs.iteritems()]
    return " and ".join(clauses)
| Python |
#! /usr/bin/env python
'''
grepidemic.py - Modeling information flow through a network. Uses NodeBox to provide a gui
for simulations. Requires NodeBox
'''
__author__ = "Chris Hill"
__version__ = "0.1"
__license__ = "GPL"
import sys, os
# ximport is a NodeBox builtin that loads NodeBox libraries at runtime;
# this script only runs inside the NodeBox environment.
graph = ximport("graph")
coreimage = ximport("coreimage")
# Simulation tuning knobs.
FRAMES_PER_SECOND = 30
DAYS_PER_SECOND = 10
DEFAULT_INFECTION_RATE = .7
DYNAMIC_INFECT = True
DYNAMIC_PROB = DEFAULT_INFECTION_RATE * 100 + 1.5
DEBUG = True
# Graph description file to load.
filename = 'graphs/pl_25_3_60'
class InfectedGraph(object):
    """InfectedGraph models the spread of a infectious agent through a graph.
    This agent can be the flu through a social network, or re-tweeting news
    through the twitter social network.
    Attributes:
    g = graph
    name = name of the graph
    infected_date = most recent node infection
    prob_infection = probability of an infection node spreading to it's neighbor
    frame = timeshot of the universe
    todo(cmhill)
    """
    def __init__(self, g, name = ""):
        # g is a NodeBox graph object; clicking a node triggers infectNode.
        self.g = g
        self.infected_nodes = []
        self.frame = 0
        self.name = name
        self.prob_infection = DEFAULT_INFECTION_RATE
        # Frames a node stays infected before turning immune ('highlight').
        self.infection_length = 30
        self.infected_date = {}
        self.infected_countdown = {}
        self.chance_of_infection = {}
        self.g.events.popup = True
        self.g.events.click = self.infectNode
    def header(self, txt, x = 50, y = 75, size = 55):
        # Draw a text header with NodeBox drawing primitives; returns its height.
        w = 200
        fill(180.0/255, 198.0/255, 177.0/255, 1)
        font("Courier", size)
        text(txt, x, y, outline=True)
        return textheight(txt)
    def infectNode(self, node):
        # Click handler: mark the node infected and start its countdown.
        # NOTE(review): the date here divides by FRAMES_PER_SECOND while the
        # draw() paths divide by DAYS_PER_SECOND -- confirm which is intended.
        node.style = "important"
        self.infected_date[node.id] = str(self.frame / FRAMES_PER_SECOND)
        #node.style = "1, 0, 0.25, 0.75"
        self.infected_nodes.append(node)
        self.infected_countdown[node.id] = self.infection_length
        print '[OUTBREAK] ' + node.id + ", @ t = " + str(self.frame)
    def getNodeStats(self, g, node):
        # Build the popup text for a node; *node* is a node id (dict key).
        info = 'Infected on day: ' + self.infected_date.get(node, 'NA')
        info += '\nInfection countdown: '
        info += str(self.infected_countdown.get(node, 'NA'))
        '''if self.infected_countdown.get(node, 'NA') is not 'NA':
        info += str(self.infected_countdown.get(node, 30) / DAYS_PER_SECOND)
        else:
        info += 'NA'''
        info += '\nChance of infection: ' + str(self.chance_of_infection.get(node, 'NA'))
        return info
        #return len(g.nodes)
    def draw(self, iterations = 10):
        # Advance one animation frame: refresh popups, grow/shrink nodes,
        # spread the infection probabilistically, and age infections out.
        # NOTE(review): style checks use 'is'/'is not' on string literals;
        # this leans on CPython string interning -- fragile but works here.
        global FRAMES_PER_SECOND
        # Calculate percent of getting infected next iteration
        for node in g.nodes:
            self.g.events.popup_text[node.id] = self.getNodeStats(self.g, node.id)
        if keydown and keycode == 49:
            # Key "1" held: draw only, no infection dynamics this frame.
            self.header("Day " + str(self.frame / 10), 10,75) #10, 975)
            self.header(self.name, 10, 20, size = 20)
            self.g.draw(weighted = True)
            self.g.update(iterations)
        else:
            self.header("Day " + str(self.frame / 10), 10,75)#10, 975)
            self.header(self.name, 10, 20, size = 20)
            for node in g.nodes:
                infect_nodes = []
                immune_nodes = []
                # Grow infected nodes, and shrink immune nodes.
                if node.style is 'important':
                    node.r = node.r * 1.025
                    for neighbor in node.links:
                        #neighbor.style = 'important'
                        if neighbor.style is not 'highlight': infect_nodes.append(neighbor)
                elif node.style is 'highlight':
                    node.r = node.r * .9756
                    if node.r < 8.0:
                        node.style = 'default'
                        node.r = 10.0
                else:
                    for neighbor in node.links:
                        #neighbor.style = 'important'
                        if neighbor.style is 'important':
                            pass
                            #self.chance_of_infection
                # Randomly infect another group of nodes
                for inode in infect_nodes:
                    if inode.style is not 'highlight' and inode.style is not 'important':
                        if self.prob_infection * 100 < random(DYNAMIC_PROB) and DYNAMIC_INFECT:
                            self.infected_date[inode.id] = str(self.frame / DAYS_PER_SECOND)
                            self.g.events.popup_text[inode.id] = self.getNodeStats(self.g, inode.id)
                            inode.style = 'important'
                            inode.r = 8
                            self.infected_nodes.append(inode)
                            self.infected_countdown[inode.id] = self.infection_length
                            print '[INFECT] ' + node.id + ' -> ' + inode.id + ' @ t = ' + str(self.frame)
            #for node in immune_nodes:
            # node.style = 'default'
            # node.r = 8.0
            #for node in self.
            # Go through all infected nodes and decrease their sickness timer.
            # Also spread the illness by some chance.
            # NOTE(review): remove() while iterating self.infected_nodes skips
            # the element following each removal -- confirm intended.
            for node in self.infected_nodes:
                self.infected_countdown[node.id] = self.infected_countdown.get(node.id, FRAMES_PER_SECOND) - 1
                if self.infected_countdown[node.id] < 0:
                    node.style = 'highlight'
                    self.infected_nodes.remove(node)
                    #node.r = 8.0
            # NOTE(review): frame % FRAMES_PER_SECOND is always < 30, so the
            # "== 60" branch below is dead code; likewise random(100) < 0 in
            # the elif can never be true.
            if self.frame % FRAMES_PER_SECOND == 60:
                infect_nodes = []
                immune_nodes = []
                for node in self.g.nodes:
                    if node.style is 'important':
                        for neighbor in node.links:
                            #neighbor.style = 'important'
                            if neighbor.style is not 'highlight': infect_nodes.append(neighbor)
                        node.style = 'highlight'
                    elif node.style is 'highlight':
                        immune_nodes.append(node)
                        #node.style = 'default'
                for node in infect_nodes:
                    if node.style is not 'highlight':
                        if self.prob_infection * 100 > random(100):
                            self.infected_date[node.id] = str(self.frame / DAYS_PER_SECOND)
                            self.g.events.popup_text[node.id] = self.getNodeStats(self.g, node.id)
                            node.style = 'important'
                            node.r = 8
                            self.infected_nodes.append(node)
                for node in immune_nodes:
                    node.style = 'default'
                    node.r = 8.0
            elif random(100) < 0:
                infect_nodes = []
                immune_nodes = []
                for node in self.g.nodes:
                    if node.style is 'important':
                        for neighbor in node.links:
                            #neighbor.style = 'important'
                            if neighbor.style is not 'highlight': infect_nodes.append(neighbor)
                        node.style = 'highlight'
                    elif node.style is 'highlight':
                        immune_nodes.append(node)
                        #node.style = 'default'
                for node in infect_nodes:
                    if node.style is not 'highlight':
                        if self.prob_infection * 100 < random(100):
                            self.infected_date[node.id] = str(self.frame / DAYS_PER_SECOND)
                            self.g.events.popup_text[node.id] = self.getNodeStats(self.g, node.id)
                            node.style = 'important'
                            node.r = 8
                            self.infected_nodes.append(node)
                            #print '[INFECT] ' + node.id + ' -> ' + inode.id
            self.g.draw(weighted = True)#, 300, 300)
            self.g.update(iterations)
        self.frame += 1
global ig
# Build the NodeBox graph and load the tab-separated edge list from disk.
g = graph.create(iterations=1000, distance=1.8, depth=False)
file = open(filename, 'r')
for line in file:
    tuple = line.strip().split('\t')
    g.add_edge(tuple[0], tuple[1])
g.solve()
# Created the infected graph
ig = InfectedGraph(g, filename)
g.draw()
speed(30)
def draw():
    # NodeBox per-frame animation callback (see speed(30) above):
    # advance the simulation by 10 layout iterations.
    global ig
    ig.draw(10)
#! /usr/bin/env python
'''
grepidemic.py - Modeling information flow through a network. Uses NodeBox to provide a gui
for simulations. Requires NodeBox
'''
__author__ = "Chris Hill"
__version__ = "0.1"
__license__ = "GPL"
import sys, os
# ximport is a NodeBox builtin for loading NodeBox libraries at runtime.
graph = ximport("graph")
coreimage = ximport("coreimage")
# Animation timing: frames drawn per second / simulated days per second.
FRAMES_PER_SECOND = 30
DAYS_PER_SECOND = 10
# Probability (0..1) that an infected node spreads to a neighbor.
DEFAULT_INFECTION_RATE = .7
DYNAMIC_INFECT = True
# Threshold used with random() for the per-frame dynamic infection roll.
DYNAMIC_PROB = DEFAULT_INFECTION_RATE * 100 + 1.5
DEBUG = True
# Tab-separated edge list loaded at startup.
filename = 'graphs/pl_25_3_60'
class InfectedGraph(object):
    """InfectedGraph models the spread of a infectious agent through a graph.
    This agent can be the flu through a social network, or re-tweeting news
    through the twitter social network.
    Attributes:
    g = graph
    name = name of the graph
    infected_date = most recent node infection
    prob_infection = probability of an infection node spreading to it's neighbor
    frame = timeshot of the universe
    todo(cmhill)
    """
    def __init__(self, g, name = ""):
        # g is a NodeBox graph object; clicking a node triggers infectNode.
        self.g = g
        self.infected_nodes = []
        self.frame = 0
        self.name = name
        self.prob_infection = DEFAULT_INFECTION_RATE
        # Frames a node stays infected before turning immune ('highlight').
        self.infection_length = 30
        self.infected_date = {}
        self.infected_countdown = {}
        self.chance_of_infection = {}
        self.g.events.popup = True
        self.g.events.click = self.infectNode
    def header(self, txt, x = 50, y = 75, size = 55):
        # Draw a text header with NodeBox drawing primitives; returns its height.
        w = 200
        fill(180.0/255, 198.0/255, 177.0/255, 1)
        font("Courier", size)
        text(txt, x, y, outline=True)
        return textheight(txt)
    def infectNode(self, node):
        # Click handler: mark the node infected and start its countdown.
        # NOTE(review): the date here divides by FRAMES_PER_SECOND while the
        # draw() paths divide by DAYS_PER_SECOND -- confirm which is intended.
        node.style = "important"
        self.infected_date[node.id] = str(self.frame / FRAMES_PER_SECOND)
        #node.style = "1, 0, 0.25, 0.75"
        self.infected_nodes.append(node)
        self.infected_countdown[node.id] = self.infection_length
        print '[OUTBREAK] ' + node.id + ", @ t = " + str(self.frame)
    def getNodeStats(self, g, node):
        # Build the popup text for a node; *node* is a node id (dict key).
        info = 'Infected on day: ' + self.infected_date.get(node, 'NA')
        info += '\nInfection countdown: '
        info += str(self.infected_countdown.get(node, 'NA'))
        '''if self.infected_countdown.get(node, 'NA') is not 'NA':
        info += str(self.infected_countdown.get(node, 30) / DAYS_PER_SECOND)
        else:
        info += 'NA'''
        info += '\nChance of infection: ' + str(self.chance_of_infection.get(node, 'NA'))
        return info
        #return len(g.nodes)
    def draw(self, iterations = 10):
        # Advance one animation frame: refresh popups, grow/shrink nodes,
        # spread the infection probabilistically, and age infections out.
        # NOTE(review): style checks use 'is'/'is not' on string literals;
        # this leans on CPython string interning -- fragile but works here.
        global FRAMES_PER_SECOND
        # Calculate percent of getting infected next iteration
        for node in g.nodes:
            self.g.events.popup_text[node.id] = self.getNodeStats(self.g, node.id)
        if keydown and keycode == 49:
            # Key "1" held: draw only, no infection dynamics this frame.
            self.header("Day " + str(self.frame / 10), 10,75) #10, 975)
            self.header(self.name, 10, 20, size = 20)
            self.g.draw(weighted = True)
            self.g.update(iterations)
        else:
            self.header("Day " + str(self.frame / 10), 10,75)#10, 975)
            self.header(self.name, 10, 20, size = 20)
            for node in g.nodes:
                infect_nodes = []
                immune_nodes = []
                # Grow infected nodes, and shrink immune nodes.
                if node.style is 'important':
                    node.r = node.r * 1.025
                    for neighbor in node.links:
                        #neighbor.style = 'important'
                        if neighbor.style is not 'highlight': infect_nodes.append(neighbor)
                elif node.style is 'highlight':
                    node.r = node.r * .9756
                    if node.r < 8.0:
                        node.style = 'default'
                        node.r = 10.0
                else:
                    for neighbor in node.links:
                        #neighbor.style = 'important'
                        if neighbor.style is 'important':
                            pass
                            #self.chance_of_infection
                # Randomly infect another group of nodes
                for inode in infect_nodes:
                    if inode.style is not 'highlight' and inode.style is not 'important':
                        if self.prob_infection * 100 < random(DYNAMIC_PROB) and DYNAMIC_INFECT:
                            self.infected_date[inode.id] = str(self.frame / DAYS_PER_SECOND)
                            self.g.events.popup_text[inode.id] = self.getNodeStats(self.g, inode.id)
                            inode.style = 'important'
                            inode.r = 8
                            self.infected_nodes.append(inode)
                            self.infected_countdown[inode.id] = self.infection_length
                            print '[INFECT] ' + node.id + ' -> ' + inode.id + ' @ t = ' + str(self.frame)
            #for node in immune_nodes:
            # node.style = 'default'
            # node.r = 8.0
            #for node in self.
            # Go through all infected nodes and decrease their sickness timer.
            # Also spread the illness by some chance.
            # NOTE(review): remove() while iterating self.infected_nodes skips
            # the element following each removal -- confirm intended.
            for node in self.infected_nodes:
                self.infected_countdown[node.id] = self.infected_countdown.get(node.id, FRAMES_PER_SECOND) - 1
                if self.infected_countdown[node.id] < 0:
                    node.style = 'highlight'
                    self.infected_nodes.remove(node)
                    #node.r = 8.0
            # NOTE(review): frame % FRAMES_PER_SECOND is always < 30, so the
            # "== 60" branch below is dead code; likewise random(100) < 0 in
            # the elif can never be true.
            if self.frame % FRAMES_PER_SECOND == 60:
                infect_nodes = []
                immune_nodes = []
                for node in self.g.nodes:
                    if node.style is 'important':
                        for neighbor in node.links:
                            #neighbor.style = 'important'
                            if neighbor.style is not 'highlight': infect_nodes.append(neighbor)
                        node.style = 'highlight'
                    elif node.style is 'highlight':
                        immune_nodes.append(node)
                        #node.style = 'default'
                for node in infect_nodes:
                    if node.style is not 'highlight':
                        if self.prob_infection * 100 > random(100):
                            self.infected_date[node.id] = str(self.frame / DAYS_PER_SECOND)
                            self.g.events.popup_text[node.id] = self.getNodeStats(self.g, node.id)
                            node.style = 'important'
                            node.r = 8
                            self.infected_nodes.append(node)
                for node in immune_nodes:
                    node.style = 'default'
                    node.r = 8.0
            elif random(100) < 0:
                infect_nodes = []
                immune_nodes = []
                for node in self.g.nodes:
                    if node.style is 'important':
                        for neighbor in node.links:
                            #neighbor.style = 'important'
                            if neighbor.style is not 'highlight': infect_nodes.append(neighbor)
                        node.style = 'highlight'
                    elif node.style is 'highlight':
                        immune_nodes.append(node)
                        #node.style = 'default'
                for node in infect_nodes:
                    if node.style is not 'highlight':
                        if self.prob_infection * 100 < random(100):
                            self.infected_date[node.id] = str(self.frame / DAYS_PER_SECOND)
                            self.g.events.popup_text[node.id] = self.getNodeStats(self.g, node.id)
                            node.style = 'important'
                            node.r = 8
                            self.infected_nodes.append(node)
                            #print '[INFECT] ' + node.id + ' -> ' + inode.id
            self.g.draw(weighted = True)#, 300, 300)
            self.g.update(iterations)
        self.frame += 1
global ig
# Build the NodeBox graph and load the tab-separated edge list from disk.
g = graph.create(iterations=1000, distance=1.8, depth=False)
file = open(filename, 'r')
for line in file:
    tuple = line.strip().split('\t')
    g.add_edge(tuple[0], tuple[1])
g.solve()
# Created the infected graph
ig = InfectedGraph(g, filename)
g.draw()
speed(30)
def draw():
    # NodeBox per-frame animation callback (see speed(30) above):
    # advance the simulation by 10 layout iterations.
    global ig
    ig.draw(10)
#!/usr/bin/env python
# Copyright 2005,2006 Michael Rice
# errr@errr-online.com
# vim: noexpandtab:ts=4:sts=4
""" fluxStyle
fluxStyle is a graphical style-manager for the fluxbox
window manager. Orignal version written by Michael Rice.
Many special thanks to Zan a.k.a. Lauri Peltonen for GUI
Improvements & Bug Stomping.
Released under GPL v2.
TODO
- somehow support older styles and not put folders in the list
like folders that dont have anything to do with a style.
- fix any bugs that may still be there and unseen..
- add tray icon support (this is started will be done soon)
"""
import sys
import os
import gtk
import gtk.glade
from fluxstyle import findStyles,parseConfig
from os.path import isfile,expanduser,isdir
from fluxstyle import errorMessage
#GLADEFILE="/usr/share/fluxstyle/glade/main.glade"
#N_IMAGE="/usr/share/fluxstyle/images/none.jpg"
# Development (in-tree) resource paths; the commented pair above are the
# system-wide install locations.
GLADEFILE="./glade/main.glade"
N_IMAGE="./images/none.jpg"
class StyleChange:
    """Class wrapper for changing styles in fluxbox"""
    # NOTE(review): methods actually use a module-level ``location`` global
    # (declared via ``global location``); this class attribute looks vestigial.
    location = ""
    def main(self):
        # Enter the GTK main loop (blocks until quit).
        gtk.main()
    def __init__(self):
        """The main fluxStyle window will show"""
        global location
        windowname = "window1"
        self.wTree = gtk.glade.XML (GLADEFILE,windowname)
        self.treeview1 = self.wTree.get_widget("treeview1")
        self.view_menu = self.wTree.get_widget("view1_menu")
        self.__fill_view_menu__(self.view_menu)
        # Map glade signal names onto the handler methods below.
        handler = {"on_apply_style_clicked":self.__apply_style_clicked__,
            "on_quit_clicked":(gtk.main_quit),
            "on_add_style_clicked":self.__add_style_clicked__,
            "on_remove_style_clicked":self.__remove_style_clicked__,
            "on_quit1_activate":(gtk.main_quit),
            "on_about1_activate":self.__about1_activate__,
            "on_window1_destroy":(gtk.main_quit),
            "on_default1_activate":self.__fill_combolist__}
        self.wTree.signal_autoconnect (handler)
        #Preparing the treeview here
        self.liststore = gtk.ListStore(gtk.gdk.Pixbuf, str)
        self.treeview1.set_model(self.liststore)
        renderer = gtk.CellRendererText()
        imagerenderer = gtk.CellRendererPixbuf()
        imagerenderer.set_property('ypad', 10)
        imagerenderer.set_property('xpad', 5)
        column1 = gtk.TreeViewColumn("Preview", imagerenderer, pixbuf=0)
        column1.set_resizable(True)
        column2 = gtk.TreeViewColumn("Name", renderer, text=1)
        column2.set_resizable(True)
        self.treeview1.append_column(column1)
        self.treeview1.append_column(column2)
        #Fill it (Clear + fill)
        self.__fill_combolist__(self.treeview1,loc="default")
        return
    # Call backs begin here
    # fill combo list
    def __fill_combolist__(self,widget,loc="default"):
        """Fill the combo list with styles test to see if there is a ~/.fluxbox/styles
        if there isnt then make it and move on instead of die."""
        global location
        location = expanduser(loc)
        if location == "default":
            location = expanduser("~/.fluxbox/styles")
            try:
                dir = os.listdir(location)
                dir.sort()
                self.liststore.clear()
                for styles in dir:
                    self.liststore.append((self.__get_preview__(styles), styles,))
            except(OSError):
                # Default styles dir is missing: create it and tell the user.
                dir = expanduser("~/.fluxbox/styles")
                os.makedirs(dir,mode=0700)
                message = "You do not have a default style folder yet I have made it for you. "
                message += "The list will remain empty until you install a style which "
                message += "you can do by clicking the add button."
                errorMessage.infoMessage(message)
        else:
            try:
                dir = os.listdir(location)
                dir.sort()
                self.liststore.clear()
                for styles in dir:
                    self.liststore.append((self.__get_preview__(styles), styles,))
            except(OSError):
                # A user-configured location from ~/.fluxStyle.rc is invalid.
                m = "You have an invalid location in your ~/.fluxStyle.rc file. It is possible "
                m += "that you have a syntax error. Please exit fluxStlye and fix the error in "
                m += "this file and try again."
                errorMessage.infoMessage(m)
    # get the preview image for view
    def __get_preview__(self, stylename):
        """Get the preview image from: location + /styleName/preview.jpg"""
        global location
        location = expanduser(location)
        image = gtk.Image()
        if os.path.isdir(location + "/" + stylename):
            if isfile(location+"/"+stylename+"/preview.jpg"):
                image.set_from_file(location+"/" +stylename+"/preview.jpg")
            else:
                # Fall back to the bundled "no preview" placeholder.
                image.set_from_file(N_IMAGE)
        return image.get_pixbuf()
    def __fill_view_menu__(self, widget):
        # Populate the View menu from ~/.fluxStyle.rc (STYLES_DIRS entries).
        v_menuNam = None
        if parseConfig.check4_config() == 2:
            # A default rc was just created.
            message = "This looks like the first time you have started fluxStlye "
            message += "a default config has been created for you. You should edit "
            message += "this config to control the location of styles shown in the "
            message += "preview window. The config file is located in ~/.fluxStyle.rc"
            errorMessage.infoMessage(message)
        elif parseConfig.check4_config() == 3:
            # No rc and $HOME is not writable.
            message = "You do not have the config file \"~/.fluxStyle.rc\" and you do "
            message += "not have write access to the \"~/\" aka $HOME directory. If you "
            message += "find this is not accurate information please report a bug to errr@"
            message += "errr-online.com"
            errorMessage.infoMessage(message)
        elif parseConfig.check4_config() == True:
            ops = parseConfig.parse_file(expanduser("~/.fluxStyle.rc"))
            l = []
            if ops != False:
                count = 1
                view = self.view_menu
                for k,v in ops.iteritems():
                    if k == "STYLES_DIRS":
                        for x in v:
                            l.append(x.strip().split(','))
                for i in l:
                    if len(i) <= 1:
                        # Entry with no name label: fall back to "Extra Styles".
                        name = "_"+str(count)+" %s"%(" Extra Styles")
                        menuitem = gtk.MenuItem(name + str(count))
                        menuitem.connect("activate", self.__fill_combolist__,i[0])
                        view.add(menuitem)
                        count += 1
                    else:
                        name = "_%s"%(i[0])
                        menuitem = gtk.MenuItem(name)
                        menuitem.connect("activate",self.__fill_combolist__,i[1])
                        view.add(menuitem)
                view.show_all()
    # Set style
    def __apply_style_clicked__(self,widget):
        """Used to apply new styles"""
        global location
        style = self.__get_selected_style__()
        if style:
            findStyles.set_style(style,location)
    # Add style
    def __add_style_clicked__(self,widget):
        """Install a new style, multiple styles can be installed at once."""
        dialog = gtk.FileChooserDialog("Choose file to install",
            None,gtk.FILE_CHOOSER_ACTION_OPEN,
            (gtk.STOCK_CANCEL, gtk.RESPONSE_CANCEL,
            gtk.STOCK_OPEN, gtk.RESPONSE_OK))
        dialog.set_default_response(gtk.RESPONSE_OK)
        # Only offer fluxbox style tarballs in the chooser.
        filter = gtk.FileFilter()
        filter.set_name("Fluxbox Styles")
        filter.add_mime_type("tar/gz")
        filter.add_mime_type("tar/bz2")
        filter.add_pattern("*.tar.gz")
        filter.add_pattern("*.tar.bz2")
        filter.add_pattern("*.tgz")
        dialog.add_filter(filter)
        dialog.set_select_multiple(True)
        response = dialog.run()
        if response == gtk.RESPONSE_OK:
            findStyles.install_style(dialog.get_filenames())
            self.__fill_combolist__(self)
            dialog.destroy()
        if response == gtk.RESPONSE_CANCEL:
            dialog.destroy()
    # remove style
    def __remove_style_clicked__(self,widget):
        """Remove selected style, currently only 1 style at a time is supported"""
        global location
        style = self.__get_selected_style__()
        if style == False:
            m = "You must select a style to remove first"
            errorMessage.infoMessage(m)
        else:
            # Confirm before deleting the style directory.
            message = gtk.MessageDialog(None, gtk.DIALOG_MODAL, gtk.MESSAGE_INFO, \
                gtk.BUTTONS_NONE, "Are you sure you want to delete %s?"%(style))
            message.add_button(gtk.STOCK_OK, gtk.RESPONSE_OK)
            message.add_button(gtk.STOCK_CANCEL, gtk.RESPONSE_CLOSE)
            response = message.run()
            message.hide()
            if response == gtk.RESPONSE_OK:
                if findStyles.remove_style(style,location) != False:
                    message.destroy()
                    self.__fill_combolist__(self,location)
                else:
                    # No write access to the style directory.
                    say = "You do not have access to remove this style please contact "
                    say += "your system admin for help removing this style."
                    message = gtk.MessageDialog(None, gtk.DIALOG_MODAL, gtk.MESSAGE_INFO, \
                        gtk.BUTTONS_NONE, say)
                    message.add_button(gtk.STOCK_CLOSE, gtk.RESPONSE_CLOSE)
                    response = message.run()
                    message.hide()
                    if response == gtk.RESPONSE_CLOSE:
                        message.destroy()
            if response == gtk.RESPONSE_CLOSE:
                message.destroy()
    def __get_selected_style__(self):
        """Getting the selected style"""
        # Returns the style name (column 1) or False when nothing is selected.
        selection = self.treeview1.get_selection()
        (model, iter) = selection.get_selected()
        if model and iter:
            return model.get_value(iter, 1)
        else:
            return False
    def __close_about__(self,widget,event):
        """close about dialog"""
        if event == gtk.RESPONSE_CANCEL:
            self.aboutdialog.destroy()
    def __about1_activate__(self,widget):
        """Activate the help button with the about dialog, use generic if pygtk < 2.5.9"""
        if gtk.pygtk_version < (2,5,90):
            message = "fluxStyle version 1.0 Updae your pygtk version for more features. Version "
            message += "2.6.0 or newer is reccomended"
            errorMessage.infoMessage(message)
        else:
            windowname2="aboutdialog1"
            self.wTree2=gtk.glade.XML (GLADEFILE,windowname2)
            handler = { "on_aboutdialog1_response":self.__close_about__}
            self.wTree2.signal_autoconnect(handler)
            self.aboutdialog = self.wTree2.get_widget("aboutdialog1")
if __name__ == "__main__":
    # Build the window and block in the GTK main loop.
    style = StyleChange()
    style.main()
| Python |
#!/usr/bin/env python
"""Fluxstyle is a graphical style manager built in python using pygtk and glade.
Fluxstyle is for the fluxbox window manager. Orignal version written by Michael Rice.
Many special thanks to Zan a.k.a. Lauri Peltonen for GUI Improvements & Bug Stomping.
Released under GPL"""
from distutils.core import setup
import distutils.fancy_getopt
import sys
# Installation prefixes for shared data and executables.
DATADIR="/usr/share"
BINDIR="/usr/bin"
def chk_install():
    """try to pull in gtk crap to make sure deps are on box before install"""
    # Python version string (e.g. "2.4.3"), used in the PYTHONPATH hints below.
    ver = sys.version[:5]
    try:
        import gtk
    except:
        print "You seem to be missing gtk bindings for python"
        print "Please install them before you install fluxstyle"
        print "http://pygtk.org/"
        raise SystemExit
    try:
        import gtk.glade
    except:
        print "You need to install libglade2"
        print "http://ftp.gnome.org/pub/GNOME/sources/libglade/2.0/ "
        print "or set your PYTHONPATH correctly. "
        print "try: export PYTHONPATH=/usr/local/lib/python%s/site-packages/" % (ver)
        print "or export: PYTHONPATH=/usr/lib/python%s/site-packages/" % (ver)
        raise SystemExit
    # Minimum pygtk version gate.
    if gtk.pygtk_version < (2,3,90):
        print "PyGtk 2.3.90 or later required for this program"
        print "It is reccomended that you get pygtk 2.6 or newer for best results."
        raise SystemExit
def main():
    # Verify the GTK runtime deps exist, then run the distutils setup.
    chk_install()
    doclines = __doc__.split("\n")
    setup(name='fluxstyle',
        version='1.2',
        description=doclines[0],
        author='Michael Rice',
        author_email='errr@errr-online.com',
        url='http://fluxstyle.berlios.de/',
        packages=['fluxstyle'],
        data_files=[
            (DATADIR+'/fluxstyle/images',
                ['images/fluxmetal.png','images/mini-fluxbox6.png','images/none.jpg']),
            # NOTE(review): the glade file is shipped from images/ here while the
            # app loads glade/main.glade -- presumably should be 'glade/main.glade'; verify.
            (DATADIR+'/fluxstyle/glade',['images/main.glade']),
            (BINDIR,['fluxStyle']),
            (DATADIR+'/fluxstyle/docs',['docs/README','docs/LICENSE','docs/Changelog'])
        ]
    )
if __name__ == "__main__":
    # Run dependency checks and install.
    main()
| Python |
# Copyright 2005 Michael Rice
# errr@errr-online.com
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
"""Module to install remove and set styles for fluxbox"""
import tarfile,re,os
from os.path import expanduser
from os import system
from shutil import rmtree,copyfile
from sys import stdout
def set_style(style, location):
    """Point the fluxbox init file at *style* and restart fluxbox.

    A backup of ~/.fluxbox/init is written to ~/.fluxbox/init.bckp, the
    ``session.styleFile`` line is rewritten, and fluxbox is signalled
    (USR2) to reload its configuration.

    Fixes vs. original: removed a dead ``output = stdout`` assignment,
    read the backup file once instead of twice, and guarded against the
    case where no session.styleFile line exists -- the original called
    line.replace("") with an empty search string, which inserts the new
    text between every character and corrupts the whole init file.
    """
    if location == "default":
        location = "~/.fluxbox/styles"
    new_style_line = "session.styleFile:\t" + expanduser(location + "/" + style + "\n")
    old_style_line = ""
    copyfile(expanduser("~/.fluxbox/init"), expanduser("~/.fluxbox/init.bckp"))
    backup = open(expanduser("~/.fluxbox/init.bckp"), "r")
    lines = backup.readlines()
    backup.close()
    style_pattern = re.compile(r"session.styleFile")
    for line in lines:
        if style_pattern.search(line):
            old_style_line = line
    output = open(expanduser("~/.fluxbox/init"), "w")
    for line in lines:
        if old_style_line:
            output.write(line.replace(old_style_line, new_style_line))
        else:
            # No existing styleFile line: write the file back unchanged.
            output.write(line)
    output.close()
    # attempt to not have to make a seperate fedora package for odd name
    # 'fluxbox-bin'
    system('kill -s USR2 `xprop -root _BLACKBOX_PID | awk \'{print $3}\'`')
    return
def install_style(file):
    """Install one or more style tarballs (tar.gz / tar.bz2) into ~/.fluxbox/styles.

    Styles in the wild are packaged either as ``styleFoo/`` (extracted into
    the styles dir) or as ``.fluxbox/styles/styleFoo`` (extracted relative
    to ~/); the first member name decides which.  Returns False for an
    unreadable tarball, 2 when the path is not a tar file at all, and None
    on success.

    Fixes vs. original: the inner extraction loop reused the outer loop
    variable ``i`` (shadowing the archive path), and the two identical
    extraction loops are merged.
    """
    for archive in file:
        ins_dir = expanduser("~/.fluxbox/styles")
        if tarfile.is_tarfile(archive) == True:
            # try first for bz2
            try:
                tar = tarfile.open(archive, "r:bz2")
            #maybe its tar.gz
            except tarfile.ReadError:
                try:
                    tar = tarfile.open(archive, "r:gz")
                #this isnt a bz2 or gz, so wtf is it?
                except tarfile.ReadError:
                    #now return 2 to say weird file type..
                    return False
            #we need to find out how the style was packaged
            #if it is ~/.fluxbox/styles/styleName then we need a new
            #install dir. otherwise use default.
            check = tar.getnames()
            if re.match(r'^\.fluxbox/styles/.+', check[0]) == None:
                target = ins_dir
            else:
                target = expanduser("~/")
            # SECURITY(review): members are extracted without path
            # sanitization; a crafted tarball containing ../ members could
            # escape the target directory.
            for member in tar:
                tar.extract(member, target)
        else:
            # 2 == it wasnt even a tar file at all. This is a double check, we filter
            #the file types in the file chooser to allow only tar.gz and tar.bz2
            return 2
    return
def remove_style(file, location):
    """Delete the directory of style *file* under *location*.

    Returns True when the style directory was writable and removed,
    False when we lack write access (or the path does not exist).
    """
    if location == "default":
        location = "~/.fluxbox/styles"
    target = expanduser(location + "/" + file)
    if not os.access(target, os.W_OK):
        return False
    rmtree(target)
    return True
| Python |
import gtk,textwrap
def infoMessage(message):
    """Show *message* in a modal GTK info dialog, word-wrapped at 50 columns."""
    wrapped = textwrap.wrap(message, 50)
    body = "".join(line + "\n" for line in wrapped)
    dialog = gtk.MessageDialog(None, gtk.DIALOG_MODAL, gtk.MESSAGE_INFO, \
        gtk.BUTTONS_NONE, body)
    dialog.add_button(gtk.STOCK_OK, gtk.RESPONSE_CLOSE)
    response = dialog.run()
    dialog.hide()
    if response == gtk.RESPONSE_CLOSE:
        dialog.destroy()
| Python |
'''Written by Michael Rice
Copyright Nov 14, 2005
Released under the terms of the GNU GPL v2
Email: Michael Rice errr@errr-online.com
Website: http://errr-online.com/
'''
import os,re
from os.path import expanduser
def check4_config():
    """Make sure ~/.fluxStyle.rc exists.

    Returns True when the rc file is present, 2 after creating a default
    one, and 3 when it is missing and $HOME is not writable.
    """
    home = expanduser("~/")
    rc_path = home + ".fluxStyle.rc"
    home_writable = os.access(home, os.W_OK)
    if os.path.isfile(rc_path):
        return True
    if home_writable:
        write_config()
        return 2
    #file isnt there and we dont have premission to make it.
    return 3
def write_config():
    """Write a commented default ~/.fluxStyle.rc for the user to edit."""
    default_config = """
# No need to add ~/.fluxbox/styles it is the default location and if it is listed it will
# be ignored. Currently the only option supported right now is STYLES_DIRS
# to choose the name that will display in the view menu use the following syntax
# Name,/location:Foo,/other/location:Bar,/another/location
# If the name identifier is left off "Extra Styles" will be used.
# The following line is an example of what to use if you have styles installed in these places
#STYLES_DIRS:Global,/usr/share/fluxbox/styles:Tenners,/usr/share/tenr-de-styles-pkg-1.0/styles/
"""
    rc = open(expanduser("~/.fluxStyle.rc"), "w")
    rc.write(default_config)
    rc.close()
    # return 2
def parse_file(file):
    """Read a config file into a dict of option name -> list of values.

    Config file format (one option per line, colon-separated)::

        OPTION:values:moreValues:evenMore

    Do not end a line with ":".  A "#" must be the FIRST character to
    count as a comment -- ``OPTION:notComment #x`` is NOT a comment.
    Returns False when the file is missing or contains no option lines.

    Fixes vs. original: no longer shadows the ``file`` handle with the
    parameter, closes the file handle deterministically, and replaces the
    ``range(len(...))`` index loop with a direct dict build.
    """
    path = expanduser(file)
    if not os.path.isfile(path):
        return False
    # NOTE(review): the stray '^' inside the character class is kept from
    # the original pattern; it also treats lines starting with '^' as
    # comments.
    match = re.compile(r"^[^#^\n]")
    f = open(path)
    try:
        lines = f.readlines()
    finally:
        f.close()
    entries = [line.strip().split(":") for line in lines if match.findall(line)]
    if not entries:
        return False
    # Later duplicate options win, matching the original assignment loop.
    return dict((fields[0], fields[1:]) for fields in entries)
if __name__ == "__main__":
    #print parse_file("~/.fluxStyle.rc")
    # Ad-hoc smoke test: dump the STYLES_DIRS entries from the user's rc.
    x = parse_file("~/.fluxStyle.rc")
    l = []
    for k,v in x.iteritems():
        if k == "STYLES_DIRS":
            for i in v:
                l.append( i.strip().split(",") )
    for i in l:
        if len(i) <= 1:
            print "default ", i[0]
        else:
            print i[0], i[1]
| Python |
# Copyright 2005,2006 Michael Rice
# errr@errr-online.com
# vim:set noexpandtab:ts=4:sts=4 textwidth=79:
""" fluxStyle
fluxStyle is a graphical style-manager for the fluxbox
window manager. Orignal version written by Michael Rice.
Many special thanks to Zan a.k.a. Lauri Peltonen for GUI
Improvements & Bug Stomping.
Released under the GNU GPL
"""
# (name, email) pairs for the project authors.
__AUTHORS__ = [ ("Michael Rice", "errr@errr-online.com"),("Lauri Peltonen",\
"zan@users.berlios.de")]
| Python |
#!/usr/bin/python
# MySQL connection credentials for the crawler -- fill in before running.
host = ''
user = ''
passwd = ''
db = ''
| Python |
#!/usr/bin/python
# MySQL connection credentials for the crawler -- fill in before running.
host = ''
user = ''
passwd = ''
db = ''
| Python |
#!/usr/bin/python
__author__ = "Sumin Byeon <suminb@gmail.com>"
import base64
import re
import urlparse
import urllib, urllib2
import sys, os
import string
import random
import time, datetime, calendar
import MySQLdb, _mysql_exceptions
import auth
# Crawler tunables: maximum link-follow depth and request timeout (seconds).
settings = {
    'max-depth': 4,
    'timeout': 10,
}
def geturls(content, parenturl):
    """Scan HTML *content* for href/src URLs, resolve relative ones against
    *parenturl*, commit them all to the database, and return the list."""
    pos = 0
    n = len(content)
    parenturl = urlparse.urlparse(parenturl)
    #print 'Looking for links...'
    urls = []
    while pos <= n:
        localcontent = content[pos:n]
        # Find the next <a href=... or <frame|iframe|img src=... occurrence.
        m = re.search('(<a [^>]*href=\"[0-9a-zA-Z.:;/%?&=-_]+|<(frame|iframe|img) [^>]*src=\"[0-9a-zA-Z.:;/%?&=-_]+)', localcontent, re.IGNORECASE)
        if m == None:
            break
        pos = pos + m.start()
        localcontent = localcontent[m.start():n]
        # Isolate the quoted attribute value itself.
        m = re.search('(src|href)=\"[0-9a-zA-Z.:;/%?&=-_]+\"', localcontent, re.IGNORECASE)
        if m == None:
            break
        pos = pos + m.end()
        localcontent = localcontent[m.start():m.end()]
        offset = 0
        if localcontent[0:3].lower() == 'src':
            offset = 3
        elif localcontent[0:4].lower() == 'href':
            offset = 4
        # Strip the attribute name, the '="' prefix, and the trailing quote.
        url = localcontent[offset+2:len(localcontent)-1].strip()
        if re.match('(about|javascript|mailto):.*', url, re.IGNORECASE):
            continue
        # Drop a trailing slash.  NOTE(review): this indexes url[-1] before
        # the emptiness check on the next line -- would IndexError on "".
        if url[len(url)-1] == '/':
            url = url[0:len(url)-1]
        if len(url) <= 0:
            continue
        # Resolve relative URLs against the parent's scheme/host/path.
        if not re.match('(http|https).+', url):
            if url[0] == '/':
                url = '%s://%s%s' % (parenturl.scheme, parenturl.netloc, url)
            elif url[0:3] == '../':
                url = '%s://%s%s/%s' % (parenturl.scheme, parenturl.netloc, '/'.join((parenturl.path.split('/')[0:-2])), url)
            else:
                url = '%s://%s%s/%s' % (parenturl.scheme, parenturl.netloc, '/'.join((parenturl.path.split('/')[0:-1])), url)
        urls.append(url)
    #print ' %d link(s) were found' % len(urls)
    commit_urls(urls)
    return urls
def commit_urls(urls):
    """Insert *urls* into the url table; duplicates are dropped from the list.

    Prints one '+' per inserted URL and one '-' per duplicate.
    SECURITY(review): the INSERT interpolates the URL straight into SQL --
    injection-prone; should use dbc.execute(query, (url,)) placeholders.
    """
    url_count = len(urls)
    duplicate_count = 0
    sys.stdout.write(' ')
    # NOTE(review): urls.remove() while iterating *urls* skips the element
    # after every duplicate -- confirm intended.
    for url in urls:
        query = "INSERT INTO url (url) VALUES('%s')" % url
        try:
            dbc.execute(query)
            sys.stdout.write('+')
        except _mysql_exceptions.IntegrityError:
            # Unique constraint hit: already crawled/queued.
            urls.remove(url)
            duplicate_count += 1
            sys.stdout.write('-')
            continue
        except Exception, e:
            #sys.stderr.write(str(e))
            print e
            continue
        finally:
            sys.stdout.flush()
    sys.stdout.write('\n')
    db.commit()
    print ' URLs: %d found, %d in database, %d committed.' % (url_count, duplicate_count, url_count-duplicate_count)
def geturlrow(url):
    """Fetch the url table row for *url* as a dict, or None when absent/on error."""
    urlrow = None
    try:
        dbc.execute("SELECT * FROM url where url = %s LIMIT 1", (url))
        urlrow = dbc.fetchone()
    except Exception, e:
        print e
    return urlrow
def visit(url, depth):
    """Fetch *url*, store the page in the database, and return found URLs.

    Pages visited within the last 24 hours are served from the page table
    instead of re-fetched.  Returns None past max-depth or when the URL is
    not in the database; otherwise the list of links found (empty for
    non-HTML responses).
    """
    if depth > settings['max-depth']:
        return
    print 'Crawling %s in depth %d...' % (url, depth)
    #opener = urllib2.build_opener()
    # Empty urlencoded body forces a POST-style Request with these headers.
    data = {}
    data = urllib.urlencode(data)
    headers = { 'User-Agent' : 'Mozilla/5.0 (X11; U; Linux x86_64; en-US; rv:1.9.0.4) Gecko/2008111319 Ubuntu/8.10 (intrepid) Firefox/3.0.4' }
    request = urllib2.Request(url, data, headers)
    response = None
    response_code = 0
    try:
        #page = response.open(url)
        response = urllib2.urlopen(request)
        response_code = response.getcode()
    except urllib2.HTTPError, e:
        print e
        response_code = e.getcode()
    except urllib2.URLError, e:
        print e
    row = geturlrow(url)
    if row == None:
        sys.stderr.write('URL does not exist in the database')
        return
    last_visit = 0
    if row['last_visit'] != None:
        last_visit = calendar.timegm(row['last_visit'].timetuple())
    current = int(time.time())
    content = None
    if current - last_visit < 3600*24:
        # Recently visited: reuse the newest stored copy of the page.
        print ' This page has been visited recently, skipping...'
        dbc.execute('SELECT * FROM page WHERE url_id = %s ORDER BY `date` DESC LIMIT 1', row['id'])
        pagerow = dbc.fetchone()
        if pagerow != None:
            content = pagerow['content']
    else:
        # Record the response code and visit time before storing the body.
        try:
            result = dbc.execute('UPDATE url SET last_response = %s, last_visit = FROM_UNIXTIME(%s) WHERE id = %s LIMIT 1', (response_code, int(time.time()), row['id']))
        except Exception, e:
            print 'Exception:', e
        if response != None:
            try:
                content = response.read()
            except Exception, e:
                print ' Exception: Could not read the page:', e
                return
            # Non-text bodies are stored base64-encoded.
            if response.headers.type[0:4] != 'text':
                content = base64.b64encode(content)
            query = "INSERT INTO page (url_id, type, content) VALUES(%s, %s, %s)"
            args = (row['id'], response.headers.type, content)
            try:
                dbc.execute(query, args)
            except Exception, e:
                print ' Exception:', e
            try:
                db.commit()
            except Exception, e:
                print ' Exception:', e
    if response != None and response.headers.type == 'text/html':
        return geturls(content, url)
    else:
        return []
    '''
    urls = geturls(content, url)
    if len(urls) > 0:
    for u in urls:
    visit(u, depth+1)
    '''
# TODO: Needs to be replaced with getopts or something
# Script entry point.  argv[1] = seed URL (optional), argv[2] = max depth.
if len(sys.argv) > 2:
    settings['max-depth'] = int(sys.argv[2])
db = None
try:
    db = MySQLdb.connect(host=auth.host, user=auth.user, passwd=auth.passwd, db=auth.db)
except Exception, e:
    print 'Could not connect to database:', e
    exit(1)
dbc = db.cursor(MySQLdb.cursors.DictCursor)
if len(sys.argv) > 1:
    # Seed mode: start from the URL given on the command line.
    url = sys.argv[1]
    commit_urls([url])
    urls = visit(url, 0)
    for url in urls:
        visit(url, 0)
else:
    # Drain mode: loop forever over random batches of unvisited URLs.
    while 1:
        urlrowset = None
        try:
            dbc.execute('SELECT * FROM url WHERE last_response is NULL ORDER BY RAND() LIMIT 25')
            urlrowset = dbc.fetchall()
        except Exception, e:
            print e
        for urlrow in urlrowset:
            visit(urlrow['url'], 0)
db.close()
| Python |
#!/usr/bin/python
__author__ = "Sumin Byeon <suminb@gmail.com>"
import base64
import re
import urlparse
import urllib, urllib2
import sys, os
import string
import random
import time, datetime, calendar
import MySQLdb, _mysql_exceptions
import auth
settings = {
'max-depth': 4,
'timeout': 10,
}
def geturls(content, parenturl):
    """Extract absolute link URLs from an HTML page.

    Scans *content* for href/src attributes of <a>, <frame>, <iframe> and
    <img> tags, resolves relative URLs against *parenturl*, commits the
    result via commit_urls() (which removes duplicates from the list in
    place), and returns the remaining URLs.
    """
    pos = 0
    n = len(content)
    parenturl = urlparse.urlparse(parenturl)
    #print 'Looking for links...'
    urls = []
    while pos <= n:
        localcontent = content[pos:n]
        # Find the next tag carrying an href/src attribute.
        m = re.search('(<a [^>]*href=\"[0-9a-zA-Z.:;/%?&=-_]+|<(frame|iframe|img) [^>]*src=\"[0-9a-zA-Z.:;/%?&=-_]+)', localcontent, re.IGNORECASE)
        if m == None:
            break
        pos = pos + m.start()
        localcontent = localcontent[m.start():n]
        # Narrow down to just the attribute="value" portion.
        m = re.search('(src|href)=\"[0-9a-zA-Z.:;/%?&=-_]+\"', localcontent, re.IGNORECASE)
        if m == None:
            break
        pos = pos + m.end()
        localcontent = localcontent[m.start():m.end()]
        offset = 0
        if localcontent[0:3].lower() == 'src':
            offset = 3
        elif localcontent[0:4].lower() == 'href':
            offset = 4
        # Strip the attribute name, the ="..." quoting, then whitespace.
        url = localcontent[offset+2:len(localcontent)-1].strip()
        if re.match('(about|javascript|mailto):.*', url, re.IGNORECASE):
            continue
        if url[len(url)-1] == '/':
            url = url[0:len(url)-1]
        if len(url) <= 0:
            continue
        if not re.match('(http|https).+', url):
            # Relative URL: resolve against the parent page's location.
            if url[0] == '/':
                url = '%s://%s%s' % (parenturl.scheme, parenturl.netloc, url)
            elif url[0:3] == '../':
                url = '%s://%s%s/%s' % (parenturl.scheme, parenturl.netloc, '/'.join((parenturl.path.split('/')[0:-2])), url)
            else:
                url = '%s://%s%s/%s' % (parenturl.scheme, parenturl.netloc, '/'.join((parenturl.path.split('/')[0:-1])), url)
        urls.append(url)
    #print ' %d link(s) were found' % len(urls)
    commit_urls(urls)
    return urls
def commit_urls(urls):
url_count = len(urls)
duplicate_count = 0
sys.stdout.write(' ')
for url in urls:
query = "INSERT INTO url (url) VALUES('%s')" % url
try:
dbc.execute(query)
sys.stdout.write('+')
except _mysql_exceptions.IntegrityError:
urls.remove(url)
duplicate_count += 1
sys.stdout.write('-')
continue
except Exception, e:
#sys.stderr.write(str(e))
print e
continue
finally:
sys.stdout.flush()
sys.stdout.write('\n')
db.commit()
print ' URLs: %d found, %d in database, %d committed.' % (url_count, duplicate_count, url_count-duplicate_count)
def geturlrow(url):
urlrow = None
try:
dbc.execute("SELECT * FROM url where url = %s LIMIT 1", (url))
urlrow = dbc.fetchone()
except Exception, e:
print e
return urlrow
def visit(url, depth):
    """Fetch *url*, record the response in the database, and return the
    list of links found (HTML only; [] otherwise, None on early exit).

    Pages visited within the last 24 hours are not re-fetched; their
    stored content is reused.  Uses module globals settings, dbc and db.
    """
    if depth > settings['max-depth']:
        return
    print 'Crawling %s in depth %d...' % (url, depth)
    #opener = urllib2.build_opener()
    data = {}
    data = urllib.urlencode(data)
    # Masquerade as a desktop Firefox; some sites reject unknown agents.
    headers = { 'User-Agent' : 'Mozilla/5.0 (X11; U; Linux x86_64; en-US; rv:1.9.0.4) Gecko/2008111319 Ubuntu/8.10 (intrepid) Firefox/3.0.4' }
    request = urllib2.Request(url, data, headers)
    response = None
    response_code = 0
    try:
        #page = response.open(url)
        response = urllib2.urlopen(request)
        response_code = response.getcode()
    except urllib2.HTTPError, e:
        print e
        # Keep the HTTP error code so it still gets recorded below.
        response_code = e.getcode()
    except urllib2.URLError, e:
        print e
    row = geturlrow(url)
    if row == None:
        sys.stderr.write('URL does not exist in the database')
        return
    last_visit = 0
    if row['last_visit'] != None:
        last_visit = calendar.timegm(row['last_visit'].timetuple())
    current = int(time.time())
    content = None
    if current - last_visit < 3600*24:
        # Visited less than a day ago: reuse the newest stored copy.
        print ' This page has been visited recently, skipping...'
        dbc.execute('SELECT * FROM page WHERE url_id = %s ORDER BY `date` DESC LIMIT 1', row['id'])
        pagerow = dbc.fetchone()
        if pagerow != None:
            content = pagerow['content']
    else:
        try:
            result = dbc.execute('UPDATE url SET last_response = %s, last_visit = FROM_UNIXTIME(%s) WHERE id = %s LIMIT 1', (response_code, int(time.time()), row['id']))
        except Exception, e:
            print 'Exception:', e
        if response != None:
            try:
                content = response.read()
            except Exception, e:
                print ' Exception: Could not read the page:', e
                return
            # Non-text payloads are base64-encoded before storage.
            if response.headers.type[0:4] != 'text':
                content = base64.b64encode(content)
            query = "INSERT INTO page (url_id, type, content) VALUES(%s, %s, %s)"
            args = (row['id'], response.headers.type, content)
            try:
                dbc.execute(query, args)
            except Exception, e:
                print ' Exception:', e
            try:
                db.commit()
            except Exception, e:
                print ' Exception:', e
    if response != None and response.headers.type == 'text/html':
        # Only HTML gets parsed for further links.
        return geturls(content, url)
    else:
        return []
    '''
    urls = geturls(content, url)
    if len(urls) > 0:
        for u in urls:
            visit(u, depth+1)
    '''
# TODO: Needs to be replaced with getopts or something
# Script entry point.  argv[1] = seed URL (optional), argv[2] = max depth.
if len(sys.argv) > 2:
    settings['max-depth'] = int(sys.argv[2])
db = None
try:
    db = MySQLdb.connect(host=auth.host, user=auth.user, passwd=auth.passwd, db=auth.db)
except Exception, e:
    print 'Could not connect to database:', e
    exit(1)
dbc = db.cursor(MySQLdb.cursors.DictCursor)
if len(sys.argv) > 1:
    # Seed mode: start from the URL given on the command line.
    url = sys.argv[1]
    commit_urls([url])
    urls = visit(url, 0)
    for url in urls:
        visit(url, 0)
else:
    # Drain mode: loop forever over random batches of unvisited URLs.
    while 1:
        urlrowset = None
        try:
            dbc.execute('SELECT * FROM url WHERE last_response is NULL ORDER BY RAND() LIMIT 25')
            urlrowset = dbc.fetchall()
        except Exception, e:
            print e
        for urlrow in urlrowset:
            visit(urlrow['url'], 0)
db.close()
| Python |
#!/usr/bin/env python
# coding=utf-8
# Based on GAppProxy by Du XiaoGang <dugang@188.com>
# Based on WallProxy 0.4.0 by hexieshe <www.ehust@gmail.com>
__version__ = 'beta'
__author__ = 'phus.lu@gmail.com'
__password__ = ''
import zlib, logging, time, re, struct
from google.appengine.ext import webapp
from google.appengine.ext import db
from google.appengine.ext.webapp.util import run_wsgi_app
from google.appengine.api import urlfetch
from google.appengine.runtime import apiproxy_errors, DeadlineExceededError
def encode_data(dic):
    """Serialize a dict as 'key=hexvalue&...'; hex-encoding sidesteps escaping."""
    return '&'.join('%s=%s' % (k, str(v).encode('hex')) for k, v in dic.iteritems())
def decode_data(qs):
    """Inverse of encode_data: parse 'key=hexvalue&...' back into a dict."""
    return dict((k, v.decode('hex')) for k, v in (x.split('=') for x in qs.split('&')))
class MainHandler(webapp.RequestHandler):
FRP_Headers = ('', 'x-google-cache-control', 'via')
Fetch_Max = 3
Fetch_MaxSize = 512*1000
Deadline = (15, 30)
def sendResponse(self, status_code, headers, content='', method='', url=''):
self.response.headers['Content-Type'] = 'application/octet-stream'
contentType = headers.get('content-type', '').lower()
headers = encode_data(headers)
# Build send-data
rdata = '%s%s%s' % (struct.pack('>3I', status_code, len(headers), len(content)), headers, content)
if contentType.startswith(('text', 'application')):
data = zlib.compress(rdata)
data = '1'+data if len(rdata)>len(data) else '0'+rdata
else:
data = '0' + rdata
if status_code == 555:
logging.warning('Response: "%s %s" %s' % (method, url, content))
else:
logging.debug('Response: "%s %s" %d %d/%d/%d' % (method, url, status_code, len(content), len(rdata), len(data)))
return self.response.out.write(data)
def sendNotify(self, status_code, content, method='', url='', fullContent=False):
if not fullContent and status_code!=555:
content = '<h2>Fetch Server Info</h2><hr noshade="noshade"><p>Code: %d</p>' \
'<p>Message: %s</p>' % (status_code, content)
headers = {'content-type':'text/html', 'content-length':len(content)}
self.sendResponse(status_code, headers, content, method, url)
def post(self):
request = decode_data(zlib.decompress(self.request.body))
if __password__ and __password__ != request.get('password', ''):
return self.sendNotify(403, 'Fobbidon -- wrong password. Please check your proxy.ini and fetch.py.')
method = request.get('method', 'GET')
fetch_method = getattr(urlfetch, method, '')
if not fetch_method:
return self.sendNotify(555, 'Invalid Method', method)
url = request.get('url', '')
if not url.startswith('http'):
return self.sendNotify(555, 'Unsupported Scheme', method, url)
payload = request.get('payload', '')
deadline = MainHandler.Deadline[1 if payload else 0]
fetch_range = 'bytes=0-%d' % (MainHandler.Fetch_MaxSize - 1)
rangeFetch = False
headers = {}
for line in request.get('headers', '').splitlines():
kv = line.split(':', 1)
if len(kv) != 2:
continue
key = kv[0].strip().lower()
value = kv[1].strip()
#if key in MainHandler.FRS_Headers:
# continue
if key == 'rangefetch':
rangeFetch = True
continue
if key =='range' and not rangeFetch:
m = re.search(r'(\d+)?-(\d+)?', value)
if m is None:
continue
start, end = m.group(1, 2)
if not start and not end:
continue
if not start and int(end) > MainHandler.Fetch_MaxSize:
end = '1023'
elif not end or int(end)-int(start)+1 > MainHandler.Fetch_MaxSize:
end = str(MainHandler.Fetch_MaxSize - 1 + int(start))
fetch_range = ('bytes=%s-%s' % (start, end))
headers[key] = value
headers['Connection'] = 'close'
for i in range(MainHandler.Fetch_Max):
try:
response = urlfetch.fetch(url, payload, fetch_method, headers, False, False, deadline)
#if method=='GET' and len(response.content)>0x1000000:
# raise urlfetch.ResponseTooLargeError(None)
break
except apiproxy_errors.OverQuotaError, e:
time.sleep(4)
except DeadlineExceededError:
logging.error('DeadlineExceededError(deadline=%s, url=%r)', deadline, url)
time.sleep(1)
deadline = MainHandler.Deadline[1]
except urlfetch.InvalidURLError, e:
return self.sendNotify(555, 'Invalid URL: %s' % e, method, url)
except urlfetch.ResponseTooLargeError, e:
if method == 'GET':
deadline = MainHandler.Deadline[1]
if not rangeFetch:
headers['Range'] = fetch_range
else:
return self.sendNotify(555, 'Response Too Large: %s' % e, method, url)
except Exception, e:
if i==0 and method=='GET':
deadline = MainHandler.Deadline[1]
if not rangeFetch:
headers['Range'] = fetch_range
else:
return self.sendNotify(555, 'Urlfetch error: %s' % e, method, url)
for k in MainHandler.FRP_Headers:
if k in response.headers:
del response.headers[k]
if 'set-cookie' in response.headers:
scs = response.headers['set-cookie'].split(', ')
cookies = []
i = -1
for sc in scs:
if re.match(r'[^ =]+ ', sc):
try:
cookies[i] = '%s, %s' % (cookies[i], sc)
except IndexError:
pass
else:
cookies.append(sc)
i += 1
response.headers['set-cookie'] = '\r\nSet-Cookie: '.join(cookies)
response.headers['connection'] = 'close'
return self.sendResponse(response.status_code, response.headers, response.content, method, url)
def get(self):
self.response.headers['Content-Type'] = 'text/html; charset=utf-8'
self.response.out.write( \
'''
<html>
<head>
<meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
<title>GoAgent %(version)s on GAE/已经在工作了</title>
</head>
<body>
<table width="800" border="0" align="center">
<tr><td align="center"><hr></td></tr>
<tr><td align="center">
<b><h1>GoAgent %(version)s on GAE/已经在工作了</h1></b>
</td></tr>
<tr><td align="center"><hr></td></tr>
<tr><td align="center">
GoAgent是一个开源的HTTP Proxy软件,使用Python编写,运行于Google App Engine平台上.
</td></tr>
<tr><td align="center"><hr></td></tr>
<tr><td align="center">
更多相关介绍,请参考<a href="https://github.com/phus/goagent">GoAgent项目主页</a>.
</td></tr>
<tr><td align="center"><hr></td></tr>
<tr><td align="center">
<img src="http://code.google.com/appengine/images/appengine-silver-120x30.gif" alt="Powered by Google App Engine" />
</td></tr>
<tr><td align="center"><hr></td></tr>
</table>
</body>
</html>
''' % dict(version=__version__))
def main():
    """CGI/WSGI entry point: route every path to MainHandler."""
    application = webapp.WSGIApplication([(r'/.*', MainHandler)], debug=True)
    run_wsgi_app(application)
if __name__ == '__main__':
    main()
# Copyright (C) 2010-2011 | GNU GPLv3
__author__ = 'd3d3LmVodXN0QGdtYWlsLmNvbQ=='.decode('base64')
from UserDict import IterableUserDict
__all__ = ['HTTPHeaders']
class HTTPHeaders(IterableUserDict):
    """Case-normalizing HTTP header dictionary.

    Keys are stored Title-Cased; repeated headers are folded into a single
    value joined with '\\r\\nName: ' so str() reproduces the wire form.
    """
    def __setitem__(self, key, item):
        self.data[key.title()] = item
    def add(self, key, item):
        """Append a header, merging with an existing one of the same name."""
        key = key.title()
        if key in self.data:
            self.data[key] = '%s\r\n%s: %s' % (self.data[key], key, item)
        else:
            self.data[key] = item
    def __delitem__(self, key):
        # Deleting a missing header is a deliberate no-op.
        # NOTE(review): key is not title-cased here, unlike __setitem__ --
        # callers appear to pass already-normalized names; confirm.
        try:
            del self.data[key]
        except KeyError:
            pass
    def readheaders(self, fp):
        """Parse 'Name: value' lines from a string or an iterable of lines;
        stops at the first line without a colon."""
        if isinstance(fp, basestring):
            fp = fp.splitlines()
        for line in fp:
            k, s, v = line.partition(':')
            if not s: break
            self.add(k, v.strip())
    def update(self, dic=None, **kwargs):
        """Update from another HTTPHeaders, raw header text, a file-like
        object, a mapping, or an iterable of (key, value) pairs."""
        if not dic:
            pass
        elif isinstance(dic, HTTPHeaders):
            self.data.update(dic.data)
        elif isinstance(dic, basestring) or hasattr(dic, 'readline'):
            self.readheaders(dic)
        else:
            try:
                for k in dic.keys():
                    self[k] = dic[k]
            except AttributeError:
                # No .keys(): treat as an iterable of pairs.
                for k,v in dic:
                    self.add(k, v)
        if kwargs:
            self.update(kwargs)
    def __str__(self):
        """Render as wire-format header lines, each ending in CRLF."""
        buf = [None] * len(self.data)
        for i,v in enumerate(self.data.iteritems()):
            buf[i] = '%s: %s\r\n' % v
        return ''.join(buf)
    def __getstate__(self):
        # Pickle only the underlying dict.
        return self.data
    def __setstate__(self, state):
        self.data = state
# Copyright (C) 2010-2011 | GNU GPLv3
__author__ = '%s & %s' % ('d3d3LmVodXN0QGdtYWlsLmNvbQ=='.decode('base64'),
'YnJvbnplMW1hbkBnbWFpbC5jb20='.decode('base64'))
import hashlib, itertools
__all__ = ['Crypto']
class XOR:
    '''XOR with pure Python in case no PyCrypto'''
    def __init__(self, key):
        # Raw key string; cycled over the data during encryption.
        self.key = key
    def encrypt(self, data):
        """XOR *data* against the cycled key, processed in 1 KiB slices."""
        xorsize = 1024
        key = itertools.cycle(map(ord, self.key))
        dr = xrange(0, len(data), xorsize)
        ss = [None] * len(dr)
        for i,j in enumerate(dr):
            dd = [ord(d)^k for d,k in itertools.izip(data[j:j+xorsize], key)]
            ss[i] = ''.join(map(chr, dd))
        return ''.join(ss)
    # XOR is symmetric, so decryption is the identical operation.
    decrypt = encrypt
class NUL:
    """Identity 'cipher': encrypt and decrypt return the input unchanged.

    Used when the configured key size is 0 and the key is empty, i.e.
    encryption is effectively disabled.
    """

    def encrypt(self, data):
        # Pass-through: no transformation is applied.
        return data

    # Decrypting a no-op is the same no-op.
    decrypt = encrypt
class Crypto:
    """Wrapper over PyCrypto ciphers selected by a 'CIPHER-MODE-KEYSIZE'
    string (e.g. 'AES-CBC-32'); falls back to the pure-Python XOR cipher
    when PyCrypto is unavailable.  Padding is prepended (pad length in
    the first byte)."""
    # Cipher block sizes in bytes (1 = stream cipher, no padding needed).
    _BlockSize = {'AES':16, 'ARC2':8, 'ARC4':1, 'Blowfish':8, 'CAST':8,
                  'DES':8, 'DES3':8, 'IDEA':8, 'RC5':8, 'XOR':1}
    _Modes = ['ECB', 'CBC', 'CFB', 'OFB', 'PGP'] #CTR needs 4 args
    # Key sizes accepted by ciphers with restricted key lengths.
    _KeySize = {'AES':[16,24,32], 'CAST':xrange(5,17),
                'DES':[8], 'DES3':[16,24], 'IDEA':[16]}
    def __init__(self, mode='AES-CBC-32'):
        """Parse and sanitize the mode string, then import the cipher."""
        mode = mode.split('-')
        mode += [''] * (3 - len(mode))
        #check cipher
        self.cipher = mode[0] if mode[0] else 'AES'
        if self.cipher not in self._BlockSize:
            raise ValueError('Invalid cipher: '+self.cipher)
        #check ciphermode
        if self._BlockSize[self.cipher] == 1:
            self.ciphermode = ''
        else:
            self.ciphermode = mode[1] if mode[1] in self._Modes else 'CBC'
        #check keysize
        try:
            self.keysize = int(mode[2])
        except ValueError:
            self.keysize = 32
        if self.keysize != 0:
            if self.cipher in self._KeySize:
                keysize = self._KeySize[self.cipher]
                if self.keysize not in keysize:
                    self.keysize = keysize[-1]
            #avoid Memmory Error
            if self.cipher=='RC5' and self.keysize in (1, 57): self.keysize=32
        #try to import Crypto.Cipher.xxxx
        try:
            cipherlib = __import__('Crypto.Cipher.'+self.cipher, fromlist='x')
            self._newobj = cipherlib.new
            if self._BlockSize[self.cipher] != 1:
                self._ciphermode = getattr(cipherlib, 'MODE_'+self.ciphermode)
        except ImportError:
            # No PyCrypto: only the pure-Python XOR fallback is available.
            if self.cipher == 'XOR': self._newobj = XOR
            else: raise
    def paddata(self, data):
        """Prepend padding: first byte is the pad length, then pad spaces."""
        blocksize = self._BlockSize[self.cipher]
        if blocksize != 1:
            padlen = (blocksize - len(data) - 1) % blocksize
            data = '%s%s%s' % (chr(padlen), ' '*padlen, data)
        return data
    def unpaddata(self, data):
        """Strip the padding added by paddata."""
        if self._BlockSize[self.cipher] != 1:
            padlen = ord(data[0])
            data = data[padlen+1:]
        return data
    def getcrypto(self, key):
        """Build a cipher object for *key* (NUL when encryption is off).

        The actual key is a SHA-512-derived prefix; for block ciphers the
        digest's tail doubles as the IV.
        """
        if self.keysize==0 and key=='':
            return NUL()
        khash = hashlib.sha512(key).digest()
        if self.keysize != 0:
            key = khash[:self.keysize]
        blocksize = self._BlockSize[self.cipher]
        if blocksize == 1:
            return self._newobj(key)
        return self._newobj(key, self._ciphermode, khash[-blocksize:])
    def encrypt(self, data, key):
        """Pad then encrypt *data* with a cipher derived from *key*."""
        crypto = self.getcrypto(key)
        data = self.paddata(data)
        return crypto.encrypt(data)
    def decrypt(self, data, key):
        """Decrypt then unpad *data* with a cipher derived from *key*."""
        crypto = self.getcrypto(key)
        data = crypto.decrypt(data)
        return self.unpaddata(data)
    def getmode(self):
        """Return the normalized 'CIPHER-MODE-KEYSIZE' string."""
        return '%s-%s-%d' % (self.cipher, self.ciphermode, self.keysize)
    def __str__(self):
        return '%s("%s")' % (self.__class__, self.getmode())
    def getsize(self, size):
        """Return *size* rounded up to a whole number of cipher blocks."""
        blocksize = self._BlockSize[self.cipher]
        return (size + blocksize - 1) // blocksize * blocksize
class Crypto2(Crypto):
    """Crypto variant with trailing padding: pad spaces plus the pad
    length in the LAST byte, instead of Crypto's leading padding."""
    def paddata(self, data):
        blocksize = self._BlockSize[self.cipher]
        if blocksize != 1:
            padlen = (blocksize - len(data) - 1) % blocksize
            data = '%s%s%s' % (data, ' '*padlen, chr(padlen))
        return data
    def unpaddata(self, data):
        if self._BlockSize[self.cipher] != 1:
            padlen = ord(data[-1])
            data = data[:-(padlen+1)]
        return data
# Copyright (C) 2010-2011 | GNU GPLv3
__author__ = 'ZHRtYWppYUAxNjMuY29t'.decode('base64')
__version__ = '0.0.2'
import gaeproxy
class MainHandler(gaeproxy.MainHandler):
    """gaeproxy handler variant that hex-encodes request/response dicts
    as querystrings instead of pickling them."""
    # Separate memcache namespace so entries never mix with other codecs.
    _cachename = 'wp_cache2'
    def dump_data(self, dic):
        """Serialize a dict as 'key=hexvalue&...' (keys stay plain)."""
        return '&'.join('%s=%s' % (k,str(v).encode('hex')) for k,v in dic.iteritems())
    def load_data(self, qs):
        """Inverse of dump_data; returns {} for an empty string."""
        return dict((k,v.decode('hex')) for k,v in (x.split('=') for x in qs.split('&'))) if qs else {}
def main():
    """CGI/WSGI entry point: route every path to MainHandler."""
    application = gaeproxy.webapp.WSGIApplication([(r'/.*', MainHandler)], debug=True)
    gaeproxy.run_wsgi_app(application)
if __name__ == '__main__':
    main()
| Python |
__author__ = 'd3d3LmVodXN0QGdtYWlsLmNvbQ=='.decode('base64')
__version__ = '1.0.2'
from util import crypto, httpheaders
import cPickle as pickle
import zlib, logging, time, re, struct
from google.appengine.ext import webapp, db
from google.appengine.ext.webapp.util import run_wsgi_app
from google.appengine.api import urlfetch, memcache
from google.appengine.runtime import apiproxy_errors
urlfetch._CaselessDict = httpheaders.HTTPHeaders
class WPConfig(db.Model):
    """Singleton datastore entity holding WallProxy's runtime configuration."""
    # Seconds to keep the parsed config in memcache before re-reading it.
    cfgCacheTime = db.IntegerProperty(required=True, default=5*60)
    # Seconds to cache fetched pages (0 disables page caching).
    cacheTime = db.IntegerProperty(required=True, default=24*3600)
    # Maximum response body size in bytes before range-splitting kicks in.
    maxSize = db.IntegerProperty(required=True, default=9000000)
    # Shared secret between client and server ('' means no password).
    siteKey = db.StringProperty(default=u'')
    # Crypto mode string understood by util.crypto ('CIPHER-MODE-KEYSIZE').
    cryptoMode = db.StringProperty(default=u'XOR--32')
    # Version stamp; a mismatch with __version__ resets the stored config.
    version = db.StringProperty()
def getConfig():
    """Load the WallProxy config dict from memcache, falling back to the
    datastore.

    A stored entity with an older version is deleted (and memcache
    flushed) so a fresh one with current defaults gets created; the
    parsed dict is then cached for cfgCacheTime seconds.
    """
    config = memcache.get('config', namespace='wp_config')
    if config is None:
        cfg = WPConfig.all().get()
        if cfg and cfg.version < __version__:
            cfg.delete(); cfg = None
            memcache.flush_all()
        if not cfg:#No Entry
            cfg = WPConfig(version=__version__)
            cfg.put()
        config = {'cacheTime':cfg.cacheTime, 'maxSize':cfg.maxSize,
                  'siteKey':cfg.siteKey.encode('utf-8').decode('string_escape'),
                  'crypto':cfg.cryptoMode.encode('utf-8')}
        if not memcache.set('config', config, cfg.cfgCacheTime, namespace='wp_config'):
            logging.error('Memcache set wp_config failed')
    return config
def _init_config(crypto_cls):
    """Return the config dict with 'crypto' replaced by a crypto_cls
    instance built from the stored mode string."""
    config = getConfig()
    config['crypto'] = crypto_cls(config['crypto'])
    return config
class MainHandler(webapp.RequestHandler):
_cfg = _init_config(crypto.Crypto)
_dirty_headers = ('X-Google-Cache-Control', 'Via')
_setcookie_re = re.compile(r'[^ =]+ ')
_crange_re = re.compile(r'bytes\s+(\d+)-(\d+)/(\d+)')
_try_times = 2
_deadline = (5, 10)
_cachename = 'wp_cache'
def dump_data(self, data):
return pickle.dumps(data, 1)
def load_data(self, data):
return pickle.loads(data)
def _need_cache(self, method, code, headers):
need_cache = False
if (method=='GET' and code==200 and self._cfg['cacheTime'] and
headers.get('Content-Type', '').lower().find('text/html')<0 and
headers.get('Pragma', '').find('no-cache')<0 and
headers.get('Cache-Control', '').find('no-cache')<0 and
'Set-Cookie' not in headers):
t = time.gmtime(time.time() + self._cfg['cacheTime'])
headers['X-Cache'] = 'HIT from WallProxy'
headers['Expires'] = time.strftime('%a, %d %b %Y %H:%M:%S GMT', t)
headers['Cache-Control']='public, max-age=%d'%self._cfg['cacheTime']
t = time.strftime('%a, %d %b %Y %H:%M:%S GMT', time.gmtime())
headers['Last-Modified'] = t
need_cache = True
return need_cache
def _pack_data(self, code, headers, data):
crypto = self._cfg['crypto']
ct = headers.get('Content-Type', '').lower()
headers = self.dump_data(headers.__getstate__())
zip = 0
if ct.find('text')>=0 or ct.find('application')>=0:
cdata = crypto.paddata(zlib.compress(headers+data))
if len(cdata) < (crypto.getsize(len(headers)) +
crypto.getsize(len(data))): zip = 1
if zip == 0:
headers = crypto.paddata(headers)
data = crypto.paddata(data)
info = struct.pack('>BHI', zip, code, len(headers))
if zip == 1: return info, '', cdata
return info, headers, data
def _send_data(self, data, length):
self.response.headers['Content-Type'] = 'application/octet-stream'
self.response.headers['Content-Length'] = str(length)
crypto = self._cfg['crypto'].getcrypto(self._cfg['siteKey'])
info, headers, data = data
self.response.out.write(info)
if headers:
self.response.out.write(crypto.encrypt(headers))
self.response.out.write(crypto.encrypt(data))
def sendResponse(self, code, headers, content, method, url):
need_cache = self._need_cache(method, code, headers)
data = self._pack_data(code, headers, content)
length = len(data[0])+len(data[1])+len(data[2])
if need_cache and length<1000000:
try:
if not memcache.set(url, data, self._cfg['cacheTime'], namespace=self._cachename):
logging.warning('Memcache set %s(%s) failed' % (self._cachename, url))
except: pass
if code == 555:
logging.warning('Response: "%s %s" %s' % (method, url, content))
else:
logging.debug('Response: "%s %s" %d %d/%d' % (
method, url, code, len(content), length))
self._send_data(data, length)
def sendNotify(self, code, content, method='', url='', fullContent=False):
if not fullContent and code!=555:
content = ('<h2>Fetch Server Info</h2><hr noshade="noshade">'
'<p>Code: %d</p><p>Message: %s</p>' % (code, content))
headers = httpheaders.HTTPHeaders({'Server':'WallProxy/%s'%__version__,
'Content-Type':'text/html', 'Content-Length':len(content)})
self.sendResponse(code, headers, content, method, url)
def handle_wallproxy(self, method, url):
import os
appid = '%s v%s' % (os.environ['APPLICATION_ID'],
os.environ['CURRENT_VERSION_ID'])
path = url[len('http://wallproxy'):].lower()
if path == '/cache':
resp = ('<h2>Memcache Status(%s)</h2><hr noshade="noshade">'
'<p>%s</p>' % (appid, memcache.get_stats()))
return self.sendNotify(200, resp, method, url, True)
elif path == '/cache/reset' and memcache.flush_all():
crypto_cls = self._cfg['crypto'].__class__
self.__class__._cfg = _init_config(crypto_cls) #reset password
return self.sendNotify(200, 'Memcache Reseted(%s)'%appid,method,url)
else:
resp = []
for k,v in self._cfg.iteritems():
resp.append('%s: %s' % (k, v))
resp = ('<h2>WallProxy Configuration(%s)</h2><hr noshade="noshade">'
'<pre>%s</pre>' % (appid, '\n'.join(resp)))
return self.sendNotify(200, resp, method, url, True)
def _check_cache(self, method, url, headers):
data = None
if self._cfg['cacheTime'] and method=='GET':
data = memcache.get(url, namespace=self._cachename)
if data is not None:
if 'If-Modified-Since' in headers:
headers = httpheaders.HTTPHeaders()
self.sendResponse(304, headers, '', method, url)
else:
length = len(data[0])+len(data[1])+len(data[2])
logging.debug('Memcache hits: "%s %s" %d %d' % (method, url, 200, length))
self._send_data(data, length)
return True
return False
def _check_headers(self, respheaders):
for k in self._dirty_headers:
del respheaders[k]
if 'Set-Cookie' in respheaders:
scs = respheaders['Set-Cookie'].split(', ')
cookies = []
i = -1
for sc in scs:
if self._setcookie_re.match(sc): #r'[^ =]+ '
try:
cookies[i] = '%s, %s' % (cookies[i], sc)
except IndexError:
pass
else:
cookies.append(sc)
i += 1
respheaders['Set-Cookie'] = '\r\nSet-Cookie: '.join(cookies)
def _check_resplength(self, resp):
max_size = self._cfg['maxSize']
if max_size<=0 or len(resp.content)<=max_size: return
m = self._crange_re.search(resp.headers.get('Content-Range', ''))
if m:
m = m.groups()
start=int(m[0]); end=start+max_size-1; whole=int(m[2])
else:
start=0; end=max_size-1; whole=len(resp.content)
resp.status_code = 206
resp.headers['Content-Range'] = 'bytes %d-%d/%d' % (start, end, whole)
resp.content = resp.content[:max_size]
def post(self):
try:
request = self._cfg['crypto'].decrypt(self.request.body,
self._cfg['siteKey'])
request = self.load_data(zlib.decompress(request))
except:
return self.response.out.write('Hello World!')
method = request.get('method', '')
fetch_method = getattr(urlfetch, method)
if not hasattr(urlfetch, method):
return self.sendNotify(555, 'Invalid Method', method)
url = request.get('url', '')
if not url.startswith('http'):
return self.sendNotify(555, 'Unsupported Scheme', method, url)
if url.startswith('http://wallproxy/'):
return self.handle_wallproxy(method, url)
headers = request.get('headers', '')
if isinstance(headers, str):
headers = httpheaders.HTTPHeaders(headers)
if self._check_cache(method, url, headers): return
headers['Connection'] = 'close'
request.setdefault('range', '')
body = request.get('body', request.get('payload'))
deadline = self._deadline[1 if body else 0]
for i in xrange(self._try_times):
try:
resp = urlfetch.fetch(url, body, fetch_method,
headers, False, False, deadline)
break
except apiproxy_errors.OverQuotaError, e:
time.sleep(2)
except urlfetch.InvalidURLError, e:
return self.sendNotify(555, 'Invalid URL: %s' % e, method, url)
except urlfetch.ResponseTooLargeError, e:
if method == 'GET':
deadline = self._deadline[1]
if request['range']: headers['Range'] = request['range']
else:
return self.sendNotify(555, 'Response Too Large: %s' % e, method, url)
except Exception, e:
if i==0 and method=='GET':
deadline = self._deadline[1]
if request['range']: headers['Range'] = request['range']
else:
return self.sendNotify(555, 'Urlfetch error: %s' % e, method, url)
self._check_headers(resp.headers)
self._check_resplength(resp)
return self.sendResponse(resp.status_code, resp.headers,
resp.content, method, url)
def get(self):
self.redirect('http://twitter.com/hexieshe')
def main():
    """CGI/WSGI entry point: route every path to MainHandler."""
    application = webapp.WSGIApplication([(r'/.*', MainHandler)], debug=True)
    run_wsgi_app(application)
if __name__ == '__main__':
    main()
__author__ = 'd3d3LmVodXN0QGdtYWlsLmNvbQ=='.decode('base64')
__version__ = '0.4.2'
import gaeproxy
from gaeproxy import struct, zlib, logging, memcache
class MainHandler(gaeproxy.MainHandler):
    """gaeproxy handler variant using a '\\x10'-escaped querystring codec
    (instead of pickle) and Crypto2 trailing padding; packed responses
    are cached under the 'wp_cache0' memcache namespace."""
    _cfg = gaeproxy._init_config(gaeproxy.crypto.Crypto2)
    # '\x10' is the escape character; the digit after it selects the
    # original byte: 0 -> '\x10', 1 -> '=', 2 -> '&'.
    _unquote_map = {'0':'\x10', '1':'=', '2':'&'}
    def _quote(self, s):
        """Escape '\\x10', '=' and '&' so *s* can sit in a k=v&... string."""
        return str(s).replace('\x10', '\x100').replace('=','\x101').replace('&','\x102')
    def dump_data(self, dic):
        """Serialize a dict to the escaped querystring format."""
        return '&'.join('%s=%s' % (self._quote(k), self._quote(v)) for k,v in dic.iteritems())
    def _unquote(self, s):
        """Inverse of _quote."""
        res = s.split('\x10')
        for i in xrange(1, len(res)):
            item = res[i]
            try:
                res[i] = self._unquote_map[item[0]] + item[1:]
            except KeyError:
                # Unknown escape: keep the raw byte as-is.
                res[i] = '\x10' + item
        return ''.join(res)
    def load_data(self, qs):
        """Parse the escaped querystring back into a dict; pairs with an
        empty value are dropped."""
        pairs = qs.split('&')
        dic = {}
        for name_value in pairs:
            if not name_value:
                continue
            nv = name_value.split('=', 1)
            if len(nv) != 2:
                continue
            if len(nv[1]):
                dic[self._unquote(nv[0])] = self._unquote(nv[1])
        return dic
    def _pack_data(self, code, headers, data):
        """Pack a response as a single flag-prefixed string (not the base
        class's triple); textual payloads are zlib-compressed when that
        actually saves bytes."""
        ct = headers.get('Content-Type', '').lower()
        headers = self.dump_data(headers)
        info = struct.pack('>3I', code, len(headers), len(data))
        data = ''.join((info, headers, data))
        if ct.find('text')>=0 or ct.find('application')>=0:
            cdata = zlib.compress(data)
            data = '1'+cdata if len(data)>len(cdata) else '0'+data
        else:
            data = '0'+data
        return data
    def _send_data(self, data, length):
        """Encrypt the packed string with the siteKey and write it out."""
        self.response.headers['Content-Type'] = 'application/octet-stream'
        self.response.headers['Content-Length'] = str(length)
        data = self._cfg['crypto'].encrypt(data, self._cfg['siteKey'])
        self.response.out.write(data)
    def sendResponse(self, code, headers, content, method, url):
        """Pack, optionally memcache (namespace wp_cache0), log and send."""
        need_cache = self._need_cache(method, code, headers)
        data = self._pack_data(code, headers, content)
        length = len(data)
        if need_cache and length<1000000:
            if not memcache.set(url, data, self._cfg['cacheTime'], namespace='wp_cache0'):
                logging.warning('Memcache set wp_cache0(%s) failed' % url)
        if code == 555:
            logging.warning('Response: "%s %s" %s' % (method, url, content))
        else:
            logging.debug('Response: "%s %s" %d %d/%d' % (
                method, url, code, len(content), length))
        self._send_data(data, length)
    def _check_cache(self, method, url, headers):
        """Serve a cached GET if present; True when the request was handled."""
        data = None
        if self._cfg['cacheTime'] and method=='GET':
            data = memcache.get(url, namespace='wp_cache0')
        if data is not None:
            if 'If-Modified-Since' in headers:
                self.sendResponse(304, {}, '', method, url)
            else:
                length = len(data)
                logging.debug('Memcache hits: "%s %s" %d %d' % (method, url, 200, length))
                self._send_data(data, length)
            return True
        return False
def main():
    """CGI/WSGI entry point: route every path to MainHandler."""
    application = gaeproxy.webapp.WSGIApplication([(r'/.*', MainHandler)], debug=True)
    gaeproxy.run_wsgi_app(application)
if __name__ == '__main__':
    main()
# Copyright (C) 2010-2011 | GNU GPLv3
__author__ = 'd3d3LmVodXN0QGdtYWlsLmNvbQ=='.decode('base64')
__version__ = '0.0.1'
import re
from util import urlinfo, urlfetch
def _sumRule(keyword, hash, share):
    """Build a compact 'kN|N+hN:N|N:N+sN|N=N' statistics string.

    keyword/share are (block, exception) pairs of sets; hash is a pair of
    dicts mapping a hash word to a set of regex rules.  The trailing =N is
    the grand total of keyword, hashed-regex and shared-regex rules.
    """
    k = map(len, keyword)
    hk = map(len, hash)
    hv = [sum([len(v) for v in t.itervalues()]) for t in hash]
    s = map(len, share)
    num = 'k%d|%d+h%d:%d|%d:%d+s%d|%d=%d' % (k[0], k[1],
        hk[0], hv[0], hk[1], hv[1], s[0], s[1], sum(k+hv+s))
    return num
def _parseRule(rule):
    """Parse an AutoProxy rule list into (keyword, hash, share) structures.

    Each structure is a (block, exception) pair indexed by rule_type
    (0 = '@@' exception, 1 = block): keyword holds plain substrings,
    hash maps a distinctive word to the regexes containing it, and share
    holds regexes with no usable hash word.  Non-regex rules are compiled
    to regex source (Adblock-style wildcard/anchor translation).  Returns
    ((keyword, hash, share), stats) with stats from _sumRule.
    """
    keyword = set(), set()
    hash = {}, {}
    share = set(), set()
    for line in rule.splitlines()[1:]:
        # Ignore the first line ([AutoProxy x.x]), empty lines and comments
        line = line.strip().lower()
        if not line or line[0] in ('[','!','#'):
            continue
        rule_type = 1
        if line.startswith('@@'): # Exceptions
            line = line[2:]
            rule_type = 0
        if line[0]=='/' and line[-1]=='/': # Regular expressions
            line = line[1:-1]
        else:
            # Strictly mapping to keyword blocking
            line = re.sub(r'^(https?:)', r'|\1', line)
            if line[0]!='|' and '*' not in line: # Maybe keyword
                i1 = line.find('.'); i2 = line.find('/'); i3 = line.find('?')
                if i1==-1 or (i2!=-1 and i2<i1) or (i2==-1 and i3!=-1):
                    keyword[rule_type].add(line)
                    continue
            # Remove multiple wildcards
            line = re.sub(r'\*+', '*', line)
            # Remove anchors following separator placeholder
            line = re.sub(r'\^\|$', '^', line, 1)
            # Escape special symbols
            line = re.sub(r'(\W)', r'\\\1', line)
            # Replace wildcards by .*
            line = re.sub(r'\\\*', '.*', line)
            # Process separator placeholders
            line = re.sub(r'\\\^', r'(?:[^\w\-.%\u0080-\uFFFF]|$)', line)
            # Process extended anchor at expression start
            line = re.sub(r'^\\\|\\\|', r'^[\w\-]+:\/+(?!\/)(?:[^\/]+\.)?', line, 1)
            # Process anchor at expression start
            line = re.sub(r'^\\\|', '^', line, 1)
            # Process anchor at expression end
            line = re.sub(r'\\\|$', '$', line, 1)
            # Remove leading wildcards
            line = re.sub(r'^(?:\.\*)', '', line, 1)
            # Remove trailing wildcards
            line = re.sub(r'(?:\.\*)$', '', line, 1)
        if not line: continue # Invalid
        # Regular expressions
        idot = line.find('\\.')
        if idot == -1: hash_key = None
        else:
            # Find domain field
            istart = line.find(':') + 1
            if istart > idot: istart = 0
            iend = line.find('\\/', idot+2)
            if iend == -1: iend = line.find('.*', idot+2)
            tmp = line[istart:iend if iend!=-1 else None].replace('\\-', '-')
            # Remove uncertain field
            tmp = re.sub(r'(?:\(.*?\)\?)|(?:\(.*?\|.*?\))', '()', tmp)
            tmp = re.sub(r'(?:[\w-]+\.?)?(?:\*|\?|\|)(?:[\w-]+)?', '.*', tmp)
            tmp = re.findall(r'[\w-]{2,}', tmp)
            # Try get a hash word
            try:
                hash_key = tmp.pop()
                if tmp: hash_key = max(tmp, key=lambda x:len(x))
            except IndexError:
                hash_key = None
        if hash_key:
            if hash_key in hash[rule_type]:
                hash[rule_type][hash_key].add(line)
            else:
                hash[rule_type][hash_key] = set([line])
        else:
            share[rule_type].add(line)
    return (keyword,hash,share), _sumRule(keyword, hash, share)
def _fetchRule(url, proxy):
    """Download one rule list and parse it.

    Supports file:// URLs (resolved relative to wpconfig.main_dir) as well
    as network fetches through *proxy*.  Returns None on any failure,
    otherwise (parsed_rule_tables, info) where info combines the rule
    counts with the Last-Modified date when one was available.
    """
    url = urlinfo.URL(url)
    try:
        if url.scheme == 'file':
            import os, wpconfig
            url.path = os.path.join(wpconfig.main_dir, url.path)
            fp = open(url.path, 'rb')
            date = None
        else:
            try:
                fp = urlfetch.fetch(url, proxy=proxy)
            except:
                # One blind retry after 10s; list servers are flaky.
                import time
                time.sleep(10)
                fp = urlfetch.fetch(url, proxy=proxy)
            if fp.status != 200:
                fp.close()
                return None
            date = fp.msg.getheader('last-modified')
        rule = fp.read()
        fp.close()
    except:
        return None
    try:
        # AutoProxy lists are usually base64-encoded; probing a short
        # slice for clean ascii is the heuristic that the decode is real.
        tmp = rule.decode('base64')
        tmp[5:15].decode('ascii')
        rule = tmp
    except:
        pass
    rule, num = _parseRule(rule)
    return rule, '%s %s' % (num, date) if date else num
def initRule(rulelist):
    """Fetch and merge several rule lists into one (keyword, hash, share) set.

    *rulelist* is a URL string or a sequence whose items are either a URL
    or a (url, proxy) pair.  Returns ((keyword, hash, share), info) where
    info lists one (url, status) pair per source plus a final
    ('Total', counts) entry.  Each table is a two-element list indexed by
    rule_type (the whitelist/blacklist split used by _parseRule).
    """
    keyword = [set(), set()]
    hash = [{}, {}]
    share = [set(), set()]
    info = []
    if isinstance(rulelist, basestring):
        rulelist = (rulelist,)
    for rule in rulelist:
        if not rule: continue
        if isinstance(rule, basestring):
            url, proxy = rule, None
        elif len(rule) == 1:
            url, proxy = rule[0], None
        else:
            url, proxy = rule[0], rule[1]
        res = _fetchRule(url, proxy)
        if not res:
            info.append((url, 'failed'))
            continue
        info.append((url, res[1]))
        kw, hh, sh = res[0]
        for i in xrange(2):
            keyword[i] |= kw[i]
            # Merge per-domain hash buckets set-wise.
            for k,v in hh[i].iteritems():
                if k in hash[i]: hash[i][k] |= v
                else: hash[i][k] = v
            share[i] |= sh[i]
    info.append(('Total', _sumRule(keyword, hash, share)))
    return (keyword,hash,share), info
class jsRegExp:
    """Wrap a regular-expression source string so that dump2js emits it
    as a JavaScript /.../ literal instead of a quoted string."""
    def __init__(self, source):
        self.r = source
    def __json__(self):
        # Emitted verbatim between slashes; no additional escaping.
        return '/' + self.r + '/'
def dump2js(o):
    """Serialize *o* to a JavaScript literal string.

    Works like a minimal JSON encoder except that tuples/sets also become
    arrays and any other object is asked for its own representation via
    __json__() (used by jsRegExp to emit unquoted /regex/ literals).
    """
    def iterdump(o):
        # Recursive generator yielding the fragments of the literal.
        if isinstance(o, (list, tuple, set)):
            yield '['
            i = len(o)
            for v in o:
                for v in iterdump(v): yield v
                i -= 1
                if i > 0: yield ', '
            yield ']'
        elif isinstance(o, dict):
            yield '{'
            i = len(o)
            for k,v in o.iteritems():
                for k in iterdump(k): yield k
                yield ': '
                for v in iterdump(v): yield v
                i -= 1
                if i > 0: yield ', '
            yield '}'
        elif isinstance(o, basestring):
            # string-escape backslash-escapes quotes and non-printables.
            yield '"%s"' % o.encode('string-escape')
        elif isinstance(o, (int, long, float)):
            yield str(o)
        elif o is True: yield 'true'
        elif o is False: yield 'false'
        elif o is None: yield 'null'
        else:
            yield o.__json__()
    return ''.join(iterdump(o))
def initRules(ruledict, callback, prefix1, prefix2):
    """Initialize every rule group in *ruledict* in place.

    Each value of *ruledict* (a list of rule-list sources) is replaced by
    the flat six-element structure kw + hash + share, with every
    regular-expression string converted by *callback* (re.compile for the
    in-process matcher, jsRegExp for PAC generation).  Returns a
    human-readable summary; *prefix1*/*prefix2* are prepended to the
    heading/detail lines respectively.
    """
    infos = []
    for key,rulelist in ruledict.iteritems():
        rule, info = initRule(rulelist)
        kw, hh, sh = rule
        for i in xrange(2):
            for k,v in hh[i].iteritems():
                hh[i][k] = [callback(r) for r in v]
            sh[i] = [callback(r) for r in sh[i]]
        ruledict[key] = kw + hh + sh
        for i,v in enumerate(info):
            info[i] = '%s%s: %s' % (prefix2, v[0], v[1])
        info = '\n'.join(info)
        infos.append('%sRuleinfo for %s:\n%s' % (prefix1, key, info))
    return '\n'.join(infos)
def generatePAC(ruledict, pacFile):
    """Write a Proxy Auto-Configuration file for *ruledict* to *pacFile*.

    If *pacFile* already exists and contains the AUTO-GENERATED markers,
    only the section between them is replaced (preserving any user
    customization around it); otherwise the built-in template is used.
    """
    rulesBegin = '// AUTO-GENERATED RULES, DO NOT MODIFY!'
    rulesEnd = '// END OF AUTO-GENERATED RULES'
    defaultPacTemplate = '''//Proxy Auto Configuration
function FindProxyForURL(url, host) {
  for (var p in RULES)
    if (inAutoProxy(p, url, host)) return p;
  return 'DIRECT';
}
function dnsDomainIs(host, domain) {
  if (host == domain) return true;
  if (domain.charAt(0) != '.') domain = '.' + domain;
  return (host.length >= domain.length &&
          host.substring(host.length - domain.length) == domain);
}
%(rulesBegin)s
%(rulesCode)s
%(rulesEnd)s
function inAutoProxy(r,u,h){u=u.toLowerCase();r=RULES[r];var s=u.split(":",1),k,i;if(s=="http"){k=r[0];i=k.length;while(--i>=0)if(u.indexOf(k[i])!=-1)return false}h=h.split(".");var j,t;k=r[2];j=h.length;while(--j>=0){i=h[j];if(i in k&&k[i].constructor==Array){t=k[i];i=t.length;while(--i>=0)if(t[i].test(u))return false}}k=r[4];i=k.length;while(--i>=0)if(k[i].test(u))return false;if(s=="http"){k=r[1];i=k.length;while(--i>=0)if(u.indexOf(k[i])!=-1)return true}k=r[3];j=h.length;while(--j>=0){i=h[j]; if(i in k&&k[i].constructor==Array){t=k[i];i=t.length;while(--i>=0)if(t[i].test(u))return true}}k=r[5];i=k.length;while(--i>=0)if(k[i].test(u))return true;return false};'''
    try:
        fp = open(pacFile, 'r')
        # Escape % so the user's template survives the later % formatting.
        template = fp.read().replace('%','%%')
        fp.close()
    except IOError:
        template = defaultPacTemplate
    else:
        args = re.escape(rulesBegin), re.escape(rulesEnd)
        pattern = r'(?ms)^(\s*%s\s*)^.*$(\s*%s\s*)$' % args
        template, n = re.subn(pattern, r'\1%(rulesCode)s\2', template)
        # Markers missing from the existing file: fall back to the default.
        if n==0: template = defaultPacTemplate
    args = {'rulesBegin': rulesBegin, 'rulesEnd': rulesEnd}
    info = initRules(ruledict, jsRegExp, '// ', '// ')
    args['rulesCode'] = '%s\nvar RULES = %s;' % (info, dump2js(ruledict))
    print '  Writing PAC to file...'
    fp = open(pacFile, 'w')
    fp.write(template % args)
    fp.close()
    print '  Done!'
class Handler:
    """In-process AutoProxy matcher.

    __call__(rule, url) returns True when *url* is matched by the named
    rule group.  Whitelist tables (indices 0/2/4) are consulted first and
    veto a match; then blacklist tables (indices 1/3/5) are tried.
    Keyword tables (0/1) only apply to plain http URLs.
    """
    def __init__(self, ruledict):
        print initRules(ruledict, re.compile, '  ', '   ')
        self.ruledict = ruledict
    def __call__(self, rule, url):
        """Return True if *url* should be handled per rule group *rule*."""
        rule = self.ruledict[rule]
        scheme = url.scheme
        tokens = url.hostname.split('.')
        url = url.geturl().lower()
        # --- whitelist pass: any hit means "never match" ---
        if scheme == 'http':
            for k in rule[0]:
                if k in url:
                    return False
        r = rule[2]
        for k in tokens:
            if k in r:
                for k in r[k]:  # note: k is deliberately rebound here
                    if k.search(url):
                        return False
        for k in rule[4]:
            if k.search(url):
                return False
        # --- blacklist pass ---
        if scheme == 'http':
            for k in rule[1]:
                if k in url:
                    return True
        r = rule[3]
        for k in tokens:
            if k in r:
                for k in r[k]:  # note: k is deliberately rebound here
                    if k.search(url):
                        return True
        for k in rule[5]:
            if k.search(url):
                return True
        return False
    test = __call__
# Plugin registration values read by the wallproxy core.
init_time = 100  # NOTE(review): appears to order plugin initialization - confirm
plugin_name = 'AutoProxy'
def init_plugin(config):
    """Plugin entry point.

    A dict config builds the in-process matcher; otherwise config is
    (ruledict, pac_filename) and a PAC file is generated instead
    (returning None).
    """
    if isinstance(config, dict):
        return Handler(config)
    import os, wpconfig
    pacFile = os.path.join(wpconfig.main_dir, config[1])
    generatePAC(config[0], pacFile)
# Copyright (C) 2010-2011 | GNU GPLv3
__author__ = 'ZHRtYWppYUAxNjMuY29t'.decode('base64')
__version__ = '0.0.1'
import forold
class Handler(forold.Handler):
    """GoAgent-protocol variant of the forold GAE handler.

    Uses a no-op XOR cipher ('XOR--0') and hex-encoded query-string
    framing, and authenticates by shipping the server password with every
    request instead of relying on encryption.
    """
    crypto = forold._crypto.Crypto('XOR--0'); key = ''
    def dump_data(self, dic):
        # GoAgent wire format: k=hex(v) pairs joined by '&'.
        return '&'.join('%s=%s' % (k, str(v).encode('hex')) for k, v in dic.iteritems())
    def load_data(self, qs):
        return dict((k, v.decode('hex')) for k, v in (x.split('=') for x in qs.split('&')))
    def __init__(self, config):
        # GoAgent's transport is fixed; drop any configured cipher.
        config.pop('crypto', None)
        self.password = config.pop('key', '')
        forold.Handler.__init__(self, config)
    def _process_request(self, req):
        request, rawrange = forold.Handler._process_request(self, req)
        # The GoAgent server has no 'range' field; it expects 'password'.
        del request['range']
        request['password'] = self.password
        return request, rawrange
init_time = 3  # NOTE(review): appears to order plugin initialization - confirm
plugin_name = 'Plugin for GoAgent'
def init_plugin(config):
    """Plugin entry point: build one GoAgent Handler per configured server."""
    return forold.gaeproxy.init(Handler, config)
# Copyright (C) 2010-2011 | GNU GPLv3
__author__ = 'd3d3LmVodXN0QGdtYWlsLmNvbQ=='.decode('base64')
__version__ = '0.0.3'
import os, socket
from wpconfig import main_dir as cert_dir
cert_dir = os.path.join(cert_dir, 'cert')
try:
from OpenSSL import crypto
except ImportError:
crypto = None
try:
import ssl
except ImportError:
ssl = None
def _createKeyPair(type=None, bits=1024):
    """Generate a fresh key pair (RSA by default) of *bits* length."""
    if type is None:
        type = crypto.TYPE_RSA
    pkey = crypto.PKey()
    pkey.generate_key(type, bits)
    return pkey
def _createCertRequest(pkey, subj, digest='sha1'):
    """Build an X509 certificate request for *pkey*.

    *subj* maps X509Name attribute names (countryName, commonName, ...)
    to values; the request is signed with its own key using *digest*.
    """
    req = crypto.X509Req()
    subject = req.get_subject()
    for k,v in subj.iteritems():
        setattr(subject, k, v)
    req.set_pubkey(pkey)
    req.sign(pkey, digest)
    return req
def _createCertificate(req, issuerKey, issuerCert, serial,
        notBefore, notAfter, digest='sha1'):
    """Issue a certificate for request *req*, signed by the issuer pair.

    notBefore/notAfter are offsets in seconds relative to now (passed to
    gmtime_adj_*); *serial* must be unique per issuer.
    """
    cert = crypto.X509()
    cert.set_serial_number(serial)
    cert.gmtime_adj_notBefore(notBefore)
    cert.gmtime_adj_notAfter(notAfter)
    cert.set_issuer(issuerCert.get_subject())
    cert.set_subject(req.get_subject())
    cert.set_pubkey(req.get_pubkey())
    cert.sign(issuerKey, digest)
    return cert
def _makeCA(dump=True):
    """Create the self-signed WallProxy root CA (valid ~20 years).

    Returns (key, cert) as PEM strings when *dump* is true, otherwise as
    pyOpenSSL objects.
    """
    pkey = _createKeyPair(bits=2048)
    subj = {'countryName': 'CN', 'organizationalUnitName': 'WallProxy Root',
            'stateOrProvinceName': 'Internet', 'localityName': 'Cernet',
            'organizationName': 'WallProxy', 'commonName': 'WallProxy CA'}
    req = _createCertRequest(pkey, subj)
    # Self-signed: the request doubles as its own issuer.
    cert = _createCertificate(req, pkey, req, 0, 0, 60*60*24*7305) #20 years
    if dump:
        pkey = crypto.dump_privatekey(crypto.FILETYPE_PEM, pkey)
        cert = crypto.dump_certificate(crypto.FILETYPE_PEM, cert)
    return pkey, cert
def _makeCert(host, cakey, cacrt, serial, dump=True):
    """Issue a per-host leaf certificate signed by the WallProxy CA.

    Returns (key, cert) as PEM strings when *dump* is true, otherwise as
    pyOpenSSL objects.
    """
    pkey = _createKeyPair()
    subj = {'countryName': 'CN', 'organizationalUnitName': 'WallProxy Branch',
            'stateOrProvinceName':'Internet', 'localityName': 'Cernet',
            'organizationName': host, 'commonName': host}
    req = _createCertRequest(pkey, subj)
    cert = _createCertificate(req, cakey, cacrt, serial, 0, 60*60*24*7305)
    if dump:
        pkey = crypto.dump_privatekey(crypto.FILETYPE_PEM, pkey)
        cert = crypto.dump_certificate(crypto.FILETYPE_PEM, cert)
    return pkey, cert
def read_file(filename):
    """Return the raw bytes of *filename*, or None if it cannot be read.

    A missing/unreadable file is treated as "no content" rather than an
    error because callers use this for optional key/cert/serial files.
    """
    try:
        # 'with' guarantees the handle is closed even if read() fails
        # (the original manual open/read/close leaked on a read error).
        with open(filename, 'rb') as f:
            return f.read()
    except IOError:
        return None
def write_file(filename, content):
    """Write str(content) to *filename* in binary mode, ignoring I/O errors.

    Errors are deliberately swallowed: certificate caching is best-effort
    and a failed write only costs a regeneration next time.
    """
    try:
        # 'with' closes the handle even when write() fails part-way.
        with open(filename, 'wb') as f:
            f.write(str(content))
    except IOError:
        pass
_g_serial = _g_CA = None  # set by checkCA(): last issued serial / (key, cert) CA pair
def checkCA():
    """Load (or create) the root CA and the certificate serial counter.

    Side effects: populates the module globals _g_CA/_g_serial, ensures
    the cert directory exists, and - when a fresh CA had to be generated -
    deletes every previously issued certificate, since the old leaves no
    longer chain to the new root.  No-op without pyOpenSSL.
    """
    if not crypto: return
    global _g_serial, _g_CA
    #Check cert directory
    if not os.path.isdir(cert_dir):
        if os.path.isfile(cert_dir):
            os.remove(cert_dir)
        os.mkdir(cert_dir)
    #Check CA file
    cakeyFile = os.path.join(cert_dir, '_ca.key')
    cacrtFile = os.path.join(cert_dir, '_ca.crt')
    serialFile = os.path.join(cert_dir, '_serial')
    cakey = read_file(cakeyFile)
    cacrt = read_file(cacrtFile)
    _g_serial = read_file(serialFile)
    try:
        cakey = crypto.load_privatekey(crypto.FILETYPE_PEM, cakey)
        cacrt = crypto.load_certificate(crypto.FILETYPE_PEM, cacrt)
        _g_CA = cakey, cacrt
        _g_serial = int(_g_serial)
    except:
        # Anything missing or corrupt: start over with a brand-new CA.
        _g_CA = cakey, cacrt = _makeCA(False)
        _g_serial = 0
        #Remove old certifications, because ca and cert must be in pair
        for name in os.listdir(cert_dir):
            path = os.path.join(cert_dir, name)
            if os.path.isfile(path):
                os.remove(path)
        cakey = crypto.dump_privatekey(crypto.FILETYPE_PEM, cakey)
        cacrt = crypto.dump_certificate(crypto.FILETYPE_PEM, cacrt)
        write_file(cakeyFile, cakey)
        write_file(cacrtFile, cacrt)
        write_file(serialFile, _g_serial)
def getCertificate(host):
    """Return (keyfile, crtfile) paths for *host*, issuing a cert on demand.

    Without pyOpenSSL the CA's own pair is returned as a shared fallback.
    The serial counter file is rewritten after each issuance so numbering
    survives restarts.
    """
    keyFile = os.path.join(cert_dir, '%s.key' % host)
    crtFile = os.path.join(cert_dir, '%s.crt' % host)
    if not os.path.isfile(keyFile) or not os.path.isfile(crtFile):
        if not crypto:
            keyFile = os.path.join(cert_dir, '_ca.key')
            crtFile = os.path.join(cert_dir, '_ca.crt')
            return (keyFile, crtFile)
        global _g_serial
        _g_serial += 1
        key, crt = _makeCert(host, _g_CA[0], _g_CA[1], _g_serial)
        write_file(keyFile, key)
        write_file(crtFile, crt)
        write_file(os.path.join(cert_dir,'_serial'), _g_serial)
    return keyFile, crtFile
class Handler:
    """CONNECT interceptor: answers 200, wraps the client socket in TLS
    with a locally issued per-host certificate, then re-runs the request
    handler on the decrypted stream so inner requests are served as
    plain HTTP."""
    def handle(self, handler, req):
        if not ssl:
            return req.send_error(501, 'ssl needs Python2.6 or later')
        host = req.path.rsplit(':', 1)[0]
        keyFile, crtFile = getCertificate(host)
        req.connection.sendall('HTTP/1.1 200 OK\r\n\r\n')
        try:
            ssl_sock = ssl.wrap_socket(req.connection, keyFile, crtFile, True)
        except ssl.SSLError, e:
            return req.log_error('"%s" SSLError:%s', req.requestline, e)
        addr = req.client_address[0],req.client_address[1],'https://'+req.path
        try:
            # Re-enter the request handler class on the decrypted socket.
            req.__class__(ssl_sock, addr, req.server)
            ssl_sock.shutdown(socket.SHUT_WR)
        except socket.error:
            pass
        finally:
            ssl_sock.close()
init_time = 0  # NOTE(review): appears to order plugin initialization - confirm
plugin_name = 'https to http'
def init_plugin(ignore):
    """Plugin entry point: report TLS support, prepare the CA, return handler."""
    print '  SSL module support:', 'YES' if ssl else 'NO'
    print '  OpenSSL module support:', 'YES' if crypto else 'NO'
    checkCA()
    return Handler()
# Copyright (C) 2010-2011 | GNU GPLv3
__author__ = 'd3d3LmVodXN0QGdtYWlsLmNvbQ=='.decode('base64')
__patcher__ = 'ZHRtYWppYUAxNjMuY29t'.decode('base64')
__version__ = '0.0.4'
from util import proxylib
class Handler:
    """Tests whether a hostname is rewritten by the local DNS mapping table."""
    def __call__(self, host):
        # A host counts as "mapped" when proxylib resolves it to a
        # different name than the one given.
        return host != proxylib.map_hosts(host)
    test = __call__
init_time = 0  # NOTE(review): appears to order plugin initialization - confirm
plugin_name = 'local dns mapping'
def init_plugin(hosts_):
    """Parse hosts-file-style text in *hosts_* into proxylib's mapping tables.

    Lines are '<ip> <host>'; a host beginning with '.' is a suffix rule
    stored in hosts[1] (an ordered list), exact names go into the
    hosts[0] dict.  Blank lines and '#' comments are skipped.
    """
    hosts = proxylib.hosts
    # Map existing suffix rules to their list position so updates replace
    # in place instead of appending duplicates.
    tag = dict((m[0],i) for i,m in enumerate(hosts[1]))
    old_items = set(hosts[0].items()), set(hosts[1])
    for line in hosts_.splitlines():
        line = line.strip().lower()
        if not line or line.startswith('#'): continue
        line = line.split()
        if len(line) != 2: continue
        ip, host = line
        if host.startswith('.'):
            if host in tag:
                hosts[1][tag[host]] = host, ip
            else:
                tag[host] = len(hosts[1])
                hosts[1].append((host, ip))
        else:
            hosts[0][host] = ip
    new_items = set(hosts[0].items()), set(hosts[1])
    c_items = new_items[0]-old_items[0], new_items[1]-old_items[1]
    print '  updated %d,%d dns mapping' % (len(c_items[0]), len(c_items[1]))
    return Handler()
| Python |
# Copyright (C) 2010-2011 | GNU GPLv3
__author__ = 'ZHRtYWppYUAxNjMuY29t'.decode('base64')
__version__ = '0.0.2'
import gaeproxy
class Handler(gaeproxy.Handler):
    """gaeproxy variant using hex query-string framing instead of pickle."""
    def dump_data(self, dic):
        # Wire format: k=hex(v) pairs joined by '&'.
        return '&'.join('%s=%s' % (k,str(v).encode('hex')) for k,v in dic.iteritems())
    def load_data(self, qs):
        return dict((k,v.decode('hex')) for k,v in (x.split('=') for x in qs.split('&'))) if qs else {}
    def _process_request(self, req):
        """Like the base version, but ships the headers object directly."""
        data = req.read_body()
        rawrange, range = self._process_range(req.headers)
        if req.command=='GET' and self.add_range(req.url, req.headers):
            req.headers['Range'] = range
        request = {'method':req.command, 'url':req.url.geturl(), 'body':data,
                   'headers':req.headers, 'range':range}
        return request, rawrange
init_time = 2  # NOTE(review): appears to order plugin initialization - confirm
plugin_name = 'Simple packer for gaeproxy'
def init_plugin(config):
    """Plugin entry point: build one Handler per configured server."""
    return gaeproxy.init(Handler, config)
| Python |
# Copyright (C) 2010-2011 | GNU GPLv3
__author__ = 'd3d3LmVodXN0QGdtYWlsLmNvbQ=='.decode('base64')
__version__ = '0.0.1'
import random
import socket
import struct
from util import proxylib, urlfetch
class Handler:
def __init__(self, proxy):
self.proxy = proxylib.Proxy(proxy)
def handle(self, handler, req):
if isinstance(handler, (list, tuple)):
handler = random.choice(handler)
handler = getattr(handler, 'handle_%s_request' % req.request)
if req.request!='http' and self.proxy.type[0]=='http':
req.log_error('Please select a socks proxy for %s', req.address)
else:
handler(req)
def handle_http_request(self, req):
if req.url.scheme == 'https':
if self.proxy.type[0] == 'http':
return req.send_error(417, 'http proxy does not support https')
opener = urlfetch.HTTPSFetch(self.proxy, False,
req.server.server_address[1])
else:
opener = urlfetch.HTTPFetch(self.proxy, False,
req.server.server_address[1])
#opener.set_debuglevel(1)
data = req.read_body()
try:
resp = opener.open(req.url, data, req.command, req.headers)
req.write_response(resp.status, resp.msg, resp.reason, resp.length)
data = resp.read(8192)
while data:
req.wfile.write(data)
data = resp.read(8192)
resp.close()
except proxylib.ProxyError, e:
if e.args[0] == 10:
return req.send_error(200, 'WallProxy Local Server is Running')
req.send_error(502, 'Connect porxy/host failed')
except socket.error, e:
req.log_error('"%s" %s', req.requestline, e)
req.wfile.close()
raise
def handle_socks4_request(self, req):
try:
sock = self.proxy.connect(req.address, req.command,
req.server.server_address[1])
except proxylib.ProxyError, e:
e = {4:'\x5c', 9:'\x5d'}.get(e.args[0], '\x5b')
return req.end_socks('\x00%s\x00\x00\x00\x00\x00\x00' % e)
ip, port = sock.getproxypeer()
try:
ip = socket.gethostbyname(ip)
except socket.error:
ip = '0.0.0.0'
ip = socket.inet_aton(ip)
req.end_socks('\x00\x5a%s%s'%(struct.pack('>H',port),ip), True)
req.copy_sock(sock)
def handle_socks5_request(self, req):
try:
sock = self.proxy.connect(req.address, req.command,
req.server.server_address[1])
except proxylib.ProxyError, e:
e = e.args[0]
if type(e)!=int or e<1 or e>8: e = 1
return req.end_socks('\x05%s\x00\x01\x00\x00\x00\x00\x00\x00'
% chr(e))
ip, port = sock.getproxypeer()
if ':' in ip:
try:
ip = '\x04' + socket.inet_pton(socket.AF_INET6, ip)
except socket.error:
ip = '\x01\x00\x00\x00\x00'
else:
try:
ip = '\x01' + socket.inet_aton(ip) #IPv4
except socket.error:
ip = '\x03%s%s' % (chr(len(ip)), ip) #domain
req.end_socks('\x05\x00\x00%s%s' % (ip, struct.pack('>H',port)), True)
req.copy_sock(sock)
def handle_https_request(self, req):
try:
sock = self.proxy.connect(req.address, req.command,
req.server.server_address[1])
except proxylib.ProxyError, e:
return req.send_error(502, 'Connect porxy/host failed')
req.log_request(200)
req.connection.sendall('HTTP/1.0 200 OK\r\n\r\n')
req.copy_sock(sock)
init_time = 0  # NOTE(review): appears to order plugin initialization - confirm
plugin_name = 'General Proxy'
def init_plugin(proxies):
    """Plugin entry point: build one Handler per configured upstream proxy."""
    return [Handler(p) for p in proxies]
# Copyright (C) 2010-2011 | GNU GPLv3
__author__ = 'd3d3LmVodXN0QGdtYWlsLmNvbQ=='.decode('base64')
__patcher__ = 'ZHRtYWppYUAxNjMuY29t'.decode('base64')
__version__ = '0.0.6'
from util import crypto as _crypto, httpheaders, proxylib, urlfetch, urlinfo
import zlib, time, re, struct, random
import cPickle as pickle
import threading
class Handler:
    """Tunnel HTTP requests through a GAE fetch-server.

    Each client request is pickled, deflated, encrypted and POSTed to
    self.url.  Responses come back framed as <zip:B><status:H><hlen:I>
    followed by the (possibly encrypted/deflated) header block and body.
    Large GET responses are split into ranged sub-requests and reassembled,
    optionally across several worker threads (_range_fetch machinery).
    """
    # Hop-by-hop / proxy headers stripped before forwarding upstream.
    _dirty_headers = ('connection', 'proxy-connection', 'proxy-authorization',
        'content-length', 'host', 'vary', 'via', 'x-forwarded-for')
    _range_re = re.compile(r'(\d+)?-(\d+)?')
    _crange_re = re.compile(r'bytes\s+(\d+)-(\d+)/(\d+)')
    # Class-level defaults; __init__ overrides them on the class from config.
    crypto = _crypto.Crypto('XOR--32'); key = ''
    proxy = proxylib.Proxy()
    headers = httpheaders.HTTPHeaders('Content-Type: application/octet-stream')
    range0 = 100000; range = 500000; max_threads = 10
    def __init__(self, config):
        """Apply *config* (dict, 'url' required) and build the HTTP opener."""
        dic = {'crypto': _crypto.Crypto, 'key': lambda v:v,
               'proxy': proxylib.Proxy, 'headers': httpheaders.HTTPHeaders,
               'range0': lambda v:v if v>=10000 else self.__class__.range0,
               'range': lambda v:v if v>=100000 else self.__class__.range,
               'max_threads': lambda v:v if v>0 else self.__class__.max_threads,}
        self.url = urlinfo.URL(config['url'])
        for k,v in dic.iteritems():
            if k in config:
                # NOTE: settings are stored on the class, so the last
                # configured server wins for these shared attributes.
                setattr(self.__class__, k, v(config[k]))
                setattr(self, k, getattr(self.__class__, k))
        opener_cls = urlfetch.HTTPSFetch if (self.url.scheme ==
            'https') else urlfetch.HTTPFetch
        self.opener = opener_cls(self.proxy, True)
        del self.proxy
    def __str__(self):
        return '  %s %s %d %d %d' % (self.url.geturl(), self.crypto.getmode(),
            self.range0, self.range, self.max_threads)
    def _process_range(self, headers):
        """Strip dirty headers and normalize the client Range header.

        Returns (parsed, range_str) where parsed is None (no usable range)
        or a tuple tagged 0/1/2 for 'start-', '-suffix', 'start-end'.
        """
        for k in self._dirty_headers:
            del headers[k]
        range = headers.get('Range', '')
        m = self._range_re.search(range)
        if m:
            m = m.groups()
            if m[0] is None:
                if m[1] is None: m = None
                else:
                    m = 1, int(m[1])
                    # Large suffix ranges: probe with a tiny tail first.
                    if m[1] > self.range0: range = 'bytes=-1024'
            else:
                if m[1] is None:
                    m = 0, int(m[0])
                    range = 'bytes=%d-%d' % (m[1], m[1]+self.range0-1)
                else:
                    m = 2, int(m[0]), int(m[1])
                    if m[2]-m[1]+1 > self.range0:
                        range = 'bytes=%d-%d' % (m[1], m[1]+self.range0-1)
        if m is None:
            range = 'bytes=0-%d' % (self.range0 - 1)
        return m, range
    def _process_request(self, req):
        """Convert the incoming request into the server-side request dict."""
        data = req.read_body()
        rawrange, range = self._process_range(req.headers)
        headers = httpheaders.HTTPHeaders(req.headers).__getstate__()
        if req.command=='GET' and self.add_range(req.url, req.headers):
            headers['Range'] = range
        request = {'method':req.command, 'url':req.url.geturl(), 'body':data,
                   'headers':headers, 'range':range}
        return request, rawrange
    def _fetch(self, data):
        """Encrypt and POST *data*; return (0, response) or (-1, errmsg)."""
        data = self.crypto.encrypt(data, self.key)
        try:
            resp = self.opener.open(self.url, data, 'POST', self.headers)
        except proxylib.ProxyError, e:
            return -1, 'Connect proxy/host failed'
        except Exception, e:
            return -1, str(e)
        if resp.status != 200:
            resp.read(); resp.close()
            return -1, '%s: %s' % (resp.status, resp.reason)
        return 0, resp
    def fetch(self, data):
        """POST the packed request and decode the response frame.

        Returns (-1, errmsg) on failure, (1, code, headers, content) for a
        fully buffered (deflated) reply, or (0, code, headers,
        (resp, crypto)) when the body should be streamed.
        """
        data, resp = self._fetch(data)
        if data == -1: return data, resp
        crypto = self.crypto.getcrypto(self.key)
        headers = httpheaders.HTTPHeaders()
        try:
            raw_data = resp.read(7)
            zip, code, hlen = struct.unpack('>BHI', raw_data)
            if zip == 1:
                data = self.crypto.unpaddata(crypto.decrypt(resp.read()))
                data = zlib.decompress(data)
                content = data[hlen:]
                if code == 555:
                    # 555 is the server's "urlfetch failed" sentinel.
                    raise ValueError('Server: '+content)
                headers.__setstate__(self.load_data(data[:hlen]))
                resp.close()
                return 1, code, headers, content
            elif zip == 0:
                if code == 555:
                    content = crypto.decrypt(resp.read()[hlen:])
                    raise ValueError('Server: '+self.crypto.unpaddata(content))
                h = crypto.decrypt(resp.read(hlen))
                headers.__setstate__(self.load_data(self.crypto.unpaddata(h)))
                return 0, code, headers, (resp, crypto)
            else:
                raw_data += resp.read()
                raise ValueError('Data format not match(%s:%s)'%(self.url.geturl(), raw_data))
        except Exception, e:
            resp.close()
            return -1, str(e)
    def read_data(self, type, data):
        """Materialize a fetch() body: pass-through for type 1, drain+decrypt
        the stream for type 0."""
        if type == 1: return data
        resp, crypto = data
        data = self.crypto.unpaddata(crypto.decrypt(resp.read()))
        resp.close()
        return data
    def write_data(self, req, type, data):
        """Write a fetch() body to the client, streaming/decrypting type 0."""
        try:
            if type == 1:
                req.wfile.write(data)
            else:
                resp, crypto = data
                size = self.crypto.getsize(16384)
                # Only the final chunk carries cipher padding.
                data = crypto.decrypt(resp.read(size))
                req.wfile.write(self.crypto.unpaddata(data))
                data = resp.read(size)
                while data:
                    req.wfile.write(crypto.decrypt(data))
                    data = resp.read(size)
                resp.close()
        except proxylib.socket.error:
            req.wfile.close()
            raise
    def _need_range_fetch(self, req, res, range):
        """Inspect a 206 reply; start the client response and decide whether
        more ranged sub-fetches are needed.

        Returns None when no Content-Range is present, otherwise
        (next_start, end) after writing status/headers (and the already
        received chunk when it aligns with the requested start).
        """
        headers = res[2]
        m = self._crange_re.search(headers.get('Content-Range', ''))
        if not m: return None
        m = map(int, m.groups())#bytes %d-%d/%d
        if range is None:
            start=0; end=m[2]-1
            code = 200
            del headers['Content-Range']
        else:
            if range[0] == 0: #bytes=%d-
                start=range[1]; end=m[2]-1
            elif range[0] == 1: #bytes=-%d
                start=m[2]-range[1]; end=m[2]-1
            else: #bytes=%d-%d
                start=range[1]; end=range[2]
            code = 206
            headers['Content-Range'] = 'bytes %d-%d/%d' % (start, end, m[2])
        headers['Content-Length'] = str(end-start+1)
        req.write_response(code, headers, size=headers['Content-Length'])
        if start == m[0]: #Valid
            self.write_data(req, res[0], res[3])
            start = m[1] + 1
        return start, end
    def _range_fetch(self, req, handler, request, start, end):
        """Run the ranged download and report throughput / failure."""
        t = time.time()
        if self.__range_fetch(req, handler, request, start, end):
            t = time.time() - t
            t = (end - start + 1) / 1000.0 / 1000 / t
            print '>>>>>>>>>> Range Fetch ended (all @ %sM/s)' % t
        else:
            req.close_connection = 1
            print '>>>>>>>>>> Range Fetch failed'
    def __range_fetch(self, req, handler, request, start, end):
        """Split [start, end] into block tasks and fetch them, possibly with
        several threads; returns True on complete success."""
        request['range'] = '' # disable server auto-range-fetch
        i, s, thread_size, tasks = 0, start, len(handler)*2, []
        while s <= end:
            # First few blocks are small (range0) for fast first bytes.
            e = s + (i < thread_size and self.range0 or self.range) - 1
            if e > end: e = end
            tasks.append((i, s, e))
            i += 1; s = e + 1
        task_size = len(tasks)
        thread_size = min(task_size, thread_size, self.max_threads)
        print ('>>>>>>>>>> Range Fetch started: threads=%d blocks=%d '
               'bytes=%d-%d' % (thread_size, task_size, start, end))
        if thread_size == 1:
            return self._single_fetch(req, handler, request, tasks)
        handler = list(handler); random.shuffle(handler)
        if thread_size > len(handler): handler *= 2
        results = [None] * task_size
        mutex = threading.Lock()
        threads = {}
        for i in xrange(thread_size):
            t = threading.Thread(target=handler[i]._range_thread,
                args=(request, tasks, results, threads, mutex))
            threads[t] = set([])
            t.setDaemon(True)
        for t in threads: t.start()
        # Main loop: forward completed blocks to the client in order.
        i = 0; t = False
        while i < task_size:
            if results[i] is not None:
                try:
                    self.write_data(req, 1, results[i])
                    results[i] = None
                    i += 1
                    continue
                except:
                    # Client went away: clear the queue so workers stop.
                    mutex.acquire()
                    del tasks[:]
                    mutex.release()
                    break
            if not threads: #All threads failed
                if t: break
                t = True; continue
            time.sleep(1)
        else:
            return True
        return False
    def _single_fetch(self, req, handler, request, tasks):
        """Sequential fallback: fetch each block with up to 3 attempts."""
        try:
            for task in tasks:
                request['headers']['Range'] = 'bytes=%d-%d' % task[1:]
                data = zlib.compress(self.dump_data(request))
                for i in xrange(3):
                    self = random.choice(handler)
                    res = self.fetch(data)
                    if res[0] == -1:
                        time.sleep(2)
                    elif res[1] == 206:
                        #print res[2]
                        print '>>>>>>>>>> block=%d bytes=%d-%d' % task
                        self.write_data(req, res[0], res[3])
                        break
                else:
                    raise StopIteration('Failed')
        except:
            return False
        return True
    def _range_thread(self, request, tasks, results, threads, mutex):
        """Worker loop: repeatedly claim a block task, fetch it and store
        the bytes in *results*; failed blocks are re-queued for others."""
        ct = threading.current_thread()
        while True:
            mutex.acquire()
            try:
                # Give up once every live thread has failed the same block.
                if threads[ct].intersection(*threads.itervalues()):
                    raise StopIteration('All threads failed')
                for i,task in enumerate(tasks):
                    if task[0] not in threads[ct]:
                        task = tasks.pop(i)
                        break
                else:
                    raise StopIteration('No task for me')
                request['headers']['Range'] = 'bytes=%d-%d' % task[1:]
                data = self.dump_data(request)
            except StopIteration, e:
                #print '>>>>>>>>>> %s: %s' % (ct.name, e)
                del threads[ct]
                break
            finally:
                mutex.release()
            data = zlib.compress(data)
            success = False
            for i in xrange(2):
                res = self.fetch(data)
                if res[0] == -1:
                    time.sleep(2)
                elif res[1] == 206:
                    try: data = self.read_data(res[0], res[3])
                    except: continue
                    if len(data) == task[2]-task[1]+1:
                        success = True
                        break
            mutex.acquire()
            if success:
                print '>>>>>>>>>> block=%d bytes=%d-%d'%task, len(data)
                results[task[0]] = data
            else:
                threads[ct].add(task[0])
                tasks.append(task)
                tasks.sort(key=lambda x: x[0])
            mutex.release()
    def dump_data(self, data):
        # Protocol 1 pickle keeps payloads compact for py2 servers.
        return pickle.dumps(data, 1)
    def load_data(self, data):
        return pickle.loads(data)
    def handle(self, handler, req):
        """Entry point: try up to two servers, fall back to https on reset,
        and kick off ranged fetching for partial GET responses."""
        if not isinstance(handler, (list, tuple)):
            handler = handler,
        if len(handler) == 1:
            handlers = handler[0], handler[0]
        else:
            handlers = random.sample(handler, 2)
        request, range = self._process_request(req)
        data = zlib.compress(self.dump_data(request))
        errors = []
        # NOTE: 'self' is deliberately rebound to the attempted server here.
        for self in handlers:
            res = self.fetch(data)
            if res[0]!=-1: break
            errors.append(res[1])
            # 10054 = connection reset: likely http blocking, switch to https.
            if res[1].find('10054')!=-1 and self.url.scheme!='https':
                print 'Use https instead of http automatically.'
                for h in handler:
                    h.url.scheme = 'https'
                    if h.url.port == 80: h.url.port = 443
                    h.opener = urlfetch.HTTPSFetch(h.opener.proxy)
                hosts = proxylib.hosts[1]
                try:
                    i = hosts.index(('.appspot.com', 'www.google.cn'))
                except ValueError:
                    pass
                else:
                    hosts[i] = ('.appspot.com', 'www.google.com.hk')
                #print handler[0], proxylib.map_hosts('.appspot.com')
        else:
            return req.send_error(502, str(errors))
        if res[1]==206 and req.command=='GET':
            data = self._need_range_fetch(req, res, range)
            if data:
                start, end = data
                if start > end: return #end
                return self._range_fetch(req, handler, request, start, end)
        req.write_response(res[1], res[2], size=res[2].get('Content-Length'))
        self.write_data(req, res[0], res[3])
init_time = 1  # NOTE(review): appears to order plugin initialization - confirm
plugin_name = 'Proxy based on GAE'
def init(cls, config):
    """Shared constructor used by this plugin and its protocol variants.

    Wires the global add_range policy onto Handler, then builds one *cls*
    instance per entry of *config* (a URL string or a config dict); failed
    entries are logged and left as None in the returned list.
    """
    import traceback, wpconfig
    add_range = wpconfig.config['add_range']
    Handler.add_range = lambda self,u,h: add_range(u,h)
    server = [None] * len(config)
    for i,v in enumerate(config):
        if isinstance(v, basestring):
            v = {'url': v}
        try:
            server[i] = cls(v)
            print server[i]
        except:
            traceback.print_exc()
    return server
def init_plugin(config):
    """Plugin entry point."""
    return init(Handler, config)
# Copyright (C) 2010-2011 | GNU GPLv3
__author__ = 'd3d3LmVodXN0QGdtYWlsLmNvbQ=='.decode('base64')
__version__ = '0.4.1'
from util import crypto as _crypto, httpheaders
import gaeproxy
import zlib, struct
class Handler(gaeproxy.Handler):
    """Protocol adapter for WallProxy 0.4.0 fetch-servers.

    Differs from gaeproxy.Handler in wire format: \\x10-escaped
    query-string encoding, Crypto2 whole-body encryption, and a
    '<flag><code:I><hlen:I><clen:I>' response frame.
    """
    crypto = _crypto.Crypto2('XOR--32')
    # Reverse table for _unquote: the digit after \x10 selects the char.
    _unquote_map = {'0':'\x10', '1':'=', '2':'&'}
    def _quote(self, s):
        # Escape the framing characters: \x10->\x100, '='->\x101, '&'->\x102.
        return str(s).replace('\x10', '\x100').replace('=','\x101').replace('&','\x102')
    def dump_data(self, dic):
        return '&'.join('%s=%s' % (self._quote(k), self._quote(v)) for k,v in dic.iteritems())
    def _unquote(self, s):
        # Inverse of _quote.
        res = s.split('\x10')
        for i in xrange(1, len(res)):
            item = res[i]
            try:
                res[i] = self._unquote_map[item[0]] + item[1:]
            except KeyError:
                res[i] = '\x10' + item
        return ''.join(res)
    def load_data(self, qs):
        """Decode a _quote()-encoded query string into a dict, skipping
        malformed or empty-valued pairs."""
        pairs = qs.split('&')
        dic = {}
        for name_value in pairs:
            if not name_value:
                continue
            nv = name_value.split('=', 1)
            if len(nv) != 2:
                continue
            if len(nv[1]):
                dic[self._unquote(nv[0])] = self._unquote(nv[1])
        return dic
    def __init__(self, config):
        if 'crypto' in config:
            self.__class__.crypto = _crypto.Crypto2(config.pop('crypto'))
        gaeproxy.Handler.__init__(self, config)
    def _process_request(self, req):
        """Build the request dict (0.4.0 uses 'payload' instead of 'body')."""
        data = req.read_body()
        rawrange, range = self._process_range(req.headers)
        if req.command=='GET' and self.add_range(req.url, req.headers):
            req.headers['Range'] = range
        request = {'url':req.url.geturl(), 'method':req.command,
                   'headers':req.headers, 'payload':data, 'range':range}
        return request, rawrange
    def fetch(self, data):
        """POST the packed request; return (-1, err) or (1, code, headers, body)."""
        data, resp = self._fetch(data)
        if data == -1: return data, resp
        try:
            raw_data = resp.read(); resp.close()
            data = self.crypto.decrypt(raw_data, self.key)
            # Leading flag byte: '0' = plain, '1' = deflated.
            if data[0] == '0':
                data = data[1:]
            elif data[0] == '1':
                data = zlib.decompress(data[1:])
            else:
                return -1, 'Data format not match(%s:%s)' % (self.url.geturl(),raw_data)
            code, hlen, clen = struct.unpack('>3I', data[:12])
            if len(data) != 12+hlen+clen:
                return -1, 'Data length not match'
            content = data[12+hlen:]
            if code == 555: #Urlfetch Failed
                return -1, 'Server: '+content
            headers = httpheaders.HTTPHeaders(self.load_data(data[12:12+hlen]))
            return 1, code, headers, content
        except Exception, e:
            return -1, str(e)
init_time = 2  # NOTE(review): appears to order plugin initialization - confirm
plugin_name = 'Plugin for WallProxy 0.4.0'
def init_plugin(config):
    """Plugin entry point: build one Handler per configured server."""
    return gaeproxy.init(Handler, config)
#!/usr/bin/env python
# Copyright (C) 2010-2011 | GNU GPLv3
__author__ = 'd3d3LmVodXN0QGdtYWlsLmNvbQ=='.decode('base64')
__patcher__ = 'ZHRtYWppYUAxNjMuY29t'.decode('base64')
__version__ = '1.0.5'
import SocketServer
import BaseHTTPServer
import struct
import socket
import select
import threading
from util import urlinfo
import wpconfig
class ProxyServer(SocketServer.ThreadingTCPServer):
    """Threaded TCP server hosting the local proxy endpoint."""
    allow_reuse_address = BaseHTTPServer.HTTPServer.allow_reuse_address
    daemon_threads = True
    def __init__(self, config, RequestHandlerClass):
        """Read listen address, keep-alive flag and logger setup from *config*."""
        server_address = config.get('listen', ('127.0.0.1', 8086))
        self.keep_alive = config.get('keep_alive', True)
        self.logger = wpconfig.get_logger(config.get('log_file'),
            config.get('log_format', '%(clientip)s - [%(asctime)s] '
            '%(levelname)-5s %(message)s'), config.get('log_size', 1024),
            config.get('log_backup', 0))
        SocketServer.TCPServer.__init__(self, server_address,
            RequestHandlerClass)
    def server_bind(self):
        SocketServer.TCPServer.server_bind(self)
        # Keep only (host, port) even for address families returning more.
        self.server_address = self.server_address[:2]
    def __str__(self):
        info = ['main_ver: %s' % __version__,
                'listen: %s:%s' % self.server_address,
                'keep_alive: %s' % self.keep_alive]
        return '\n'.join(info)
class ProxyRequestHandler(BaseHTTPServer.BaseHTTPRequestHandler):
    def log_error(self, format, *args):
        """Log at ERROR level, tagging the record with the client IP."""
        self.server.logger.error(format%args, extra={'clientip':self.client_address[0]})
    def log_message(self, format, *args):
        """Log at INFO level, tagging the record with the client IP."""
        self.server.logger.info(format%args, extra={'clientip':self.client_address[0]})
    def parse_request(self):
        """Parse the request line/headers, honoring Proxy-Connection.

        With keep_alive disabled every request forces connection close;
        otherwise the client's Proxy-Connection header decides.
        """
        if not self.server.keep_alive:
            self.close_connection = 1
            return BaseHTTPServer.BaseHTTPRequestHandler.parse_request(self)
        if not BaseHTTPServer.BaseHTTPRequestHandler.parse_request(self):
            return False
        pcon = self.headers.get('proxy-connection', '').lower()
        if 'close' in pcon:
            self.close_connection = 1
        elif 'keep-alive' in pcon:
            self.close_connection = 0
        return True
    def write_response(self, code, headers, msg='', size=None):
        """Send the status line plus *headers* to the client.

        Normalizes Connection/Proxy-Connection depending on whether the
        request was direct (path starts with '/') or proxied, and forces
        close when no Content-Length is known.
        """
        self.log_request(code, '-' if size is None else size)
        if self.request_version != 'HTTP/0.9':
            if not msg and code in self.responses:
                msg = self.responses[code][0]
            if 'Content-Length' not in headers:
                self.close_connection = 1
            con = ('Connection', 'Proxy-Connection')
            # Drop the header belonging to the "other" role, keep ours.
            del headers[con[int(self.path[0]=='/')]]
            if self.close_connection:
                del headers['Keep-Alive']
                headers[con[int(self.path[0]!='/')]] = 'close'
            else:
                headers[con[int(self.path[0]!='/')]] = 'keep-alive'
        self.wfile.write('HTTP/1.0 %d %s\r\n%s\r\n' % (code, msg, headers))
    def handle(self):
        """Serve one connection, sniffing the first byte to multiplex
        socks4 (0x04), socks5 (0x05) and HTTP on the same port."""
        try:
            req = self.connection.recv(1)
            if req == '\x04': # socks4
                self.handle_socks4_request()
            elif req == '\x05': # socks5
                self.handle_socks5_request()
            else:
                # HTTP: re-attach the sniffed byte to the request line.
                self.raw_requestline = req + self.rfile.readline()
                if not self.parse_request():
                    return
                if self.command == 'CONNECT':
                    self.handle_https_request()
                else:
                    self.handle_http_request()
                # Keep serving further keep-alive requests.
                while not self.close_connection:
                    self.raw_requestline = self.rfile.readline()
                    if not self.parse_request(): break
                    self.handle_http_request()
        except socket.error:
            pass
def _read(self, size):
data = ''
while size > 0:
try:
tmp = self.connection.recv(size)
except socket.error, e:
if e.args[0] == socket.EINTR:
continue
raise
if not tmp: break
data += tmp
size -= len(tmp)
return data
    def copy_sock(self, sock, max_idle=180):
        """Pump bytes between client and *sock* until ~max_idle idle seconds.

        NOTE(review): an empty recv (peer EOF) does not break the loop;
        the tunnel lingers until the idle counter expires - confirm this
        half-close tolerance is intended.
        """
        lst = [self.connection, sock]
        count = 0
        while count < max_idle:
            count += 1
            r, w, e = select.select(lst, [], lst, 1)
            if e: break
            if not r: continue
            for s in r:
                out = self.connection if s is sock else sock
                data = s.recv(8192)
                if data:
                    out.sendall(data)
                    count = 0  # traffic seen: reset the idle timer
    def end_socks(self, resp=None, success=False):
        """Log the socks handshake outcome and optionally send the final
        reply bytes (*resp*) to the client."""
        log = self.log_message if success else self.log_error
        if hasattr(self, 'address'):
            log('%s for %s %s', self.request, self.address, self.command)
        else:
            # Handshake failed before a target address was parsed.
            log('try %s failed' % self.request)
        if resp:
            self.connection.sendall(resp)
    def _handle_with_plugin(self, obj):
        """Dispatch to a plugin handler; a list/tuple is a handler group whose
        first element receives the whole group (for load balancing)."""
        handler = obj[0] if isinstance(obj, (list, tuple)) else obj
        handler.handle(obj, self)
    def handle_socks4_request(self):
        """Parse a SOCKS4/4a request (the 0x04 version byte was already
        consumed by handle()) and dispatch it to a plugin.

        Wire format: command(1) port(2) ipv4(4) userid...NUL; for
        SOCKS4a the IP is 0.0.0.x (x != 0) and the real hostname
        follows, also NUL-terminated.
        """
        self.request = 'socks4'
        req = self._read(7)
        self.command, port = struct.unpack('>BH', req[:3])
        ip = socket.inet_ntoa(req[3:7])
        # userid: read byte-by-byte up to the NUL terminator.
        req = self._read(1)
        while req[-1] != '\x00':
            req += self._read(1)
        userid = req[:-1]
        if ip.startswith('0.0.0.') and not ip.endswith('.0'):
            # SOCKS4a marker address: the hostname follows.
            req = self._read(1)
            while req[-1] != '\x00':
                req += self._read(1)
            ip = req[:-1]
        self.address = ip, port
        if not wpconfig.check_client(self.client_address[0], self.request,
                (self.command, ip, port, userid)):
            # 0x5d: request rejected (auth failure).
            return self.end_socks('\x00\x5d\x00\x00\x00\x00\x00\x00')
        handler = wpconfig.find_sock_handler(self.request,ip,port,self.command)
        if not handler:
            # 0x5b: request rejected or failed.
            return self.end_socks('\x00\x5b\x00\x00\x00\x00\x00\x00')
        self._handle_with_plugin(handler)
    def handle_socks5_request(self):
        """Parse a SOCKS5 request (RFC 1928; the 0x05 version byte was
        already consumed) and dispatch it to a plugin.

        Handles the method-selection handshake (no-auth or
        username/password), then the CONNECT/BIND/UDP request with an
        IPv4, domain, or IPv6 target address.
        """
        self.request = 'socks5'
        # Method list: one length byte then that many method ids.
        req = self._read(ord(self._read(1)))
        if '\x02' in req: #username/password authentication
            self.connection.sendall('\x05\x02')
            req = self._read(2)
            # req = auth version + username length; the trailing byte of
            # the next read is the password length.
            req = self._read(ord(req[1])+1)
            username = req[:-1]
            password = self._read(ord(req[-1]))
            self.connection.sendall('\x01\x00')  # auth accepted
        elif '\x00' in req:
            username, password = None, None
            self.connection.sendall('\x05\x00')  # no auth required
        else:
            return self.end_socks('\x05\xff')  # no acceptable methods
        req = self._read(4)
        self.command = ord(req[1])
        if req[3] == '\x01': #IPv4 address
            ip = socket.inet_ntoa(self._read(4))
        elif req[3] == '\x03': #Domain name
            ip = self._read(ord(self._read(1)))
        elif req[3] == '\x04': #IPv6 address
            ip = socket.inet_ntop(socket.AF_INET6, self._read(16))
        else:
            # 0x08: address type not supported.
            return self.end_socks('\x05\x08\x00\x01\x00\x00\x00\x00\x00\x00')
        port = struct.unpack('>H', self._read(2))[0]
        self.address = ip, port
        handler = wpconfig.find_sock_handler(self.request,ip,port,self.command)
        if not handler or not wpconfig.check_client(self.client_address[0],
                self.request, (self.command, ip, port, username, password)):
            # 0x02: connection not allowed by ruleset.
            return self.end_socks('\x05\x02\x00\x01\x00\x00\x00\x00\x00\x00')
        self._handle_with_plugin(handler)
    def handle_https_request(self):
        """Handle an HTTP CONNECT request by tunneling via a plugin.

        The target host:port comes from the request path; command 1
        mirrors the SOCKS CONNECT command so the same handlers apply.
        """
        self.request = 'https'
        self.address = ip, port = urlinfo.parse_netloc(self.path, 'https')[:2]
        self.command = 1
        if not wpconfig.check_client(self.client_address[0], self.request,
                (ip, port, self.headers)):
            return self.send_error(407)  # proxy authentication required
        handler = wpconfig.find_sock_handler(self.request, ip, port, 1)
        if not handler:
            return self.send_error(417, 'find_sock_handler return None')
        self._handle_with_plugin(handler)
def read_body(self):
try:
length = int(self.headers.get('content-length', 0))
except ValueError:
length = 0
return self.rfile.read(length) if length>0 else ''
    def handle_http_request(self):
        """Handle one plain HTTP request and dispatch it to a plugin.

        Reconstructs an absolute URL: a proxied request already carries
        one; a direct request ('/path') gets the Host header prepended,
        or the https:// origin stored in client_address[2] (set
        elsewhere by the tunnel machinery -- TODO confirm).
        """
        self.request = 'http'
        if self.path[0] == '/':
            if (len(self.client_address) > 2 and
                    str(self.client_address[2]).startswith('https://')):
                self.url = self.client_address[2] + self.path
            else:
                self.url = 'http://%s%s' % (self.headers.get('host'),self.path)
        else:
            self.url = self.path
        self.url = urlinfo.URL(self.url)
        if not wpconfig.check_client(self.client_address[0], self.request,
                (self.command, self.url, self.headers)):
            return self.send_error(407)  # proxy authentication required
        handler = wpconfig.find_http_handler(self.command,self.url,self.headers)
        if not handler:
            return self.send_error(417, 'find_http_handler return None')
        self._handle_with_plugin(handler)
def main():
    """Start the proxy server and block until Ctrl-C.

    Loads the configuration, runs the two plugin-init phases around
    starting the serving thread (set_config(False) before serving,
    set_config(True) after), then watches the config file for changes.
    """
    msg = '-' * 78
    httpd = ProxyServer(wpconfig.get_config(), ProxyRequestHandler)
    print msg
    print httpd
    print msg
    # Phase 1: plugins that must initialize before serving starts.
    wpconfig.set_config(False)
    t = threading.Thread(target=httpd.serve_forever)
    t.setDaemon(True); t.start(); del t
    # Phase 2: plugins that need the server to already be running.
    wpconfig.set_config(True)
    print msg
    try:
        wpconfig.watch_config(msg)
    except KeyboardInterrupt:
        pass
    finally:
        try:
            httpd.shutdown()
        except AttributeError:
            # presumably for Pythons whose BaseServer lacks shutdown()
            # -- TODO confirm minimum supported version
            pass
if __name__ == '__main__':
    main()
# Copyright (C) 2010-2011 | GNU GPLv3
__author__ = 'd3d3LmVodXN0QGdtYWlsLmNvbQ=='.decode('base64')
__patcher__ = 'ZHRtYWppYUAxNjMuY29t'.decode('base64')
import os, traceback
def main_dir():
    """Return the absolute directory of the running program.

    Uses sys.executable when running frozen (py2exe/PyInstaller-style),
    otherwise the directory of argv[0].
    """
    import sys, imp
    if hasattr(sys, 'frozen') or imp.is_frozen('__main__'):
        return os.path.abspath(os.path.dirname(sys.executable))
    return os.path.abspath(os.path.dirname(sys.argv[0]))
# Intentionally shadow the function with its result: from here on
# main_dir is the program-directory string, and conf_file lives there.
main_dir = main_dir()
conf_file = os.path.join(main_dir, 'proxy.conf')
_default_config = '''
import re
server, plugins = {'log_file': ''}, {}
def __init__(plugin): pass
__del__ = set(['server', 'plugins', '__init__', '__del__'])
hosts = 'www.google.cn .appspot.com'
plugins['plugins.hosts'] = 'hosts'
def use_gae_https(gaeproxy):
httpsproxy = []
for s in gaeproxy:
if isinstance(s, basestring): s = {'url': s}
httpsproxy.append(s.copy())
httpsproxy[-1]['url'] = httpsproxy[-1]['url'].replace('http:', 'https:')
return httpsproxy
exec """eJyFz0tPg0AUBeCfMwtboLX4IHFRRlNc0IRgtdUYMwwzToHpXOehUNP/Lutiwvaek+/kfh
IGWrXd3dsvcrpBERLWQuT7R26Y5B4BMKCsR5X0ObNUeCAATVDNur5LFkEZ0KsyvA5u6ZzfFH1CdQdW
9eHyIZ/iGE8v5/1VkvbDCs1IaVAUniZna1WjrZMja5jTGPLcsW8drw71Bg2YRrVWqhHGaJ3ATK5x3N
DNFrsho0iVvdoRppDbVRg/Hl6ykmf74+4fptpT60afqvMmoD8mSNP0foGHDLgvLcgII5I6eZ5d0Cdr
lnhdlOj0/gdfH6DY""".decode('base64').decode('zlib')
__del__.add('use_gae_https'); plugins['plugins.gaeproxy'] = 'gaeproxy'
server_type = 'gaeproxy'; __del__.add('server_type')
def add_range(url, headers):
if dnsDomainIs(url.hostname, 'c.youtube.com'): return True
return False
autoproxy = {}
autoproxy['PROXY 127.0.0.1:8086; DIRECT'] =(
('http://autoproxy-gfwlist.googlecode.com/svn/trunk/gfwlist.txt','http://127.0.0.1:8086'),
'file://userlist.ini',)
autoproxy = autoproxy, 'proxy.pac'; __del__.add('autoproxy')
plugins['plugins.autoproxy'] = 'autoproxy'
rawproxy = (None,)
plugins['plugins.rawproxy'] = 'rawproxy'
fakehttps = None
plugins['plugins.fakehttps'] = 'fakehttps'
def check_client(ip, reqtype, args): return True
def find_http_handler(method, url, headers):
if method not in ('GET', 'POST', 'HEAD', 'PUT', 'DELETE'): return rawproxy[0]
return gaeproxy
def find_sock_handler(reqtype, ip, port, cmd):
if reqtype == 'https': return fakehttps
return rawproxy[0]
def dnsDomainIs(host, domain):
if host == domain: return True
if domain[0] != '.': domain = '.' + domain
return host.endswith(domain)
'''
config = {}  # namespace the config code is exec'd into; see get_config()
def get_config():
    """(Re)load the configuration and return config['server'].

    The built-in defaults (_default_config) are exec'd first, then the
    user's proxy.conf on top of them.  Errors in the user file are
    printed but non-fatal, leaving the defaults in effect.
    """
    global config
    # Drop references held by the previous namespace so its objects can
    # be garbage-collected before the new exec repopulates it.
    for key in config: config[key] = None #gc
    import __builtin__
    config = {
        '__builtins__': __builtin__, '__file__': conf_file,
        '__name__': __name__+'.conf',
    }
    exec _default_config in config
    try:
        execfile(conf_file, config)
    except:
        # Deliberately broad: a broken user config must not kill the
        # proxy; the traceback tells the user what to fix.
        traceback.print_exc()
    return config['server']
def _init_plugin(plugins):
    """Initialize a list of (module, config_key) pairs.

    Plugins run in ascending mod.init_time order.  When config_key
    names an entry in config, init_plugin(config[key]) is called and
    its result stored back; otherwise init_plugin() takes no args.
    Each plugin's __init__ hook from the config is notified afterwards.
    Failures are printed and skipped so one bad plugin cannot block the
    rest.
    """
    init = config['__init__']
    plugins.sort(key=lambda x:x[0].init_time)
    for mod,cfg in plugins:
        try:
            if cfg in config:
                print 'Initializing "%s#%s" with "%s":'%(mod.__name__,
                        mod.__version__, cfg), mod.plugin_name
                config[cfg] = mod.init_plugin(config[cfg])
            else:
                print 'Initializing "%s":'%mod.__name__, mod.plugin_name
                mod.init_plugin()
            init(mod.__name__)
        except:
            traceback.print_exc()
def set_config(call_time):
    """Publish the loaded config: rebind dispatch hooks and run the
    plugin init phases.

    call_time: False -> init only early plugins (init_time < 50),
               True  -> init only late plugins (init_time >= 50),
               2     -> config reload: init both groups.
    """
    global check_client, find_sock_handler, find_http_handler
    check_client = config['check_client']
    find_sock_handler = config['find_sock_handler']
    find_http_handler = config['find_http_handler']
    plugins = [], []  # (early, late) split at init_time 50
    for mod,cfg in config['plugins'].iteritems():
        try:
            mod = __import__(mod, fromlist='x')
            plugins[int(mod.init_time>=50)].append((mod, cfg))
        except ImportError, e:
            print 'ImportError:', e
    if call_time==2 or not call_time:
        _init_plugin(plugins[0])
    if call_time:
        _init_plugin(plugins[1])
    # Scrub names the config file declared internal-only (__del__ set).
    for k in config['__del__']:
        if k in config:
            del config[k]
def watch_config(msg='', interval=5):
import time
def getmtime():
try: return os.path.getmtime(conf_file)
except: return 0
mtime = getmtime()
while True:
time.sleep(interval)
_mtime = getmtime()
if mtime != _mtime:
print msg
get_config(); set_config(2)
print msg
mtime = _mtime
def get_logger(filename, format, maxKB, backupCount):
    """Build and return the shared 'WallProxy' logger.

    filename -- '' disables logging (returns a stub whose every method
                is a no-op); None logs to stderr; any other value is a
                path (relative to main_dir) for a rotating log file.
    format   -- logging.Formatter format string
    maxKB, backupCount -- rotation parameters for the file handler
    """
    if filename == '':
        class Logger:
            # Absorb any .info()/.error()/... call without doing work.
            def __getattr__(self, name):
                return lambda *args, **kwargs: None
        return Logger()
    import logging
    import logging.handlers
    logger = logging.getLogger('WallProxy')
    logger.setLevel(logging.INFO)
    if filename:
        filename = os.path.join(main_dir, filename)
        handler = logging.handlers.RotatingFileHandler(filename,
            maxBytes=maxKB*1024, backupCount=backupCount)
    else:
        handler = logging.StreamHandler()
    handler.setFormatter(logging.Formatter(format))
    # NOTE(review): every call adds another handler to the same named
    # logger, so calling this more than once duplicates log lines.
    logger.addHandler(handler)
    return logger
# Copyright (C) 2010-2011 | GNU GPLv3
__author__ = 'd3d3LmVodXN0QGdtYWlsLmNvbQ=='.decode('base64')
from UserDict import IterableUserDict
__all__ = ['HTTPHeaders']
class HTTPHeaders(IterableUserDict):
    """Header mapping with Title-Cased keys and tolerant deletes.

    Repeated headers are folded by add() into a single CRLF-joined
    value; str() renders the wire format ('Key: value\\r\\n' lines).
    """
    def __setitem__(self, key, item):
        self.data[key.title()] = item
    def add(self, key, item):
        """Add a header, folding a repeated key into the existing value
        instead of overwriting it."""
        key = key.title()
        if key in self.data:
            self.data[key] = '%s\r\n%s: %s' % (self.data[key], key, item)
        else:
            self.data[key] = item
    def __delitem__(self, key):
        # Deliberately tolerant: deleting an absent header is a no-op.
        # NOTE(review): the key is not Title-cased here, unlike
        # __setitem__/add -- callers must pass the canonical form.
        try:
            del self.data[key]
        except KeyError:
            pass
    def readheaders(self, fp):
        """Parse 'Key: value' lines from a string or a line iterable,
        stopping at the first line without a colon (end of headers)."""
        if isinstance(fp, basestring):
            fp = fp.splitlines()
        for line in fp:
            k, s, v = line.partition(':')
            if not s: break
            self.add(k, v.strip())
    def update(self, dic=None, **kwargs):
        """Merge headers from another HTTPHeaders, a raw header string
        or file-like object, a mapping, or a (key, value) sequence."""
        if not dic:
            pass
        elif isinstance(dic, HTTPHeaders):
            self.data.update(dic.data)
        elif isinstance(dic, basestring) or hasattr(dic, 'readline'):
            self.readheaders(dic)
        else:
            try:
                for k in dic.keys():
                    self[k] = dic[k]
            except AttributeError:
                # No .keys(): treat as an iterable of (key, value).
                for k,v in dic:
                    self.add(k, v)
        if kwargs:
            self.update(kwargs)
    def __str__(self):
        # Wire format; order follows dict iteration order.
        buf = [None] * len(self.data)
        for i,v in enumerate(self.data.iteritems()):
            buf[i] = '%s: %s\r\n' % v
        return ''.join(buf)
    def __getstate__(self):
        return self.data
    def __setstate__(self, state):
        self.data = state
# Copyright (C) 2010-2011 | GNU GPLv3
__author__ = 'd3d3LmVodXN0QGdtYWlsLmNvbQ=='.decode('base64')
import httplib
import threading
import httpheaders, proxylib, urlinfo
from httplib import error
def addheader(self, key, value):
    """Replacement for httplib.HTTPMessage.addheader that folds repeated
    headers into one CRLF-joined value instead of keeping only one."""
    prev = self.dict.get(key)
    if prev is None:
        self.dict[key] = value
    else:
        self.dict[key] = '%s\r\n%s: %s' % (prev, key.title(), value)
# Monkey-patch httplib so parsed responses preserve duplicate headers
# (e.g. multiple Set-Cookie lines).
httplib.HTTPMessage.addheader = addheader; del addheader
__all__ = ['HTTPFetch', 'fetch']
class HTTPFetch(httplib.HTTPConnection, threading.local):
    """Thread-local HTTP connection that tunnels through proxylib.Proxy.

    Unlike httplib.HTTPConnection the target host comes from the URL of
    each request, so one instance can serve many hosts; the socket is
    reused only while the (host, port) stays the same.
    """
    _user_agent = 'Mozilla/5.0'
    def __init__(self, proxy=None,keep_alive=False,selfport=None,timeout=-1):
        # proxy      -- proxylib.Proxy, proxy URL string(s), or None
        # keep_alive -- ask the server/proxy to keep the connection open
        # selfport   -- our own listen port, used to detect request loops
        # timeout    -- socket timeout; negative keeps the proxy default
        if not isinstance(proxy, proxylib.Proxy):
            proxy = proxylib.Proxy(proxy, timeout)
        elif timeout is None or timeout>=0:
            proxy.timeout = timeout
        self.proxy = proxy
        self.keep_alive = keep_alive
        self.selfport = selfport
        self.sock = None
        self._buffer = []
        # Set httplib's name-mangled private state directly because we
        # do not call HTTPConnection.__init__ (no fixed host to give it).
        self._HTTPConnection__response = None
        self._HTTPConnection__state = httplib._CS_IDLE
        self.addr = None
    def __del__(self):
        self.close()
    def connect(self):
        # cmd=1 is a CONNECT-style stream to self.addr.
        self.sock = self.proxy.connect(self.addr, 1, self.selfport)
    def putrequest(self, method, url, headers):
        """Start the request line in the form the outermost proxy hop
        expects (absolute URL for an HTTP proxy, path otherwise)."""
        proxy_type, proxy_auth = self.proxy.type
        if proxy_type != 'http':
            # Direct/SOCKS: must reconnect when the target changes.
            addr = url.hostname, url.port
            if addr != self.addr:
                self.close()
                self.addr = addr
        if self._HTTPConnection__state == httplib._CS_IDLE:
            self._HTTPConnection__state = httplib._CS_REQ_STARTED
        else:
            raise httplib.CannotSendRequest()
        self._method = method
        if proxy_type == 'http':
            self._output('%s %s %s' % (method,url.geturl(),self._http_vsn_str))
            if proxy_auth:
                self.putheader('Proxy-Authorization', 'Basic %s' % (
                        '%s:%s'%proxy_auth).encode('base64').strip())
            self.putheader('Proxy-Connection',
                    'keep-alive' if self.keep_alive else 'close')
        else:
            self._output('%s %s %s' % (method,url.uri,self._http_vsn_str))
            self.putheader('Connection',
                    'keep-alive' if self.keep_alive else 'close')
        if self._http_vsn == 11:
            if 'Host' not in headers:
                self.putheader('Host', url.host)
            if 'Accept-Encoding' not in headers:
                self.putheader('Accept-Encoding', 'identity')
            if 'User-Agent' not in headers:
                self.putheader('User-Agent', self._user_agent)
    def _send_request(self, method, url, body, headers):
        """Emit start line, default entity headers, caller headers, and
        the body (string or file-like with a fileno)."""
        self.putrequest(method, url, headers)
        if body:
            if 'Content-Type' not in headers:
                self.putheader('Content-Type',
                        'application/x-www-form-urlencoded')
            if 'Content-Length' not in headers:
                try:
                    self.putheader('Content-Length', str(len(body)))
                except TypeError:
                    # No len(): body is file-like, try fstat for its size.
                    import os
                    try:
                        self.putheader('Content-Length',
                                str(os.fstat(body.fileno()).st_size))
                    except (AttributeError, OSError):
                        if self.debuglevel > 0: print 'Cannot stat!!'
        for k,v in headers.iteritems():
            self.putheader(k, v)
        self.endheaders()
        if body:
            self.send(body)
    def open(self,url,body=None,method=None,headers=httpheaders.HTTPHeaders()):
        """Send one request and return the httplib response object.

        NOTE(review): the default *headers* is a shared mutable
        instance; the deletes below are no-ops on an empty mapping, but
        callers should still pass their own headers object.
        """
        if not method: method = 'POST' if body else 'GET'
        if not isinstance(url, urlinfo.URL):
            url = urlinfo.URL(url)
        if not isinstance(headers, httpheaders.HTTPHeaders):
            headers = httpheaders.HTTPHeaders(headers)
        # Connection management headers are ours to set, not the caller's.
        del headers['Connection'], headers['Proxy-Connection']
        try:
            self.request(method, url, body, headers)
            return self.getresponse()
        except (httplib.socket.error, error):
            # Stale keep-alive socket: reconnect and retry once.
            self.close()
            self.request(method, url, body, headers)
            return self.getresponse()
try:
    import ssl
except ImportError:
    pass  # no ssl module: HTTPS fetching is simply unavailable
else:
    class HTTPSFetch(HTTPFetch):
        """HTTPFetch variant that wraps the proxied socket in TLS."""
        def connect(self):
            proxysock = self.proxy.connect(self.addr, 1, self.selfport)
            # NOTE(review): no certificate validation is performed here.
            self.sock = ssl.wrap_socket(proxysock)
    __all__.append('HTTPSFetch')
_proxy = [None, None]  # module defaults: [http proxy, https proxy]
def fetch(url, body=None, method=None, headers=None, proxy=None):
    """One-shot fetch of *url*; returns an httplib response object.

    Picks HTTPFetch or HTTPSFetch from the URL scheme and uses the
    module-level default proxy for that scheme unless *proxy* is given.
    """
    if not isinstance(url, urlinfo.URL):
        url = urlinfo.URL(url)
    if url.scheme == 'http':
        opener = HTTPFetch(proxy if proxy else _proxy[0], False)
    else:
        opener = HTTPSFetch(proxy if proxy else _proxy[1], False)
    #opener.set_debuglevel(1)
    return opener.open(url, body, method, headers)
# Copyright (C) 2010-2011 | GNU GPLv3
__author__ = 'd3d3LmVodXN0QGdtYWlsLmNvbQ=='.decode('base64')
__patcher__ = 'ZHRtYWppYUAxNjMuY29t'.decode('base64')
'''
see http://en.wikipedia.org/wiki/SOCKS#Protocol
and http://www.ietf.org/rfc/rfc1928.txt
'''
import socket
import struct
import urlinfo
__all__ = ['ProxyError', 'Proxy']
# [exact-match {host: replacement}, list of (suffix, replacement) rules]
hosts = [{}, []]
def map_hosts(host):
    """Map *host* through the hosts tables: exact matches first, then
    suffix rules; an unknown host is returned unchanged."""
    mapped = hosts[0].get(host)
    if mapped is None:
        for suffix, target in hosts[1]:
            if host.endswith(suffix):
                mapped = target
                break
    return host if mapped is None else mapped
def create_connection(address, timeout=-1):
    """Open a TCP connection to address=(host, port).

    The host is first remapped through the hosts tables; each
    getaddrinfo() result is tried in turn, and the last socket error
    (or the placeholder message, if resolution returned nothing) is
    re-raised on total failure.  timeout None or >= 0 is applied to the
    socket; negative leaves the system default.
    """
    msg = "getaddrinfo returns an empty list"
    host, port = address
    host = map_hosts(host)
    for res in socket.getaddrinfo(host, port, 0, socket.SOCK_STREAM):
        af, socktype, proto, canonname, sa = res
        sock = None
        try:
            sock = socket.socket(af, socktype, proto)
            if timeout is None or timeout>=0:
                sock.settimeout(timeout)
            sock.connect(sa)
            return sock
        except socket.error, msg:
            if sock is not None:
                sock.close()
    # Either no candidates or all of them failed: surface the last error.
    raise socket.error, msg
# Pure-Python fallback for platforms whose socket module has no
# inet_pton (notably Windows on older Pythons).
if not hasattr(socket, 'inet_pton'):
    def inet_pton(af, ip):
        '''inet_pton(af, ip) -> packed IP address string'''
        ip = str(ip)
        msg = 'illegal IP address string passed to inet_pton'
        if af == socket.AF_INET:
            return socket.inet_aton(ip)
        elif af == socket.AF_INET6:
            # Expand the single allowed '::' shorthand to 8 groups.
            ip = ip.split('::')
            ln = len(ip)
            if ln == 1:
                ip = ip[0].split(':')
                if len(ip) != 8:
                    raise socket.error, msg
            elif ln == 2:
                ip[0] = ip[0].split(':') if ip[0] else []
                ip[1] = ip[1].split(':') if ip[1] else []
                ln = len(ip[0]) + len(ip[1])
                if ln >= 8:
                    raise socket.error, msg
                ip = ip[0] + ['0000']*(8-ln) + ip[1]
            else:
                raise socket.error, msg
            # Zero-pad every group to 4 hex digits, then hex-decode.
            for i,v in enumerate(ip):
                ln = len(v)
                if ln==0 or ln>4:
                    raise socket.error, msg
                ip[i] = '0'*(4-ln) + v
            try:
                return ''.join(ip).decode('hex')
            except:
                raise socket.error, msg
        else:
            # 97 == EAFNOSUPPORT on Linux.
            raise socket.error, (97, 'Address family not supported by protocol')
    socket.inet_pton = inet_pton; del inet_pton
# Pure-Python fallback companion to the inet_pton shim above.
if not hasattr(socket, 'inet_ntop'):
    def inet_ntop(af, ip):
        '''inet_ntop(af, packed_ip) -> string formatted IP address'''
        ip = str(ip)
        if af == socket.AF_INET:
            return socket.inet_ntoa(ip)
        elif af == socket.AF_INET6:
            if len(ip) != 16:
                raise ValueError, 'invalid length of packed IP address string'
            ip = ip.encode('hex')
            # Split into 8 groups, dropping leading zeros.
            lst = [None] * 8
            for i in xrange(8):
                lst[i] = ip[i*4:i*4+4].lstrip('0')
                if not lst[i]: lst[i] = '0'
            # Find the longest run of zero groups to compress as '::'.
            beststart = bestend = -1
            i = 0
            while i < 8:
                if lst[i] == '0':
                    start = i
                    while i<8 and lst[i]=='0': i+=1
                    if i-start > bestend-beststart:
                        beststart = start
                        bestend = i
                i += 1
            if beststart!=-1 and bestend-beststart>1:
                # Edge placement matters so join() yields '::' correctly
                # at the start/end of the address.
                if beststart==0 and bestend>7:
                    return '::'
                elif beststart==0 or bestend>7:
                    lst[beststart:bestend] = [':']
                else:
                    lst[beststart:bestend] = ['']
            return ':'.join(lst)
        else:
            raise ValueError, 'unknown address family %s' % af
    socket.inet_ntop = inet_ntop; del inet_ntop
class ProxyError(Exception):
    """Failure while negotiating through a proxy chain.

    Raised with either a numeric code indexing .info (1-8 mirror the
    SOCKS5 reply codes) or a free-form message string.
    """
    info = ('invalid response', #0
            'general SOCKS server failure', #0x01
            'connection not allowed by ruleset', #0x02
            'network unreachable', #0x03
            'host unreachable', #0x04
            'connection refused', #0x05
            'TTL expired', #0x06
            'command not supported', #0x07
            'address type not supported', #0x08
            'authentication failed', #9
            'request self') #10
class proxysocket(socket.socket):
def __init__(self, sock, proxypeer):
if hasattr(sock, '_sock'):
sock = sock._sock
socket.socket.__init__(self, _sock=sock)
self.__proxypeer = proxypeer
def getproxypeer(self):
return self.__proxypeer
def recvall(self, size):
data = ''
while size > 0:
try:
tmp = self.recv(size)
except socket.error, e:
if e.args[0] == socket.EINTR:
continue
raise
if not tmp: break
data += tmp
size -= len(tmp)
return data
class Proxy:
    """An outbound connector through zero or more chained proxies.

    Constructed from None (direct connection), one proxy URL, or a
    sequence of proxy URLs forming a chain; only the last hop may be an
    HTTP proxy (enforced in __init__).  Supported schemes: http, https,
    socks4, socks5 (alias 'socks'), with optional userinfo auth and a
    ?dns=yes query flag meaning "resolve the target locally before
    handing it to the proxy".
    """
    timeout = 20
    def __init__(self, proxy=None, timeout=-1):
        if timeout is None or timeout>=0:
            self.timeout = timeout
        if not proxy:
            self._proxy = None
            self.type = None, None
        else:
            if isinstance(proxy, basestring):
                proxy = (proxy,)
            self._proxy = [None] * len(proxy)
            for i in xrange(len(proxy)-1):
                self._proxy[i] = self._parse_proxy(proxy[i])
                if self._proxy[i][0] == 'http':
                    raise ValueError('Bad proxies order')
            self._proxy[-1] = self._parse_proxy(proxy[-1])
            # (scheme, auth) of the outermost hop; consumers use it to
            # decide how to phrase requests.
            self.type = self._proxy[-1][0], self._proxy[-1][2]
    def _parse_proxy(self, proxy):
        """Parse one proxy URL into (scheme, (host, port), auth, dns)."""
        url = urlinfo.URL(proxy)
        if url.port is None:
            url.port = 1080
        if url.scheme == 'socks':
            url.scheme = 'socks5'
        auth = None
        if url.scheme in ('http', 'https', 'socks5'):
            if url.username is not None and url.password is not None:
                auth = url.username, url.password
        elif url.scheme == 'socks4':
            # SOCKS4 carries only a userid string.
            auth = url.username or ''
        else:
            raise ValueError('Bad proxy type: '+url.scheme)
        dns = url.parse_qs().get('dns')
        if dns and dns[-1].lower() in ('true', 'yes', '1'):
            dns = True
        else:
            dns = False
        return url.scheme, (url.hostname, url.port), auth, dns
    def connect(self, addr, cmd=1, selfport=None):
        """Connect to *addr* through the chain.

        cmd is the SOCKS command (1=CONNECT).  When *selfport* is given,
        a connection that loops back to our own listener raises
        ProxyError(10).
        """
        proxysock = self._connect(addr, cmd)
        if selfport is not None:
            ip, port = proxysock.getproxypeer()
            if ip==proxysock.getsockname()[0] and port==selfport:
                raise ProxyError(10) #request self
        return proxysock
    def _connect(self, addr, cmd=1):
        """Build the chain hop by hop; any failure closes the socket and
        surfaces as a ProxyError."""
        p = self._proxy
        if not p:
            if cmd == 1:
                return self.__connect(addr)
            raise ProxyError(7) #command not supported
        sock = self.__connect(p[0][1])
        try:
            # Tunnel through every intermediate hop to the next hop's
            # address, then issue the real command on the last hop.
            for i in xrange(len(p)-1):
                connector = getattr(self, '_connect_in_'+p[i][0])
                sock = connector(sock, p[i][2], p[i][3], p[i+1][1], 1)
            if p[-1][0] != 'http':
                connector = getattr(self, '_connect_in_'+p[-1][0])
                sock = connector(sock, p[-1][2], p[-1][3], addr, cmd)
            return sock
        except Exception, e:
            sock.close()
            if type(e) == ProxyError: raise
            raise ProxyError(repr(e))
    def __connect(self, addr):
        """Plain TCP connect wrapped into a proxysocket."""
        try:
            sock = create_connection(addr, self.timeout)
            return proxysocket(sock, sock.getpeername()[:2])
        except socket.error:
            raise ProxyError(4) #host unreachable
    def _connect_in_socks4(self, sock, auth, dns, addr, cmd=1):
        """Issue a SOCKS4 (or 4a, for unresolvable names) request on an
        already-established hop; returns the tunneled proxysocket."""
        if cmd not in (1, 2):
            raise ProxyError(7) #command not supported
        addr, port = addr
        if dns:
            try:
                addr = socket.gethostbyname(addr)
            except socket.error:
                raise ProxyError(4) #host unreachable
        try:
            # dns doubles as "we have a literal IPv4" flag below.
            addr = socket.inet_aton(addr); dns = True
            req = '\x04%s%s%s\x00' % (struct.pack('>BH',cmd,port), addr, auth)
        except socket.error: #try SOCKS 4a
            dns = False
            req = '\x04%s\x00\x00\x00\x01%s\x00%s\x00' % (
                    struct.pack('>BH',cmd,port), auth, addr)
        sock.sendall(req)
        resp = sock.recvall(8)
        if resp[0] != '\x00':
            raise ProxyError(0) #invalid response
        if resp[1] != '\x5a':
            if resp[1] == '\x5b': raise ProxyError(5) #connection refused
            if resp[1] == '\x5c': raise ProxyError(4) #host unreachable
            if resp[1] == '\x5d': raise ProxyError(9) #authentication failed
            raise ProxyError(0) #invalid response
        if dns:
            return proxysocket(sock, (socket.inet_ntoa(addr), port))
        else:
            addr = (socket.inet_ntoa(resp[4:]),struct.unpack('>H',resp[2:4])[0])
            return proxysocket(sock, addr)
    def _dnsresolve(self, addr):
        """Resolve *addr* to one IP, preferring IPv4; None on failure."""
        addr = urlinfo.host2ip(addr)
        if addr[0]: return addr[0][0]
        elif addr[1]: return addr[1][0]
        return None
    def _connect_in_socks5(self, sock, auth, dns, addr, cmd=1):
        """Issue a SOCKS5 (RFC 1928) request on an established hop,
        including the optional username/password sub-negotiation."""
        if cmd not in (1, 2, 3):
            raise ProxyError(7) #command not supported
        sock.sendall('\x05\x02\x00\x02' if auth else '\x05\x01\x00')
        resp = sock.recvall(2)
        if resp[0] != '\x05':
            raise ProxyError(0) #invalid response
        if resp[1] == '\x02':
            sock.sendall('\x01%s%s%s%s' % (chr(len(auth[0])), auth[0],
                    chr(len(auth[1])), auth[1]))
            resp = sock.recvall(2)
            if resp[1] != '\x00':
                raise ProxyError(9) #authentication failed
        addr, port = addr
        if dns:
            addr = self._dnsresolve(addr)
            if not addr:
                raise ProxyError(4) #host unreachable
        if ':' in addr: #IPv6
            try:
                addr = '\x04' + socket.inet_pton(socket.AF_INET6, addr)
            except socket.error:
                raise ProxyError(4) #host unreachable
        else:
            try:
                addr = '\x01' + socket.inet_aton(addr) #IPv4
            except socket.error:
                addr = '\x03%s%s' % (chr(len(addr)), addr) #domain
        req = '\x05%s\x00%s%s' % (chr(cmd), addr, struct.pack('>H',port))
        sock.sendall(req)
        resp = sock.recvall(4)
        if resp[0] != '\x05':
            raise ProxyError(0) #invalid response
        if resp[1] != '\x00':
            raise ProxyError(ord(resp[1]))
        if resp[3] == '\x01': #IPv4 address
            addr = socket.inet_ntoa(sock.recvall(4))
        elif resp[3] == '\x03': #Domain name
            addr = sock.recvall(ord(sock.recvall(1)))
        elif resp[3] == '\x04': #IPv6 address
            addr = socket.inet_ntop(socket.AF_INET6, sock.recvall(16))
        else:
            raise ProxyError(8) #address type not supported
        port = struct.unpack('>H',sock.recvall(2))[0]
        return proxysocket(sock, (addr,port))
    def _connect_in_https(self, sock, auth, dns, addr, cmd=1):
        """Issue an HTTP CONNECT on an established hop (only CONNECT is
        possible through an HTTP proxy)."""
        if cmd != 1:
            raise ProxyError(7) #command not supported
        addr, port = addr
        if dns:
            addr = self._dnsresolve(addr)
            if not addr:
                raise ProxyError(4) #host unreachable
        addrinfo = ('[%s]:%s' if ':' in addr else '%s:%s') % (addr, port)
        auth = 'Proxy-Authorization: Basic %s\r\n' % ('%s:%s' % auth
                ).encode('base64').strip() if auth else ''
        req = 'CONNECT %s HTTP/1.1\r\nAccept: */*\r\n%s\r\n' % (addrinfo, auth)
        sock.sendall(req)
        resp = sock.recv(1024)
        if not resp.startswith('HTTP/'):
            raise ProxyError(0) #invalid response
        # Keep reading until the end of the response headers.
        while resp.find('\r\n\r\n')==-1:
            resp += sock.recv(1024)
        try:
            statuscode = int(resp.split('\n',1)[0].split(' ',2)[1])
        except:
            raise ProxyError(0) #invalid response
        if statuscode != 200:
            raise ProxyError('invalid statuscode: %d' % statuscode)
        return proxysocket(sock, (addr, port))
# Copyright (C) 2010-2011 | GNU GPLv3
__author__ = '%s & %s' % ('d3d3LmVodXN0QGdtYWlsLmNvbQ=='.decode('base64'),
'YnJvbnplMW1hbkBnbWFpbC5jb20='.decode('base64'))
import hashlib, itertools
__all__ = ['Crypto']
class XOR:
    '''XOR with pure Python in case no PyCrypto'''
    # Repeating-key XOR processed in 1 KB slices; encrypt and decrypt
    # are the same operation because XOR is its own inverse.
    def __init__(self, key):
        self.key = key
    def encrypt(self, data):
        xorsize = 1024
        key = itertools.cycle(map(ord, self.key))
        dr = xrange(0, len(data), xorsize)
        ss = [None] * len(dr)
        for i,j in enumerate(dr):
            dd = [ord(d)^k for d,k in itertools.izip(data[j:j+xorsize], key)]
            ss[i] = ''.join(map(chr, dd))
        return ''.join(ss)
    decrypt = encrypt
class NUL:
    """No-op cipher used when encryption is disabled (empty key and
    keysize 0)."""
    def encrypt(self, data):
        """Return *data* unchanged."""
        return data
    # Decrypting a no-op cipher is the same no-op.
    decrypt = encrypt
class Crypto:
    """Thin wrapper over PyCrypto ciphers selected by a mode string
    'CIPHER-MODE-KEYSIZE', e.g. 'AES-CBC-32'.

    The key is stretched with SHA-512 and the IV is taken from the tail
    of that digest, so the stream is deterministic per key.  Falls back
    to the pure-Python XOR cipher when PyCrypto is unavailable.
    """
    _BlockSize = {'AES':16, 'ARC2':8, 'ARC4':1, 'Blowfish':8, 'CAST':8,
            'DES':8, 'DES3':8, 'IDEA':8, 'RC5':8, 'XOR':1}
    _Modes = ['ECB', 'CBC', 'CFB', 'OFB', 'PGP'] #CTR needs 4 args
    _KeySize = {'AES':[16,24,32], 'CAST':xrange(5,17),
            'DES':[8], 'DES3':[16,24], 'IDEA':[16]}
    def __init__(self, mode='AES-CBC-32'):
        # Each of the three mode-string fields falls back to a sane
        # default when empty or invalid.
        mode = mode.split('-')
        mode += [''] * (3 - len(mode))
        #check cipher
        self.cipher = mode[0] if mode[0] else 'AES'
        if self.cipher not in self._BlockSize:
            raise ValueError('Invalid cipher: '+self.cipher)
        #check ciphermode (stream ciphers have none)
        if self._BlockSize[self.cipher] == 1:
            self.ciphermode = ''
        else:
            self.ciphermode = mode[1] if mode[1] in self._Modes else 'CBC'
        #check keysize (0 means "use the key as given")
        try:
            self.keysize = int(mode[2])
        except ValueError:
            self.keysize = 32
        if self.keysize != 0:
            if self.cipher in self._KeySize:
                keysize = self._KeySize[self.cipher]
                if self.keysize not in keysize:
                    self.keysize = keysize[-1]
            #avoid MemoryError with degenerate RC5 key sizes
            if self.cipher=='RC5' and self.keysize in (1, 57): self.keysize=32
        #try to import Crypto.Cipher.xxxx
        try:
            cipherlib = __import__('Crypto.Cipher.'+self.cipher, fromlist='x')
            self._newobj = cipherlib.new
            if self._BlockSize[self.cipher] != 1:
                self._ciphermode = getattr(cipherlib, 'MODE_'+self.ciphermode)
        except ImportError:
            # Only XOR has a pure-Python fallback.
            if self.cipher == 'XOR': self._newobj = XOR
            else: raise
    def paddata(self, data):
        """Prefix-pad *data* to a whole number of cipher blocks; the
        first byte records the pad length (cf. Crypto2's suffix pad)."""
        blocksize = self._BlockSize[self.cipher]
        if blocksize != 1:
            padlen = (blocksize - len(data) - 1) % blocksize
            data = '%s%s%s' % (chr(padlen), ' '*padlen, data)
        return data
    def unpaddata(self, data):
        """Strip the prefix padding added by paddata()."""
        if self._BlockSize[self.cipher] != 1:
            padlen = ord(data[0])
            data = data[padlen+1:]
        return data
    def getcrypto(self, key):
        """Build a fresh cipher object for *key* (ciphers are stateful,
        so one is created per operation)."""
        if self.keysize==0 and key=='':
            return NUL()
        khash = hashlib.sha512(key).digest()
        if self.keysize != 0:
            key = khash[:self.keysize]
        blocksize = self._BlockSize[self.cipher]
        if blocksize == 1:
            return self._newobj(key)
        # IV comes from the tail of the key digest.
        return self._newobj(key, self._ciphermode, khash[-blocksize:])
    def encrypt(self, data, key):
        crypto = self.getcrypto(key)
        data = self.paddata(data)
        return crypto.encrypt(data)
    def decrypt(self, data, key):
        crypto = self.getcrypto(key)
        data = crypto.decrypt(data)
        return self.unpaddata(data)
    def getmode(self):
        """Return the normalized 'CIPHER-MODE-KEYSIZE' string."""
        return '%s-%s-%d' % (self.cipher, self.ciphermode, self.keysize)
    def __str__(self):
        return '%s("%s")' % (self.__class__, self.getmode())
    def getsize(self, size):
        """Round *size* up to a whole number of cipher blocks."""
        blocksize = self._BlockSize[self.cipher]
        return (size + blocksize - 1) // blocksize * blocksize
class Crypto2(Crypto):
    """Crypto variant that pads at the end of the plaintext (length
    byte last) instead of at the front; otherwise identical."""
    def paddata(self, data):
        blocksize = self._BlockSize[self.cipher]
        if blocksize != 1:
            padlen = (blocksize - len(data) - 1) % blocksize
            data = '%s%s%s' % (data, ' '*padlen, chr(padlen))
        return data
    def unpaddata(self, data):
        if self._BlockSize[self.cipher] != 1:
            padlen = ord(data[-1])
            data = data[:-(padlen+1)]
        return data
# Copyright (C) 2010-2011 | GNU GPLv3
__author__ = 'd3d3LmVodXN0QGdtYWlsLmNvbQ=='.decode('base64')
import os
import urlparse, urllib
__all__ = ['url2path', 'path2url', 'parse_netloc', 'unparse_netloc',
'host2ip', 'URL']
default_ports = {'http':80, 'https':443, 'ftp':21}
def url2path(path):
    """url2path('/a%20b.txt') -> '/a b.txt' or url2path('/C:/a%20b.txt')
    -> 'C:\\a b.txt'"""
    if os.name == 'nt':
        # Strip the leading '/' in front of a Windows drive letter.
        i = path.find(':')
        if i > 0: path = path[i-1:]
    path = urllib.unquote(path)
    if os.sep != '/':
        path = path.replace('/', os.sep)
    return path
def path2url(path):
    """path2url('/a b.txt') -> '/a%20b.txt' or path2url('C:\\a b.txt') ->
    '/C:/a%20b.txt'"""
    if os.name == 'nt':
        # Separate a drive prefix so the ':' is not percent-encoded.
        driver, s, path = path.rpartition(':')
    if os.sep != '/':
        path = path.replace(os.sep, '/')
    path = urllib.quote(path)
    if os.name=='nt' and s:
        path = '/%s%s%s' % (driver, s, path)
    return path
def parse_netloc(netloc, scheme=None):
    """Split a network-location string into its components.

    Returns (hostname, port, username, password): hostname lowercased,
    IPv6 brackets stripped, and a missing port replaced by the scheme's
    default.

    >>> parse_netloc('user:pass@Example.com', 'http')
    ('example.com', 80, 'user', 'pass')
    """
    username = password = port = None
    # Peel off the userinfo part, if any.
    if '@' in netloc:
        username, netloc = netloc.rsplit('@', 1)
        if ':' in username:
            username, password = username.split(':', 1)
    colon = netloc.rfind(':')
    bracket = netloc.rfind(']')
    # A trailing ':port' exists when the last colon is the only colon
    # (no bare IPv6) or lies after the closing bracket of an IPv6
    # literal.
    has_port = colon != -1 and (
            (bracket == -1 and netloc.find(':') == colon) or
            (bracket != -1 and colon > bracket))
    if has_port:
        try:
            port = int(netloc[colon+1:])
        except ValueError:
            pass  # non-numeric port: drop it
        netloc = netloc[:colon]
    if bracket != -1:
        # Strip the [ ] around an IPv6 literal.
        netloc = netloc[netloc.find('[')+1:bracket]
    if port is None:
        port = default_ports.get(scheme)
    return netloc.lower(), port, username, password
def unparse_netloc(hostname, port=None, username=None,
        password=None, scheme=None):
    """Reassemble a network-location string from its components.

    IPv6 hosts are bracketed, the port is omitted when it matches the
    scheme's default, and userinfo is prepended when given.

    >>> unparse_netloc('example.com', 80, 'user', 'pass', 'http')
    'user:pass@example.com'
    """
    host = '[%s]' % hostname if ':' in hostname else hostname
    if port is not None and port != default_ports.get(scheme):
        host = '%s:%s' % (host, port)
    if username is None:
        return host
    if password is None:
        return '%s@%s' % (username, host)
    return '%s:%s@%s' % (username, password, host)
def host2ip(hostname):
    """Resolve *hostname* into (ipv4_list, ipv6_list).

    Any resolution failure yields two empty lists.

    >>> host2ip('localhost')
    (['127.0.0.1'], ['::1'])
    """
    v4, v6 = [], []
    try:
        import socket
        by_family = {socket.AF_INET: v4, socket.AF_INET6: v6}
        for info in socket.getaddrinfo(hostname, 80, 0, socket.SOCK_STREAM):
            by_family[info[0]].append(info[4][0])
    except:
        pass  # best effort: leave whatever was collected so far
    return v4, v6
class URL(object):
    """Parsed, mutable URL with lazy DNS resolution.

    'file' URLs keep only scheme and a filesystem path; all other
    schemes are split into hostname/port/userinfo plus path components.
    """
    def __init__(self, url):
        url = urlparse.urlparse(url)
        if url[0] == 'file':
            self.scheme = url[0]
            self.path = url2path(url[1] + url[2])
            return
        (self.scheme, netloc, path, self.params,
                self.query, self.fragment) = url
        self.path = path or '/'
        (self.hostname, self.port, self.username,
                self.password) = parse_netloc(netloc, self.scheme)
    @property
    def ip(self):
        """(ipv4_list, ipv6_list) for hostname; cached until the
        hostname attribute changes."""
        if not hasattr(self, '_ip') or self._ip[0]!=self.hostname:
            self._ip = (self.hostname, host2ip(self.hostname))
        return self._ip[1]
    @property
    def host(self):
        """'host' or 'host:port' with the scheme default port elided."""
        return unparse_netloc(self.hostname, self.port, scheme=self.scheme)
    @property
    def uri(self):
        """Request-URI form: path + params + query, no netloc."""
        if self.scheme == 'file':
            return self.path
        return urlparse.urlunparse(('', '', self.path,
                self.params, self.query, ''))
    def geturl(self, userinfo=False, fragment=False):
        """Reassemble the full URL; userinfo and fragment are omitted
        unless explicitly requested."""
        if self.scheme == 'file':
            return 'file://' + path2url(self.path)
        netloc = unparse_netloc(self.hostname, self.port, self.username
                if userinfo else None, self.password, self.scheme)
        return urlparse.urlunparse((self.scheme, netloc, self.path,
                self.params, self.query, self.fragment if fragment else ''))
    def parse_qs(self, keep_blank_values=0, strict_parsing=0):
        """Parse the query string into a dict of value lists."""
        if not hasattr(urlparse, 'parse_qs'):
            # Python < 2.6 keeps parse_qs in cgi.
            import cgi
            urlparse.parse_qs = cgi.parse_qs
        return urlparse.parse_qs(self.query, keep_blank_values, strict_parsing)
    def __getstate__(self):
        if self.scheme == 'file':
            return (self.scheme, self.path)
        return (self.scheme, self.hostname, self.port, self.username,
                self.password, self.path, self.params, self.query, self.fragment)
    def __setstate__(self, data):
        self.scheme, data = data[0], data[1:]
        if self.scheme == 'file':
            self.path = data[0]
        else:
            (self.hostname, self.port, self.username, self.password,
                    self.path, self.params, self.query, self.fragment) = data[:8]
    def __repr__(self):
        if self.scheme == 'file':
            fmt = "URL(scheme='%s', path='%s')"
        else:
            fmt = ("URL(scheme='%s', hostname='%s', port=%r, username=%r, "
                    "password=%r, path='%s', params='%s', query='%s', fragment='%s')")
        return fmt % self.__getstate__()
#!/usr/bin/env python
import sys, os
# Make the bundled sources (src.zip next to this script) importable,
# then drop the helper names so they do not linger at module level.
dir = os.path.abspath(os.path.dirname(sys.argv[0]))
sys.path.append(os.path.join(dir, 'src.zip'))
del sys, os, dir
import ProxyServer
ProxyServer.main()
| Python |
# coding=utf-8
from django.http import HttpResponse,HttpResponseRedirect
from django.shortcuts import render_to_response
from django.core import serializers
from django.utils import simplejson
from django.template import Template
from django.template import Context
from google.appengine.api import users
from google.appengine.ext import db
from blog.models import Category
from blog.models import Diary
from blog.models import Tag
from blog.models import Comment
from link.models import Link
def index(request):
    """Render the resume page with Google-accounts login context.

    Only GET is handled; any other method falls through and implicitly
    returns None (which Django will reject).
    """
    if request.method == 'GET':
        login_user = users.get_current_user()
        login_url = users.create_login_url(request.get_full_path())
        is_user_admin = users.is_current_user_admin()
        logout_url = users.create_logout_url(request.get_full_path())
        context = {'mod':'resume',
                'login_url':login_url,
                'logout_url':logout_url,
                'login_user':login_user,
                'is_user_admin':is_user_admin,}
        return render_to_response('resume/index.html', context)
| Python |
from datetime import datetime, timedelta
from django.core.exceptions import ImproperlyConfigured, ObjectDoesNotExist
from django.template import loader, Template, TemplateDoesNotExist
from django.utils import feedgenerator
from django.utils.tzinfo import FixedOffset
from django.utils.encoding import smart_unicode, iri_to_uri
from django.conf import settings
from django.template import RequestContext
def add_domain(domain, url):
    """Prefix a relative *url* with 'http://<domain>'; URLs that are
    already absolute come back unchanged."""
    if url.startswith('http://') or url.startswith('https://'):
        return url
    # 'url' must already be ASCII and URL-quoted, so no need for encoding
    # conversions here.
    return iri_to_uri(u'http://%s%s' % (domain, url))
class FeedDoesNotExist(ObjectDoesNotExist):
    """Raised by get_feed() when the URL bits match no object."""
    pass
class Feed(object):
    """Pre-Django-1.2 style syndication feed base class.

    Subclasses describe the feed through attributes/methods (title, link,
    items, item_link, ...). Each may be a plain value, a zero-argument
    callable, or a one-argument callable receiving the current object;
    __get_dynamic_attr resolves whichever form is present.
    """
    item_pubdate = None
    item_enclosure_url = None
    feed_type = feedgenerator.DefaultFeed
    title_template = None
    description_template = None
    def __init__(self, slug, request):
        # The request is kept so item templates can render with RequestContext.
        self.slug = slug
        self.request = request
        self.feed_url = request.path
        self.title_template_name = self.title_template or ('feeds/%s_title.html' % slug)
        self.description_template_name = self.description_template or ('feeds/%s_description.html' % slug)
    def item_link(self, item):
        # Default item link: delegate to the model's own get_absolute_url().
        try:
            return item.get_absolute_url()
        except AttributeError:
            raise ImproperlyConfigured, "Give your %s class a get_absolute_url() method, or define an item_link() method in your Feed class." % item.__class__.__name__
    def __get_dynamic_attr(self, attname, obj, default=None):
        # Resolve 'attname' on self: plain value, 0-arg callable, or
        # 1-arg callable that receives 'obj'.
        try:
            attr = getattr(self, attname)
        except AttributeError:
            return default
        if callable(attr):
            # Check func_code.co_argcount rather than try/excepting the
            # function and catching the TypeError, because something inside
            # the function may raise the TypeError. This technique is more
            # accurate.
            if hasattr(attr, 'func_code'):
                argcount = attr.func_code.co_argcount
            else:
                argcount = attr.__call__.func_code.co_argcount
            if argcount == 2: # one argument is 'self'
                return attr(obj)
            else:
                return attr()
        return attr
    def feed_extra_kwargs(self, obj):
        """
        Returns an extra keyword arguments dictionary that is used when
        initializing the feed generator.
        """
        return {}
    def item_extra_kwargs(self, item):
        """
        Returns an extra keyword arguments dictionary that is used with
        the `add_item` call of the feed generator.
        """
        return {}
    def get_object(self, bits):
        # Hook for subclasses: map the URL bits to the object the feed is about.
        return None
    def get_feed(self, url=None):
        """
        Returns a feedgenerator.DefaultFeed object, fully populated, for
        this feed. Raises FeedDoesNotExist for invalid parameters.
        """
        if url:
            bits = url.split('/')
        else:
            bits = []
        try:
            obj = self.get_object(bits)
        except ObjectDoesNotExist:
            raise FeedDoesNotExist
        link = self.__get_dynamic_attr('link', obj)
        current_site = None
        feed = self.feed_type(
            title = self.__get_dynamic_attr('title', obj),
            subtitle = self.__get_dynamic_attr('subtitle', obj),
            link = link,
            description = self.__get_dynamic_attr('description', obj),
            language = settings.LANGUAGE_CODE.decode(),
            feed_url = self.__get_dynamic_attr('feed_url', obj),
            author_name = self.__get_dynamic_attr('author_name', obj),
            author_link = self.__get_dynamic_attr('author_link', obj),
            author_email = self.__get_dynamic_attr('author_email', obj),
            categories = self.__get_dynamic_attr('categories', obj),
            feed_copyright = self.__get_dynamic_attr('feed_copyright', obj),
            feed_guid = self.__get_dynamic_attr('feed_guid', obj),
            ttl = self.__get_dynamic_attr('ttl', obj),
            **self.feed_extra_kwargs(obj)
        )
        # Item titles/descriptions come from per-feed templates when they
        # exist, otherwise fall back to rendering the object itself.
        try:
            title_tmp = loader.get_template(self.title_template_name)
        except TemplateDoesNotExist:
            title_tmp = Template('{{ obj }}')
        try:
            description_tmp = loader.get_template(self.description_template_name)
        except TemplateDoesNotExist:
            description_tmp = Template('{% autoescape off %}{{obj}}{% endautoescape %}')
        for item in self.__get_dynamic_attr('items', obj):
            link = self.__get_dynamic_attr('item_link', item)
            enc = None
            enc_url = self.__get_dynamic_attr('item_enclosure_url', item)
            if enc_url:
                enc = feedgenerator.Enclosure(
                    url = smart_unicode(enc_url),
                    length = smart_unicode(self.__get_dynamic_attr('item_enclosure_length', item)),
                    mime_type = smart_unicode(self.__get_dynamic_attr('item_enclosure_mime_type', item))
                )
            author_name = self.__get_dynamic_attr('item_author_name', item)
            if author_name is not None:
                author_email = self.__get_dynamic_attr('item_author_email', item)
                author_link = self.__get_dynamic_attr('item_author_link', item)
            else:
                author_email = author_link = None
            pubdate = self.__get_dynamic_attr('item_pubdate', item)
            if pubdate:
                # Attach the server's local UTC offset to the naive pubdate.
                now = datetime.now()
                utcnow = datetime.utcnow()
                # Must always subtract smaller time from larger time here.
                if utcnow > now:
                    sign = -1
                    tzDifference = (utcnow - now)
                else:
                    sign = 1
                    tzDifference = (now - utcnow)
                # Round the timezone offset to the nearest half hour.
                tzOffsetMinutes = sign * ((tzDifference.seconds / 60 + 15) / 30) * 30
                tzOffset = timedelta(minutes=tzOffsetMinutes)
                pubdate = pubdate.replace(tzinfo=FixedOffset(tzOffset))
            feed.add_item(
                title = title_tmp.render(RequestContext(self.request, {'obj': self.__get_dynamic_attr('item_title', item)})),
                link = link,
                description = description_tmp.render(RequestContext(self.request, {'obj': self.__get_dynamic_attr('item_content', item)})),
                unique_id = self.__get_dynamic_attr('item_guid', item, link),
                enclosure = enc,
                pubdate = pubdate,
                author_name = author_name,
                author_email = author_email,
                author_link = author_link,
                categories = self.__get_dynamic_attr('item_categories', item),
                item_copyright = self.__get_dynamic_attr('item_copyright', item),
                **self.item_extra_kwargs(item)
            )
        return feed
| Python |
from django.http import HttpResponse, Http404
from feeds import FeedDoesNotExist
def feed(request, url, feed_dict=None):
    """Dispatch '<slug>/<param>' to the matching Feed class in feed_dict.

    Raises Http404 for missing registries, unknown slugs, or invalid
    feed parameters (FeedDoesNotExist).
    """
    if not feed_dict:
        raise Http404, "No feeds are registered."
    # Split "<slug>/<rest>"; a bare slug gets an empty param string.
    try:
        slug, param = url.split('/', 1)
    except ValueError:
        slug, param = url, ''
    try:
        f = feed_dict[slug]
    except KeyError:
        raise Http404, "Slug %r isn't registered." % slug
    try:
        feedgen = f(slug, request).get_feed(param)
    except FeedDoesNotExist:
        raise Http404, "Invalid feed parameters. Slug %r is valid, but other parameters, or lack thereof, are not." % slug
    # Serialize the generated feed straight into the HTTP response.
    response = HttpResponse(mimetype=feedgen.mime_type)
    feedgen.write(response, 'utf-8')
    return response
| Python |
# coding=utf-8
from google.appengine.ext import db
class WebSite(db.Model):
    """Site-wide configuration stored in the GAE datastore."""
    # Site title (default is Chinese: "no mountain is higher than a man").
    title = db.StringProperty( default = '没有比人更高的山' )
    # Tagline shown with the title.
    subtitle = db.StringProperty( default = 'Where There is a Will There is a Way' )
    # Presumably a cached count of blog entries — confirm against the blog app.
    blog_entry_count = db.IntegerProperty( default = 0 )
    # Pagination size for post listings.
    posts_per_page = db.IntegerProperty( default = 5 )
    # Path of the RSS endpoint.
    rss_url = db.StringProperty( default = '/rss/' )
class Menu(db.Model):
    """A single navigation-menu entry."""
    name = db.StringProperty(required = True)
    url = db.StringProperty(required = True)
    # HTML anchor target attribute ('_self', '_blank', ...).
    target = db.StringProperty(required = True, default = '_self')
    # Sort key; presumably lower values render first — confirm in the template.
    order = db.IntegerProperty(required = True, default = 0)
| Python |
import logging, os, sys
# Google App Engine imports.
from google.appengine.ext.webapp import util
# Remove the standard version of Django.
for k in [k for k in sys.modules if k.startswith('django')]:
    del sys.modules[k]
# Force sys.path to have our own directory first, in case we want to import
# from it.
if os.name=='nt':
    # NOTE(review): stubbing out os.unlink on Windows — presumably a
    # workaround for file deletion in the dev server; confirm before removing.
    os.unlink=lambda :None
# Add Django 1.0.2 archive to the path.
#django_path = 'django.zip'
#sys.path.insert(0, django_path)
#
from google.appengine.dist import use_library
use_library('django', '1.0')
# Add ElementTree archive to the path
elementtree_path = 'elementtree.zip'
sys.path.insert(0, elementtree_path)
# Must set this env var *before* importing any part of Django
os.environ['DJANGO_SETTINGS_MODULE'] = 'settings'
import django.core.handlers.wsgi
import django.core.signals
import django.db
import django.dispatch.dispatcher
def log_exception(*args, **kwds):
    # Signal handler: record any unhandled request exception in the GAE log.
    logging.exception('Exception in request:')
# Log errors.
django.dispatch.dispatcher.connect(log_exception, django.core.signals.got_request_exception)
# Unregister the rollback event handler.
django.dispatch.dispatcher.disconnect(django.db._rollback_on_exception,django.core.signals.got_request_exception)
def main():
    """CGI entry point: build the Django WSGI application and run it on GAE."""
    # Re-add Django 1.0 archive to the path, if needed.
    #if django_path not in sys.path:
    # sys.path.insert(0, django_path)
    if elementtree_path not in sys.path:
        sys.path.insert(0, elementtree_path)
    # Create a Django application for WSGI.
    application = django.core.handlers.wsgi.WSGIHandler()
    # Run the WSGI CGI handler with that application.
    util.run_wsgi_app(application)
if __name__ == '__main__':
    main()
| Python |
# coding=utf-8
from google.appengine.ext import db
#链接
class Link(db.Model):
    """A blogroll/sidebar link."""
    title = db.StringProperty()
    url = db.LinkProperty()
    # Sort key within the link list.
    order = db.IntegerProperty(required = True, default = 0)
| Python |
# coding=utf-8
from django.http import HttpResponse,HttpResponseRedirect
from django.shortcuts import render_to_response
from django.core import serializers
from django.core.paginator import *
from models import Catagory
from models import Diary
from models import Tag
from models import Comment
| Python |
"""
FCKeditor - The text editor for Internet - http://www.fckeditor.net
Copyright (C) 2003-2008 Frederico Caldeira Knabben
== BEGIN LICENSE ==
Licensed under the terms of any of the following licenses at your
choice:
- GNU General Public License Version 2 or later (the "GPL")
http://www.gnu.org/licenses/gpl.html
- GNU Lesser General Public License Version 2.1 or later (the "LGPL")
http://www.gnu.org/licenses/lgpl.html
- Mozilla Public License Version 1.1 or later (the "MPL")
http://www.mozilla.org/MPL/MPL-1.1.html
== END LICENSE ==
This is the integration file for Python.
"""
import cgi
import os
import re
import string
def escape(text, replace=None):
    """Convert the special characters '&', '<', '>', '"' and "'" to their
    HTML entities (RFC 1866): &amp; &lt; &gt; &quot; &#39;.

    '&' must be replaced first so the ampersands introduced by the other
    substitutions are not escaped again.

    ``replace`` may be a ``replace(text, old, new)`` callable; by default
    ``str.replace`` is used. (The original defaulted to ``string.replace``,
    which was removed from the stdlib, and its replacement entities had been
    mangled into no-ops by HTML-entity stripping.)
    """
    if replace is None:
        def replace(s, old, new):
            return s.replace(old, new)
    text = replace(text, '&', '&amp;') # must be done 1st
    text = replace(text, '<', '&lt;')
    text = replace(text, '>', '&gt;')
    text = replace(text, '"', '&quot;')
    text = replace(text, "'", '&#39;')
    return text
# The FCKeditor class
class FCKeditor(object):
    """Server-side helper that renders an FCKeditor instance.

    Create() returns the HTML for the editor: on compatible browsers a
    hidden value field, a hidden ___Config field and the editor <iframe>;
    otherwise a plain <textarea> fallback.
    """
    def __init__(self, instanceName):
        self.InstanceName = instanceName
        self.BasePath = '/fckeditor/'  # base URL of the editor distribution
        self.Width = '100%'
        self.Height = '200'
        self.ToolbarSet = 'Default'
        self.Value = ''
        self.Config = {}
    def Create(self):
        """Return the full HTML markup for this editor instance."""
        return self.CreateHtml()
    def CreateHtml(self):
        """Build the markup: iframe editor, or <textarea> fallback for
        unsupported browsers."""
        HtmlValue = escape(self.Value)
        Html = ""
        if (self.IsCompatible()):
            File = "fckeditor.html"
            Link = "%seditor/%s?InstanceName=%s" % (
                self.BasePath,
                File,
                self.InstanceName
            )
            if (self.ToolbarSet is not None):
                # '&amp;' because this URL is embedded in an HTML attribute.
                Link += "&amp;ToolBar=%s" % self.ToolbarSet
            # Render the linked hidden field
            Html += "<input type=\"hidden\" id=\"%s\" name=\"%s\" value=\"%s\" style=\"display:none\" />" % (
                self.InstanceName,
                self.InstanceName,
                HtmlValue
            )
            # Render the configurations hidden field
            Html += "<input type=\"hidden\" id=\"%s___Config\" value=\"%s\" style=\"display:none\" />" % (
                self.InstanceName,
                self.GetConfigFieldString()
            )
            # Render the editor iframe. The frame id must be
            # InstanceName + '___Frame'; the original "%s\__Frame" contained a
            # broken escape sequence and emitted a literal backslash in the id.
            Html += "<iframe id=\"%s___Frame\" src=\"%s\" width=\"%s\" height=\"%s\" frameborder=\"0\" scrolling=\"no\"></iframe>" % (
                self.InstanceName,
                Link,
                self.Width,
                self.Height
            )
        else:
            # Fallback textarea. Append 'px' only to purely numeric sizes; the
            # original searched for the two-character string "%%" (never
            # present), turning "100%" into the invalid CSS value "100%px".
            if (self.Width.find("%") < 0):
                WidthCSS = "%spx" % self.Width
            else:
                WidthCSS = self.Width
            if (self.Height.find("%") < 0):
                HeightCSS = "%spx" % self.Height
            else:
                HeightCSS = self.Height
            Html += "<textarea name=\"%s\" rows=\"4\" cols=\"40\" style=\"width: %s; height: %s;\" wrap=\"virtual\">%s</textarea>" % (
                self.InstanceName,
                WidthCSS,
                HeightCSS,
                HtmlValue
            )
        return Html
    def IsCompatible(self):
        """User-Agent sniffing: True for IE >= 5.5 (not Mac IE / Opera),
        Gecko >= 20030210, Opera >= 9.5, or WebKit >= 522."""
        # os.environ.get works on Python 2 and 3 (has_key is Python 2 only).
        sAgent = os.environ.get("HTTP_USER_AGENT", "")
        if (sAgent.find("MSIE") >= 0) and (sAgent.find("mac") < 0) and (sAgent.find("Opera") < 0):
            i = sAgent.find("MSIE")
            iVersion = float(sAgent[i+5:i+5+3])
            return iVersion >= 5.5
        elif (sAgent.find("Gecko/") >= 0):
            i = sAgent.find("Gecko/")
            iVersion = int(sAgent[i+6:i+6+8])
            return iVersion >= 20030210
        elif (sAgent.find("Opera/") >= 0):
            i = sAgent.find("Opera/")
            iVersion = float(sAgent[i+6:i+6+4])
            return iVersion >= 9.5
        elif (sAgent.find("AppleWebKit/") >= 0):
            p = re.compile('AppleWebKit\\/(\\d+)', re.IGNORECASE)
            m = p.search(sAgent)
            # int(): the original compared the matched *string* against 522,
            # which is not a numeric comparison.
            return int(m.group(1)) >= 522
        else:
            return False
    def GetConfigFieldString(self):
        """Serialize self.Config into the escaped query string that goes into
        the hidden ___Config field. 'true'/'false' values pass through as-is."""
        sParams = ""
        bFirst = True
        for sKey in self.Config.keys():
            sValue = self.Config[sKey]
            if (not bFirst):
                # '&amp;' because the string is written into an HTML attribute.
                sParams += "&amp;"
            else:
                bFirst = False
            if (sValue):
                k = escape(sKey)
                v = escape(sValue)
                if (sValue == "true"):
                    sParams += "%s=true" % k
                elif (sValue == "false"):
                    sParams += "%s=false" % k
                else:
                    sParams += "%s=%s" % (k, v)
        return sParams
| Python |
#!/usr/bin/env python
"""
FCKeditor - The text editor for Internet - http://www.fckeditor.net
Copyright (C) 2003-2008 Frederico Caldeira Knabben
== BEGIN LICENSE ==
Licensed under the terms of any of the following licenses at your
choice:
- GNU General Public License Version 2 or later (the "GPL")
http://www.gnu.org/licenses/gpl.html
- GNU Lesser General Public License Version 2.1 or later (the "LGPL")
http://www.gnu.org/licenses/lgpl.html
- Mozilla Public License Version 1.1 or later (the "MPL")
http://www.mozilla.org/MPL/MPL-1.1.html
== END LICENSE ==
Connector for Python (CGI and WSGI).
"""
import os
try: # Windows needs stdio set for binary mode for file upload to work.
import msvcrt
msvcrt.setmode (0, os.O_BINARY) # stdin = 0
msvcrt.setmode (1, os.O_BINARY) # stdout = 1
except ImportError:
pass
from fckutil import *
from fckoutput import *
import config as Config
class GetFoldersCommandMixin (object):
    def getFolders(self, resourceType, currentFolder):
        """
        Purpose: command to recieve a list of folders
        """
        # Resolve the virtual folder to a physical path on this server.
        physicalPath = mapServerFolder(self.userFilesFolder, currentFolder)
        entries = []
        for name in os.listdir(physicalPath):
            # Only subdirectories are reported by this command.
            if os.path.isdir(mapServerFolder(physicalPath, name)):
                entries.append('<Folder name="%s" />' % (
                    convertToXmlAttribute(name)
                ))
        # Wrap the collected <Folder/> elements in one <Folders> node.
        return '<Folders>' + ''.join(entries) + '</Folders>'
class GetFoldersAndFilesCommandMixin (object):
    def getFoldersAndFiles(self, resourceType, currentFolder):
        """
        Purpose: command to recieve a list of folders and files
        """
        # Map the virtual path to our local server
        serverPath = mapServerFolder(self.userFilesFolder,currentFolder)
        # Open the folders / files node
        folders = """<Folders>"""
        files = """<Files>"""
        for someObject in os.listdir(serverPath):
            someObjectPath = mapServerFolder(serverPath, someObject)
            if os.path.isdir(someObjectPath):
                folders += """<Folder name="%s" />""" % (
                    convertToXmlAttribute(someObject)
                )
            elif os.path.isfile(someObjectPath):
                # Stat the file once and reuse the result (the original called
                # os.path.getsize a second time and left this variable unused).
                size = os.path.getsize(someObjectPath)
                files += """<File name="%s" size="%s" />""" % (
                    convertToXmlAttribute(someObject),
                    size
                )
        # Close the folders / files node
        folders += """</Folders>"""
        files += """</Files>"""
        return folders + files
class CreateFolderCommandMixin (object):
    def createFolder(self, resourceType, currentFolder):
        """
        Purpose: command to create a new folder
        """
        # Error numbers follow the FCKeditor connector protocol
        # (0 = ok, 102 = invalid name, 103 = no permission, 110 = unknown).
        errorNo = 0; errorMsg ='';
        if self.request.has_key("NewFolderName"):
            newFolder = self.request.get("NewFolderName", None)
            newFolder = sanitizeFolderName (newFolder)
            try:
                newFolderPath = mapServerFolder(self.userFilesFolder, combinePaths(currentFolder, newFolder))
                self.createServerFolder(newFolderPath)
            # NOTE: 'except Exception, e' is Python 2 only syntax.
            except Exception, e:
                errorMsg = str(e).decode('iso-8859-1').encode('utf-8') # warning with encodigns!!!
                if hasattr(e,'errno'):
                    if e.errno==17: #file already exists
                        errorNo=0
                    elif e.errno==13: # permission denied
                        errorNo = 103
                    elif e.errno==36 or e.errno==2 or e.errno==22: # filename too long / no such file / invalid name
                        errorNo = 102
                    else:
                        errorNo = 110
                else:
                    errorNo = 102
        return self.sendErrorNode ( errorNo, errorMsg )
    def createServerFolder(self, folderPath):
        "Purpose: physically creates a folder on the server"
        # No need to check if the parent exists, just create all hierachy
        try:
            permissions = Config.ChmodOnFolderCreate
            if not permissions:
                os.makedirs(folderPath)
        except AttributeError: #ChmodOnFolderCreate undefined
            permissions = 0755
        if permissions:
            # Clear the umask so the requested mode applies exactly.
            oldumask = os.umask(0)
            # NOTE(review): mode is hard-coded to 0755 here; presumably it
            # should be mode=permissions (Config.ChmodOnFolderCreate) — confirm.
            os.makedirs(folderPath,mode=0755)
            os.umask( oldumask )
class UploadFileCommandMixin (object):
    def uploadFile(self, resourceType, currentFolder):
        """
        Purpose: command to upload files to server (same as FileUpload)
        """
        errorNo = 0
        if self.request.has_key("NewFile"):
            # newFile has all the contents we need
            newFile = self.request.get("NewFile", "")
            # Get the file name
            newFileName = newFile.filename
            newFileName = sanitizeFileName( newFileName )
            newFileNameOnly = removeExtension(newFileName)
            newFileExtension = getExtension(newFileName).lower()
            allowedExtensions = Config.AllowedExtensions[resourceType]
            deniedExtensions = Config.DeniedExtensions[resourceType]
            # A file is accepted when it passes the allow-list (if one is
            # configured) and is absent from the deny-list.
            if (allowedExtensions):
                # Check for allowed
                isAllowed = False
                if (newFileExtension in allowedExtensions):
                    isAllowed = True
            elif (deniedExtensions):
                # Check for denied
                isAllowed = True
                if (newFileExtension in deniedExtensions):
                    isAllowed = False
            else:
                # No extension limitations
                isAllowed = True
            if (isAllowed):
                # Upload to operating system
                # Map the virtual path to the local server path
                currentFolderPath = mapServerFolder(self.userFilesFolder, currentFolder)
                i = 0
                while (True):
                    newFilePath = os.path.join (currentFolderPath,newFileName)
                    if os.path.exists(newFilePath):
                        # Name collision: append a (NNNN) counter and retry.
                        i += 1
                        newFileName = "%s(%04d).%s" % (
                            newFileNameOnly, i, newFileExtension
                        )
                        errorNo= 201 # file renamed
                    else:
                        # Read file contents and write to the desired path (similar to php's move_uploaded_file)
                        # NOTE: file() is Python 2 only (open() elsewhere).
                        fout = file(newFilePath, 'wb')
                        while (True):
                            chunk = newFile.file.read(100000)
                            if not chunk: break
                            fout.write (chunk)
                        fout.close()
                        if os.path.exists ( newFilePath ):
                            doChmod = False
                            try:
                                doChmod = Config.ChmodOnUpload
                                permissions = Config.ChmodOnUpload
                            except AttributeError: #ChmodOnUpload undefined
                                doChmod = True
                                permissions = 0755
                            if ( doChmod ):
                                # Clear the umask so chmod applies exactly.
                                oldumask = os.umask(0)
                                os.chmod( newFilePath, permissions )
                                os.umask( oldumask )
                        newFileUrl = self.webUserFilesFolder + currentFolder + newFileName
                        return self.sendUploadResults( errorNo , newFileUrl, newFileName )
            else:
                return self.sendUploadResults( errorNo = 203, customMsg = "Extension not allowed" )
        else:
            return self.sendUploadResults( errorNo = 202, customMsg = "No File" )
| Python |
#!/usr/bin/env python
"""
* FCKeditor - The text editor for Internet - http://www.fckeditor.net
* Copyright (C) 2003-2008 Frederico Caldeira Knabben
*
* == BEGIN LICENSE ==
*
* Licensed under the terms of any of the following licenses at your
* choice:
*
* - GNU General Public License Version 2 or later (the "GPL")
* http://www.gnu.org/licenses/gpl.html
*
* - GNU Lesser General Public License Version 2.1 or later (the "LGPL")
* http://www.gnu.org/licenses/lgpl.html
*
* - Mozilla Public License Version 1.1 or later (the "MPL")
* http://www.mozilla.org/MPL/MPL-1.1.html
*
* == END LICENSE ==
*
* Configuration file for the File Manager Connector for Python
"""
# INSTALLATION NOTE: You must set up your server environment accordingly to run
# python scripts. This connector requires Python 2.4 or greater.
#
# Supported operation modes:
# * WSGI (recommended): You'll need apache + mod_python + modpython_gateway
# or any web server capable of the WSGI python standard
# * Plain Old CGI: Any server capable of running standard python scripts
# (although mod_python is recommended for performance)
# This was the previous connector version operation mode
#
# If you're using Apache web server, replace the htaccess.txt to to .htaccess,
# and set the proper options and paths.
# For WSGI and mod_python, you may need to download modpython_gateway from:
# http://projects.amor.org/misc/svn/modpython_gateway.py and copy it in this
# directory.
# SECURITY: You must explicitly enable this "connector". (Set it to "True").
# WARNING: don't just set "ConfigIsEnabled = True", you must be sure that only
# authenticated users can access this file or use some kind of session checking.
Enabled = False
# Path to user files relative to the document root.
UserFilesPath = '/userfiles/'
# Fill the following value it you prefer to specify the absolute path for the
# user files directory. Useful if you are using a virtual directory, symbolic
# link or alias. Examples: 'C:\\MySite\\userfiles\\' or '/root/mysite/userfiles/'.
# Attention: The above 'UserFilesPath' must point to the same directory.
# WARNING: GetRootPath may not work in virtual or mod_python configurations, and
# may not be thread safe. Use this configuration parameter instead.
UserFilesAbsolutePath = ''
# Due to security issues with Apache modules, it is recommended to leave the
# following setting enabled.
ForceSingleExtension = True
# What the user can do with this connector
ConfigAllowedCommands = [ 'QuickUpload', 'FileUpload', 'GetFolders', 'GetFoldersAndFiles', 'CreateFolder' ]
# Allowed Resource Types
ConfigAllowedTypes = ['File', 'Image', 'Flash', 'Media']
# After file is uploaded, sometimes it is required to change its permissions
# so that it was possible to access it at the later time.
# If possible, it is recommended to set more restrictive permissions, like 0755.
# Set to 0 to disable this feature.
# Note: not needed on Windows-based servers.
# NOTE: 0755 is a Python 2 octal literal (0o755 in Python 3).
ChmodOnUpload = 0755
# See comments above.
# Used when creating folders that does not exist.
ChmodOnFolderCreate = 0755
# Do not touch this 3 lines, see "Configuration settings for each Resource Type"
AllowedExtensions = {}; DeniedExtensions = {};
FileTypesPath = {}; FileTypesAbsolutePath = {};
QuickUploadPath = {}; QuickUploadAbsolutePath = {};
# Configuration settings for each Resource Type
#
# - AllowedExtensions: the possible extensions that can be allowed.
# If it is empty then any file type can be uploaded.
# - DeniedExtensions: The extensions that won't be allowed.
# If it is empty then no restrictions are done here.
#
# For a file to be uploaded it has to fulfill both the AllowedExtensions
# and DeniedExtensions (that's it: not being denied) conditions.
#
# - FileTypesPath: the virtual folder relative to the document root where
# these resources will be located.
# Attention: It must start and end with a slash: '/'
#
# - FileTypesAbsolutePath: the physical path to the above folder. It must be
# an absolute path.
# If it's an empty string then it will be autocalculated.
# Useful if you are using a virtual directory, symbolic link or alias.
# Examples: 'C:\\MySite\\userfiles\\' or '/root/mysite/userfiles/'.
# Attention: The above 'FileTypesPath' must point to the same directory.
# Attention: It must end with a slash: '/'
#
#
# - QuickUploadPath: the virtual folder relative to the document root where
# these resources will be uploaded using the Upload tab in the resources
# dialogs.
# Attention: It must start and end with a slash: '/'
#
# - QuickUploadAbsolutePath: the physical path to the above folder. It must be
# an absolute path.
# If it's an empty string then it will be autocalculated.
# Useful if you are using a virtual directory, symbolic link or alias.
# Examples: 'C:\\MySite\\userfiles\\' or '/root/mysite/userfiles/'.
# Attention: The above 'QuickUploadPath' must point to the same directory.
# Attention: It must end with a slash: '/'
# --- 'File' resource type: general downloads (broad whitelist) ---
AllowedExtensions['File'] = ['7z','aiff','asf','avi','bmp','csv','doc','fla','flv','gif','gz','gzip','jpeg','jpg','mid','mov','mp3','mp4','mpc','mpeg','mpg','ods','odt','pdf','png','ppt','pxd','qt','ram','rar','rm','rmi','rmvb','rtf','sdc','sitd','swf','sxc','sxw','tar','tgz','tif','tiff','txt','vsd','wav','wma','wmv','xls','xml','zip']
DeniedExtensions['File'] = []
FileTypesPath['File'] = UserFilesPath + 'file/'
FileTypesAbsolutePath['File'] = (not UserFilesAbsolutePath == '') and (UserFilesAbsolutePath + 'file/') or ''
QuickUploadPath['File'] = FileTypesPath['File']
QuickUploadAbsolutePath['File'] = FileTypesAbsolutePath['File']
# --- 'Image' resource type ---
AllowedExtensions['Image'] = ['bmp','gif','jpeg','jpg','png']
DeniedExtensions['Image'] = []
FileTypesPath['Image'] = UserFilesPath + 'image/'
FileTypesAbsolutePath['Image'] = (not UserFilesAbsolutePath == '') and UserFilesAbsolutePath + 'image/' or ''
QuickUploadPath['Image'] = FileTypesPath['Image']
QuickUploadAbsolutePath['Image']= FileTypesAbsolutePath['Image']
# --- 'Flash' resource type ---
AllowedExtensions['Flash'] = ['swf','flv']
DeniedExtensions['Flash'] = []
FileTypesPath['Flash'] = UserFilesPath + 'flash/'
FileTypesAbsolutePath['Flash'] = ( not UserFilesAbsolutePath == '') and UserFilesAbsolutePath + 'flash/' or ''
QuickUploadPath['Flash'] = FileTypesPath['Flash']
QuickUploadAbsolutePath['Flash']= FileTypesAbsolutePath['Flash']
# --- 'Media' resource type: audio/video ---
AllowedExtensions['Media'] = ['aiff','asf','avi','bmp','fla', 'flv','gif','jpeg','jpg','mid','mov','mp3','mp4','mpc','mpeg','mpg','png','qt','ram','rm','rmi','rmvb','swf','tif','tiff','wav','wma','wmv']
DeniedExtensions['Media'] = []
FileTypesPath['Media'] = UserFilesPath + 'media/'
FileTypesAbsolutePath['Media'] = ( not UserFilesAbsolutePath == '') and UserFilesAbsolutePath + 'media/' or ''
QuickUploadPath['Media'] = FileTypesPath['Media']
QuickUploadAbsolutePath['Media']= FileTypesAbsolutePath['Media']
| Python |
#!/usr/bin/env python
"""
FCKeditor - The text editor for Internet - http://www.fckeditor.net
Copyright (C) 2003-2008 Frederico Caldeira Knabben
== BEGIN LICENSE ==
Licensed under the terms of any of the following licenses at your
choice:
- GNU General Public License Version 2 or later (the "GPL")
http://www.gnu.org/licenses/gpl.html
- GNU Lesser General Public License Version 2.1 or later (the "LGPL")
http://www.gnu.org/licenses/lgpl.html
- Mozilla Public License Version 1.1 or later (the "MPL")
http://www.mozilla.org/MPL/MPL-1.1.html
== END LICENSE ==
Utility functions for the File Manager Connector for Python
"""
import string, re
import os
import config as Config
# Generic manipulation functions
def removeExtension(fileName):
    """Return *fileName* without its final extension.

    Extensionless names are returned unchanged (the original rindex-based
    code raised ValueError when there was no dot).
    """
    stem, dot, _ = fileName.rpartition(".")
    return stem if dot else fileName
def getExtension(fileName):
    """Return the text after the last '.' in *fileName* (no leading dot).

    Extensionless names yield '' (the original rindex-based code raised
    ValueError when there was no dot).
    """
    _, dot, ext = fileName.rpartition(".")
    return ext if dot else ""
def removeFromStart(text, char):
    """Strip every leading occurrence of *char* from *text*.

    (Parameter renamed: the original called it 'string', shadowing the
    imported ``string`` module.)
    """
    return text.lstrip(char)
def removeFromEnd(text, char):
    """Strip every trailing occurrence of *char* from *text*.

    (Parameter renamed: the original called it 'string', shadowing the
    imported ``string`` module.)
    """
    return text.rstrip(char)
# Path functions
def combinePaths( basePath, folder ):
    """Join two path fragments with exactly one '/' between them."""
    left = removeFromEnd( basePath, '/' )
    right = removeFromStart( folder, '/' )
    return '%s/%s' % (left, right)
def getFileName(filename):
    """Return the basename of *filename*, handling '/' and '\\' separators."""
    # Try each separator in turn, keeping only the part after the last one.
    for sep in ("/", "\\"):
        pieces = filename.split(sep)
        if len(pieces) > 1:
            filename = pieces[-1]
    return filename
def sanitizeFolderName( newFolderName ):
    "Do a cleanup of the folder name to avoid possible problems"
    # Remove . \ / | : ? * " < > and control characters.
    # The original pattern negated the control-character class
    # ([^\u0000-...]), which made it replace every *normal* character
    # with '_' instead of the control characters.
    return re.sub( '(?u)\\.|\\\\|\\/|\\||\\:|\\?|\\*|"|<|>|[\u0000-\u001f\u007f-\u009f]', '_', newFolderName )
def sanitizeFileName( newFileName ):
    "Do a cleanup of the file name to avoid possible problems"
    # Replace dots in the name with underscores (only one dot can be there... security issue).
    if ( Config.ForceSingleExtension ): # remove dots
        # Replace every dot except the one before the final extension.
        # (The original pattern kept PHP-style '/.../' delimiters, so it
        # never matched and the extra dots survived.)
        newFileName = re.sub ( '\\.(?![^.]*$)', '_', newFileName )
    newFileName = newFileName.replace('\\','/') # convert windows to unix path
    newFileName = os.path.basename (newFileName) # strip directories
    # Remove \ / | : ? * " < > and control characters. The original pattern
    # also had '/...' delimiters and a negated control-character class.
    return re.sub ( '(?u)\\\\|\\/|\\||\\:|\\?|\\*|"|<|>|[\u0000-\u001f\u007f-\u009f]', '_', newFileName )
def getCurrentFolder(currentFolder):
    """Normalize a virtual folder path: ensure leading/trailing '/', collapse
    double slashes, and return None for traversal attempts ('..' or '\\').

    (Replaced the Python-2-only '<>' operator with '!=', which behaves
    identically on Python 2.)
    """
    if not currentFolder:
        currentFolder = '/'
    # Check the current folder syntax (must begin and end with a slash).
    if (currentFolder[-1] != "/"):
        currentFolder += "/"
    if (currentFolder[0] != "/"):
        currentFolder = "/" + currentFolder
    # Ensure the folder path has no double-slashes
    while '//' in currentFolder:
        currentFolder = currentFolder.replace('//','/')
    # Check for invalid folder paths (..)
    if '..' in currentFolder or '\\' in currentFolder:
        return None
    return currentFolder
def mapServerPath( environ, url):
    " Emulate the asp Server.mapPath function. Given an url path return the physical directory that it corresponds to "
    # This isn't correct but for the moment there's no other solution
    # If this script is under a virtual directory or symlink it will detect the problem and stop
    # See getRootPath() for the DOCUMENT_ROOT-based resolution.
    return combinePaths( getRootPath(environ), url )
def mapServerFolder(resourceTypePath, folderPath):
    # Join a resource-type base path and a virtual folder into a single path.
    return combinePaths ( resourceTypePath , folderPath )
def getRootPath(environ):
    "Purpose: returns the root path on the server"
    # WARNING: this may not be thread safe, and doesn't work w/ VirtualServer/mod_python
    # Use Config.UserFilesAbsolutePath instead
    if 'DOCUMENT_ROOT' in environ:
        return environ['DOCUMENT_ROOT']
    else:
        # Fall back to deriving the root from the script's own location.
        # (The original contained a stray debug statement, 'raise realPath',
        # that made everything below unreachable; it has been removed. The
        # Python-2-only '<>' was replaced by the equivalent '!='.)
        realPath = os.path.realpath( './' )
        selfPath = environ['SCRIPT_FILENAME']
        selfPath = selfPath [ : selfPath.rfind( '/' ) ]
        selfPath = selfPath.replace( '/', os.path.sep)
        position = realPath.find(selfPath)
        # This can check only that this script isn't run from a virtual dir
        # But it avoids the problems that arise if it isn't checked
        if ( position < 0 or position != len(realPath) - len(selfPath) or realPath[ : position ]==''):
            raise Exception('Sorry, can\'t map "UserFilesPath" to a physical path. You must set the "UserFilesAbsolutePath" value in "editor/filemanager/connectors/py/config.py".')
        return realPath[ : position ]
| Python |
#!/usr/bin/env python
"""
FCKeditor - The text editor for Internet - http://www.fckeditor.net
Copyright (C) 2003-2008 Frederico Caldeira Knabben
== BEGIN LICENSE ==
Licensed under the terms of any of the following licenses at your
choice:
- GNU General Public License Version 2 or later (the "GPL")
http://www.gnu.org/licenses/gpl.html
- GNU Lesser General Public License Version 2.1 or later (the "LGPL")
http://www.gnu.org/licenses/lgpl.html
- Mozilla Public License Version 1.1 or later (the "MPL")
http://www.mozilla.org/MPL/MPL-1.1.html
== END LICENSE ==
Connector/QuickUpload for Python (WSGI wrapper).
See config.py for configuration settings
"""
from connector import FCKeditorConnector
from upload import FCKeditorQuickUpload
import cgitb
from cStringIO import StringIO
# Running from WSGI capable server (recomended)
def App(environ, start_response):
    "WSGI entry point. Run the connector"
    # Route on the script name: connector.py -> full connector,
    # upload.py -> quick-upload handler, anything else -> plain error page.
    if environ['SCRIPT_NAME'].endswith("connector.py"):
        conn = FCKeditorConnector(environ)
    elif environ['SCRIPT_NAME'].endswith("upload.py"):
        conn = FCKeditorQuickUpload(environ)
    else:
        start_response ("200 Ok", [('Content-Type','text/html')])
        yield "Unknown page requested: "
        yield environ['SCRIPT_NAME']
        return
    try:
        # run the connector
        data = conn.doResponse()
        # Start WSGI response:
        start_response ("200 Ok", conn.headers)
        # Send response text
        yield data
    except:
        # Render the traceback as an HTML error page via cgitb.
        start_response("500 Internal Server Error",[("Content-type","text/html")])
        file = StringIO()
        cgitb.Hook(file = file).handle()
        yield file.getvalue()
| Python |
#!/usr/bin/env python
"""
FCKeditor - The text editor for Internet - http://www.fckeditor.net
Copyright (C) 2003-2008 Frederico Caldeira Knabben
== BEGIN LICENSE ==
Licensed under the terms of any of the following licenses at your
choice:
- GNU General Public License Version 2 or later (the "GPL")
http://www.gnu.org/licenses/gpl.html
- GNU Lesser General Public License Version 2.1 or later (the "LGPL")
http://www.gnu.org/licenses/lgpl.html
- Mozilla Public License Version 1.1 or later (the "MPL")
http://www.mozilla.org/MPL/MPL-1.1.html
== END LICENSE ==
Connector for Python (CGI and WSGI).
"""
import os
try: # Windows needs stdio set for binary mode for file upload to work.
import msvcrt
msvcrt.setmode (0, os.O_BINARY) # stdin = 0
msvcrt.setmode (1, os.O_BINARY) # stdout = 1
except ImportError:
pass
from fckutil import *
from fckoutput import *
import config as Config
class GetFoldersCommandMixin (object):
    def getFolders(self, resourceType, currentFolder):
        """
        Purpose: command to recieve a list of folders
        """
        # Map the virtual path to our local server
        serverPath = mapServerFolder(self.userFilesFolder,currentFolder)
        s = """<Folders>""" # Open the folders node
        for someObject in os.listdir(serverPath):
            someObjectPath = mapServerFolder(serverPath, someObject)
            if os.path.isdir(someObjectPath):
                # Each subdirectory becomes a <Folder/> element; names are
                # XML-attribute-escaped by convertToXmlAttribute.
                s += """<Folder name="%s" />""" % (
                    convertToXmlAttribute(someObject)
                )
        s += """</Folders>""" # Close the folders node
        return s
class GetFoldersAndFilesCommandMixin (object):
    def getFoldersAndFiles(self, resourceType, currentFolder):
        """
        Purpose: command to recieve a list of folders and files
        """
        # Map the virtual path to our local server
        serverPath = mapServerFolder(self.userFilesFolder,currentFolder)
        # Open the folders / files node
        folders = """<Folders>"""
        files = """<Files>"""
        for someObject in os.listdir(serverPath):
            someObjectPath = mapServerFolder(serverPath, someObject)
            if os.path.isdir(someObjectPath):
                folders += """<Folder name="%s" />""" % (
                    convertToXmlAttribute(someObject)
                )
            elif os.path.isfile(someObjectPath):
                # Stat the file once and reuse the result (the original called
                # os.path.getsize a second time and left this variable unused).
                size = os.path.getsize(someObjectPath)
                files += """<File name="%s" size="%s" />""" % (
                    convertToXmlAttribute(someObject),
                    size
                )
        # Close the folders / files node
        folders += """</Folders>"""
        files += """</Files>"""
        return folders + files
class CreateFolderCommandMixin (object):
def createFolder(self, resourceType, currentFolder):
"""
Purpose: command to create a new folder
"""
errorNo = 0; errorMsg ='';
if self.request.has_key("NewFolderName"):
newFolder = self.request.get("NewFolderName", None)
newFolder = sanitizeFolderName (newFolder)
try:
newFolderPath = mapServerFolder(self.userFilesFolder, combinePaths(currentFolder, newFolder))
self.createServerFolder(newFolderPath)
except Exception, e:
errorMsg = str(e).decode('iso-8859-1').encode('utf-8') # warning with encodigns!!!
if hasattr(e,'errno'):
if e.errno==17: #file already exists
errorNo=0
elif e.errno==13: # permission denied
errorNo = 103
elif e.errno==36 or e.errno==2 or e.errno==22: # filename too long / no such file / invalid name
errorNo = 102
else:
errorNo = 110
else:
errorNo = 102
return self.sendErrorNode ( errorNo, errorMsg )
def createServerFolder(self, folderPath):
"Purpose: physically creates a folder on the server"
# No need to check if the parent exists, just create all hierachy
try:
permissions = Config.ChmodOnFolderCreate
if not permissions:
os.makedirs(folderPath)
except AttributeError: #ChmodOnFolderCreate undefined
permissions = 0755
if permissions:
oldumask = os.umask(0)
os.makedirs(folderPath,mode=0755)
os.umask( oldumask )
class UploadFileCommandMixin (object):
	def uploadFile(self, resourceType, currentFolder):
		"""
		Purpose: command to upload files to server (same as FileUpload)

		Validates the uploaded file's extension against the per-type
		allow/deny lists in config, writes it into the mapped folder
		(renaming "name(0001).ext"-style on collision), optionally chmods
		the result, and answers with the JavaScript upload-result snippet.
		"""
		errorNo = 0
		if self.request.has_key("NewFile"):
			# newFile has all the contents we need
			newFile = self.request.get("NewFile", "")
			# Get the file name
			newFileName = newFile.filename
			newFileName = sanitizeFileName( newFileName )
			newFileNameOnly = removeExtension(newFileName)
			newFileExtension = getExtension(newFileName).lower()
			allowedExtensions = Config.AllowedExtensions[resourceType]
			deniedExtensions = Config.DeniedExtensions[resourceType]
			# NOTE: an allow-list takes precedence; the deny-list is only
			# consulted when no allow-list is configured for this type.
			if (allowedExtensions):
				# Check for allowed
				isAllowed = False
				if (newFileExtension in allowedExtensions):
					isAllowed = True
			elif (deniedExtensions):
				# Check for denied
				isAllowed = True
				if (newFileExtension in deniedExtensions):
					isAllowed = False
			else:
				# No extension limitations
				isAllowed = True
			if (isAllowed):
				# Upload to operating system
				# Map the virtual path to the local server path
				currentFolderPath = mapServerFolder(self.userFilesFolder, currentFolder)
				i = 0
				while (True):
					newFilePath = os.path.join (currentFolderPath,newFileName)
					if os.path.exists(newFilePath):
						# name collision: keep incrementing the counter and
						# retry with "<name>(NNNN).<ext>" until a free name
						# is found; errorNo 201 tells the client it was renamed
						i += 1
						newFileName = "%s(%04d).%s" % (
							newFileNameOnly, i, newFileExtension
							)
						errorNo= 201 # file renamed
					else:
						# Read file contents and write to the desired path (similar to php's move_uploaded_file)
						fout = file(newFilePath, 'wb')
						while (True):
							# copy in 100 KB chunks to bound memory usage
							chunk = newFile.file.read(100000)
							if not chunk: break
							fout.write (chunk)
						fout.close()
						if os.path.exists ( newFilePath ):
							doChmod = False
							try:
								doChmod = Config.ChmodOnUpload
								permissions = Config.ChmodOnUpload
							except AttributeError: #ChmodOnUpload undefined
								# default to world-readable when unconfigured
								doChmod = True
								permissions = 0755
							if ( doChmod ):
								# clear umask so the mode is applied verbatim
								oldumask = os.umask(0)
								os.chmod( newFilePath, permissions )
								os.umask( oldumask )
						newFileUrl = self.webUserFilesFolder + currentFolder + newFileName
						# exits the while loop via return
						return self.sendUploadResults( errorNo , newFileUrl, newFileName )
			else:
				return self.sendUploadResults( errorNo = 203, customMsg = "Extension not allowed" )
		else:
			return self.sendUploadResults( errorNo = 202, customMsg = "No File" )
| Python |
#!/usr/bin/env python
"""
FCKeditor - The text editor for Internet - http://www.fckeditor.net
Copyright (C) 2003-2008 Frederico Caldeira Knabben
== BEGIN LICENSE ==
Licensed under the terms of any of the following licenses at your
choice:
- GNU General Public License Version 2 or later (the "GPL")
http://www.gnu.org/licenses/gpl.html
- GNU Lesser General Public License Version 2.1 or later (the "LGPL")
http://www.gnu.org/licenses/lgpl.html
- Mozilla Public License Version 1.1 or later (the "MPL")
http://www.mozilla.org/MPL/MPL-1.1.html
== END LICENSE ==
This is the "File Uploader" for Python
"""
import os
from fckutil import *
from fckcommands import * # default command's implementation
from fckconnector import FCKeditorConnectorBase # import base connector
import config as Config
class FCKeditorQuickUpload( FCKeditorConnectorBase,
		CreateFolderCommandMixin,
		UploadFileCommandMixin,
		BaseHttpMixin, BaseHtmlMixin):
	"""Connector that serves only the QuickUpload command (the Upload tab).

	Fixed: CreateFolderCommandMixin was missing from the bases, yet
	doResponse() needs its createServerFolder() to create the target
	directory on first use.
	"""
	def doResponse(self):
		"Main function. Process the request, set headers and return a string as response."
		# Check if this connector is disabled
		if not(Config.Enabled):
			return self.sendUploadResults(1, "This file uploader is disabled. Please check the \"editor/filemanager/connectors/py/config.py\"")
		command = 'QuickUpload'
		# The file type (from the QueryString, by default 'File').
		resourceType = self.request.get('Type','File')
		currentFolder = getCurrentFolder(self.request.get("CurrentFolder",""))
		# Check for invalid paths
		if currentFolder is None:
			return self.sendUploadResults(102, '', '', "")
		# Check if it is an allowed command
		if ( not command in Config.ConfigAllowedCommands ):
			return self.sendUploadResults( 1, '', '', 'The %s command isn\'t allowed' % command )
		if ( not resourceType in Config.ConfigAllowedTypes ):
			return self.sendUploadResults( 1, '', '', 'Invalid type specified' )
		# Setup paths
		self.userFilesFolder = Config.QuickUploadAbsolutePath[resourceType]
		self.webUserFilesFolder = Config.QuickUploadPath[resourceType]
		if not self.userFilesFolder: # no absolute path given (dangerous...)
			self.userFilesFolder = mapServerPath(self.environ,
					self.webUserFilesFolder)
		# Ensure that the directory exists.
		if not os.path.exists(self.userFilesFolder):
			try:
				# Fixed: was "self.createServerFoldercreateServerFolder", a
				# pasted-twice typo that raised AttributeError at runtime.
				self.createServerFolder( self.userFilesFolder )
			except:
				# Fixed: used to call self.sendError, which belongs to
				# BaseXmlMixin and is not mixed into this class; report the
				# failure through the HTML upload-results channel instead.
				return self.sendUploadResults(1, '', '', "This connector couldn\'t access to local user\'s files directories. Please check the UserFilesAbsolutePath in \"editor/filemanager/connectors/py/config.py\" and try again. ")
		# File upload doesn't have to return XML, so intercept here
		return self.uploadFile(resourceType, currentFolder)
# Running from command line (plain old CGI)
if __name__ == '__main__':
	# Running as a plain-old CGI script: emit headers, a blank line, body.
	try:
		# Create a Connector Instance
		conn = FCKeditorQuickUpload()
		data = conn.doResponse()
		for header in conn.headers:
			# a header entry may be None; skip such placeholders
			if not header is None:
				print '%s: %s' % header
		print
		print data
	except:
		# Last-resort handler: dump the traceback as plain text so the
		# browser shows it instead of an opaque server error.
		print "Content-Type: text/plain"
		print
		import cgi
		cgi.print_exception()
| Python |
#!/usr/bin/env python
"""
FCKeditor - The text editor for Internet - http://www.fckeditor.net
Copyright (C) 2003-2008 Frederico Caldeira Knabben
== BEGIN LICENSE ==
Licensed under the terms of any of the following licenses at your
choice:
- GNU General Public License Version 2 or later (the "GPL")
http://www.gnu.org/licenses/gpl.html
- GNU Lesser General Public License Version 2.1 or later (the "LGPL")
http://www.gnu.org/licenses/lgpl.html
- Mozilla Public License Version 1.1 or later (the "MPL")
http://www.mozilla.org/MPL/MPL-1.1.html
== END LICENSE ==
Base Connector for Python (CGI and WSGI).
See config.py for configuration settings
"""
import cgi, os
from fckutil import *
from fckcommands import * # default command's implementation
from fckoutput import * # base http, xml and html output mixins
import config as Config
class FCKeditorConnectorBase( object ):
	"The base connector class. Subclass it to extend functionality (see Zope example)"
	def __init__(self, environ=None):
		"Parse the incoming request and start with an empty header list."
		self.request = FCKeditorRequest(environ) # Parse request
		self.headers = [] # Clean Headers
		# WSGI hands us an environ dict; plain CGI falls back to os.environ
		self.environ = environ or os.environ
	# local functions
	def setHeader(self, key, value):
		"Queue one (key, value) response header for later emission."
		self.headers.append((key, value))
class FCKeditorRequest(object):
	"A wrapper around the request object"
	def __init__(self, environ):
		if environ: # WSGI
			self.request = cgi.FieldStorage(fp=environ['wsgi.input'],
					environ=environ,
					keep_blank_values=1)
			self.environ = environ
		else: # plain old cgi
			self.environ = os.environ
			self.request = cgi.FieldStorage()
		# Fixed: always initialize get_request. It used to be assigned only
		# inside the branch below, so any request that lacked
		# REQUEST_METHOD/QUERY_STRING in environ raised AttributeError
		# later in has_key()/get().
		self.get_request = {}
		if 'REQUEST_METHOD' in self.environ and 'QUERY_STRING' in self.environ:
			if self.environ['REQUEST_METHOD'].upper()=='POST':
				# we are in a POST, but a GET query string exists too;
				# cgi parses only the POST data by default, so parse the
				# GET QUERY_STRING separately
				self.get_request = cgi.FieldStorage(fp=None,
						environ={
							'REQUEST_METHOD':'GET',
							'QUERY_STRING':self.environ['QUERY_STRING'],
						},
						)
	def has_key(self, key):
		"True when the key appears in either the POST data or the query string."
		# 'in' works on both FieldStorage and dict
		return (key in self.request) or (key in self.get_request)
	def get(self, key, default=None):
		"Return the field's value (or the FieldStorage item itself for uploads)."
		if key in self.request.keys():
			field = self.request[key]
		elif key in self.get_request.keys():
			field = self.get_request[key]
		else:
			return default
		if hasattr(field,"filename") and field.filename: #file upload, do not convert return value
			return field
		else:
			return field.value
| Python |
#!/usr/bin/env python
"""
FCKeditor - The text editor for Internet - http://www.fckeditor.net
Copyright (C) 2003-2008 Frederico Caldeira Knabben
== BEGIN LICENSE ==
Licensed under the terms of any of the following licenses at your
choice:
- GNU General Public License Version 2 or later (the "GPL")
http://www.gnu.org/licenses/gpl.html
- GNU Lesser General Public License Version 2.1 or later (the "LGPL")
http://www.gnu.org/licenses/lgpl.html
- Mozilla Public License Version 1.1 or later (the "MPL")
http://www.mozilla.org/MPL/MPL-1.1.html
== END LICENSE ==
Connector for Python (CGI and WSGI).
See config.py for configuration settings
"""
import os
from fckutil import *
from fckcommands import * # default command's implementation
from fckoutput import * # base http, xml and html output mixins
from fckconnector import FCKeditorConnectorBase # import base connector
import config as Config
class FCKeditorConnector( FCKeditorConnectorBase,
		GetFoldersCommandMixin,
		GetFoldersAndFilesCommandMixin,
		CreateFolderCommandMixin,
		UploadFileCommandMixin,
		BaseHttpMixin, BaseXmlMixin, BaseHtmlMixin ):
	"The Standard connector class."
	def doResponse(self):
		"Main function. Process the request, set headers and return a string as response."
		s = ""
		# Check if this connector is disabled
		if not(Config.Enabled):
			return self.sendError(1, "This connector is disabled. Please check the connector configurations in \"editor/filemanager/connectors/py/config.py\" and try again.")
		# Make sure we have valid inputs
		for key in ("Command","Type","CurrentFolder"):
			if not self.request.has_key (key):
				return
		# Get command, resource type and current folder
		command = self.request.get("Command")
		resourceType = self.request.get("Type")
		currentFolder = getCurrentFolder(self.request.get("CurrentFolder"))
		# Check for invalid paths
		if currentFolder is None:
			return self.sendError(102, "")
		# Check if it is an allowed command
		if ( not command in Config.ConfigAllowedCommands ):
			return self.sendError( 1, 'The %s command isn\'t allowed' % command )
		if ( not resourceType in Config.ConfigAllowedTypes ):
			return self.sendError( 1, 'Invalid type specified' )
		# Setup paths
		if command == "QuickUpload":
			self.userFilesFolder = Config.QuickUploadAbsolutePath[resourceType]
			self.webUserFilesFolder = Config.QuickUploadPath[resourceType]
		else:
			self.userFilesFolder = Config.FileTypesAbsolutePath[resourceType]
			self.webUserFilesFolder = Config.FileTypesPath[resourceType]
		if not self.userFilesFolder: # no absolute path given (dangerous...)
			self.userFilesFolder = mapServerPath(self.environ,
					self.webUserFilesFolder)
		# Ensure that the directory exists.
		if not os.path.exists(self.userFilesFolder):
			try:
				# Fixed: was "self.createServerFoldercreateServerFolder", a
				# pasted-twice typo that raised AttributeError at runtime.
				self.createServerFolder( self.userFilesFolder )
			except:
				return self.sendError(1, "This connector couldn\'t access to local user\'s files directories. Please check the UserFilesAbsolutePath in \"editor/filemanager/connectors/py/config.py\" and try again. ")
		# File upload doesn't have to return XML, so intercept here
		if (command == "FileUpload"):
			return self.uploadFile(resourceType, currentFolder)
		# Create Url
		url = combinePaths( self.webUserFilesFolder, currentFolder )
		# Begin XML
		s += self.createXmlHeader(command, resourceType, currentFolder, url)
		# Execute the command
		selector = {"GetFolders": self.getFolders,
			"GetFoldersAndFiles": self.getFoldersAndFiles,
			"CreateFolder": self.createFolder,
			}
		s += selector[command](resourceType, currentFolder)
		s += self.createXmlFooter()
		return s
# Running from command line (plain old CGI)
if __name__ == '__main__':
	# Running as a plain-old CGI script: emit headers, a blank line, body.
	try:
		# Create a Connector Instance
		conn = FCKeditorConnector()
		data = conn.doResponse()
		for header in conn.headers:
			# each header is a (name, value) tuple
			print '%s: %s' % header
		print
		print data
	except:
		# Last-resort handler: dump the traceback as plain text so the
		# browser shows it instead of an opaque server error.
		print "Content-Type: text/plain"
		print
		import cgi
		cgi.print_exception()
| Python |
#!/usr/bin/env python
"""
FCKeditor - The text editor for Internet - http://www.fckeditor.net
Copyright (C) 2003-2008 Frederico Caldeira Knabben
== BEGIN LICENSE ==
Licensed under the terms of any of the following licenses at your
choice:
- GNU General Public License Version 2 or later (the "GPL")
http://www.gnu.org/licenses/gpl.html
- GNU Lesser General Public License Version 2.1 or later (the "LGPL")
http://www.gnu.org/licenses/lgpl.html
- Mozilla Public License Version 1.1 or later (the "MPL")
http://www.mozilla.org/MPL/MPL-1.1.html
== END LICENSE ==
Connector for Python and Zope.
This code was not tested at all.
It just was ported from pre 2.5 release, so for further reference see
\editor\filemanager\browser\default\connectors\py\connector.py in previous
releases.
"""
from fckutil import *
from connector import *
import config as Config
class FCKeditorConnectorZope(FCKeditorConnector):
	"""
	Zope version of FCKeditorConnector.
	WARNING: ported from a pre-2.5 release and never tested (see module notes).
	"""
	# Allow access (Zope)
	__allow_access_to_unprotected_subobjects__ = 1
	def __init__(self, context=None):
		"""
		Constructor
		"""
		FCKeditorConnector.__init__(self, environ=None) # call superclass constructor
		# Instance Attributes
		self.context = context
		self.request = FCKeditorRequest(context)
		# Fixed: these caches were read by getZopeRootContext() and
		# getZopeUploadContext() but never initialized, so the first call
		# raised AttributeError.
		self.zopeRootContext = None
		self.zopeUploadContext = None
	def getZopeRootContext(self):
		"Return (and cache) the Zope physical root object."
		if self.zopeRootContext is None:
			self.zopeRootContext = self.context.getPhysicalRoot()
		return self.zopeRootContext
	def getZopeUploadContext(self):
		"Return (and cache) the Zope folder matching self.userFilesFolder."
		if self.zopeUploadContext is None:
			folderNames = self.userFilesFolder.split("/")
			c = self.getZopeRootContext()
			for folderName in folderNames:
				if (folderName != ""):
					c = c[folderName]
			self.zopeUploadContext = c
		return self.zopeUploadContext
	def setHeader(self, key, value):
		# Zope manages the response itself: set headers directly on it
		# instead of queueing them in self.headers.
		self.context.REQUEST.RESPONSE.setHeader(key, value)
	def getFolders(self, resourceType, currentFolder):
		"Return a <Folders> node listing the Zope subfolders."
		# Open the folders node
		s = ""
		s += """<Folders>"""
		zopeFolder = self.findZopeFolder(resourceType, currentFolder)
		for (name, o) in zopeFolder.objectItems(["Folder"]):
			s += """<Folder name="%s" />""" % (
				convertToXmlAttribute(name)
				)
		# Close the folders node
		s += """</Folders>"""
		return s
	def getZopeFoldersAndFiles(self, resourceType, currentFolder):
		# Fixed: used to call self.getZopeFolders, which does not exist;
		# the folder listing lives in getFolders() above.
		folders = self.getFolders(resourceType, currentFolder)
		files = self.getZopeFiles(resourceType, currentFolder)
		s = folders + files
		return s
	def getZopeFiles(self, resourceType, currentFolder):
		"Return a <Files> node listing Zope File/Image objects (size in KB)."
		# Open the files node
		s = ""
		s += """<Files>"""
		zopeFolder = self.findZopeFolder(resourceType, currentFolder)
		for (name, o) in zopeFolder.objectItems(["File","Image"]):
			s += """<File name="%s" size="%s" />""" % (
				convertToXmlAttribute(name),
				((o.get_size() / 1024) + 1)
				)
		# Close the files node
		s += """</Files>"""
		return s
	def findZopeFolder(self, resourceType, folderName):
		# returns the context of the resource / folder, creating the
		# resource-type folder on demand
		zopeFolder = self.getZopeUploadContext()
		folderName = self.removeFromStart(folderName, "/")
		folderName = self.removeFromEnd(folderName, "/")
		if (resourceType != ""):
			try:
				zopeFolder = zopeFolder[resourceType]
			except:
				zopeFolder.manage_addProduct["OFSP"].manage_addFolder(id=resourceType, title=resourceType)
				zopeFolder = zopeFolder[resourceType]
		if (folderName != ""):
			folderNames = folderName.split("/")
			for folderName in folderNames:
				zopeFolder = zopeFolder[folderName]
		return zopeFolder
	def createFolder(self, resourceType, currentFolder):
		"Create a Zope folder named by the request's NewFolderName field."
		# Find out where we are
		zopeFolder = self.findZopeFolder(resourceType, currentFolder)
		errorNo = 0
		errorMsg = ""
		if self.request.has_key("NewFolderName"):
			newFolder = self.request.get("NewFolderName", None)
			zopeFolder.manage_addProduct["OFSP"].manage_addFolder(id=newFolder, title=newFolder)
		else:
			errorNo = 102
		return self.sendErrorNode ( errorNo, errorMsg )
	def uploadFile(self, resourceType, currentFolder, count=None):
		"Store the uploaded file as a Zope File object, retrying on id clashes."
		zopeFolder = self.findZopeFolder(resourceType, currentFolder)
		file = self.request.get("NewFile", None)
		# NOTE(review): getFileName is assumed to come from fckutil
		# (imported with *), like removeExtension/getExtension — confirm.
		fileName = getFileName(file.filename)
		fileNameOnly = removeExtension(fileName)
		fileExtension = getExtension(fileName).lower()
		if (count):
			nid = "%s.%s.%s" % (fileNameOnly, count, fileExtension)
		else:
			nid = fileName
		title = nid
		try:
			zopeFolder.manage_addProduct['OFSP'].manage_addFile(
				id=nid,
				title=title,
				file=file.read()
				)
		except:
			if (count):
				count += 1
			else:
				count = 1
			# Fixed: used to call self.zopeFileUpload, a method that does
			# not exist in this class (leftover from the pre-2.5 port);
			# retry this method with a new suffix count instead.
			return self.uploadFile(resourceType, currentFolder, count)
		return self.sendUploadResults( 0 )
class FCKeditorRequest(object):
	"A thin wrapper delegating to the Zope REQUEST object"
	def __init__(self, context=None):
		self.request = context.REQUEST
	def has_key(self, key):
		"Delegate key lookup to the underlying REQUEST."
		return self.request.has_key(key)
	def get(self, key, default=None):
		"Delegate value lookup (with default) to the underlying REQUEST."
		return self.request.get(key, default)
"""
Running from zope, you will need to modify this connector.
If you have uploaded the FCKeditor into Zope (like me), you need to
move this connector out of Zope, and replace the "connector" with an
alias as below. The key to it is to pass the Zope context in, as
we then have a link to the Zope context.
## Script (Python) "connector.py"
##bind container=container
##bind context=context
##bind namespace=
##bind script=script
##bind subpath=traverse_subpath
##parameters=*args, **kws
##title=ALIAS
##
import Products.zope as connector
return connector.FCKeditorConnectorZope(context=context).doResponse()
"""
| Python |
#!/usr/bin/env python
"""
FCKeditor - The text editor for Internet - http://www.fckeditor.net
Copyright (C) 2003-2008 Frederico Caldeira Knabben
== BEGIN LICENSE ==
Licensed under the terms of any of the following licenses at your
choice:
- GNU General Public License Version 2 or later (the "GPL")
http://www.gnu.org/licenses/gpl.html
- GNU Lesser General Public License Version 2.1 or later (the "LGPL")
http://www.gnu.org/licenses/lgpl.html
- Mozilla Public License Version 1.1 or later (the "MPL")
http://www.mozilla.org/MPL/MPL-1.1.html
== END LICENSE ==
Connector for Python (CGI and WSGI).
"""
from time import gmtime, strftime
import string
def escape(text, replace=None):
	"""
	Convert the special characters '&', '<', '>' and '"' to their
	HTML/XML entities (&amp;, &lt;, &gt;, &quot;).

	*replace* may be any callable with the signature (text, old, new);
	by default plain string replacement is used.

	Fixed: the replacement targets had collapsed to the literal characters
	themselves (e.g. replacing '&' with '&'), turning this function into a
	no-op and leaving XML attribute values unescaped.
	"""
	if replace is None:
		replace = lambda s, old, new: s.replace(old, new)
	# '&' must be done first, otherwise the other entities get double-escaped
	text = replace(text, '&', '&amp;')
	text = replace(text, '<', '&lt;')
	text = replace(text, '>', '&gt;')
	text = replace(text, '"', '&quot;')
	return text
def convertToXmlAttribute(value):
	"Escape *value* for embedding inside an XML attribute; None becomes ''."
	if value is None:
		value = ""
	return escape(value)
class BaseHttpMixin(object):
	def setHttpHeaders(self, content_type='text/xml'):
		"Queue the standard anti-caching headers plus the response content type."
		headers = [
			# Date in the past prevents the browser from caching the result
			('Expires', 'Mon, 26 Jul 1997 05:00:00 GMT'),
			# always modified
			('Last-Modified', strftime("%a, %d %b %Y %H:%M:%S GMT", gmtime())),
			# HTTP/1.1
			('Cache-Control', 'no-store, no-cache, must-revalidate'),
			('Cache-Control', 'post-check=0, pre-check=0'),
			# HTTP/1.0
			('Pragma', 'no-cache'),
			# Set the response format.
			('Content-Type', content_type + '; charset=utf-8'),
		]
		for name, value in headers:
			self.setHeader(name, value)
		return
class BaseXmlMixin(object):
	def createXmlHeader(self, command, resourceType, currentFolder, url):
		"Return the XML declaration plus the opening <Connector>/<CurrentFolder> nodes."
		self.setHttpHeaders()
		parts = [
			# the XML document header
			'<?xml version="1.0" encoding="utf-8" ?>',
			# the main connector node
			'<Connector command="%s" resourceType="%s">' % (command, resourceType),
			# the current folder node
			'<CurrentFolder path="%s" url="%s" />' % (
				convertToXmlAttribute(currentFolder),
				convertToXmlAttribute(url),
			),
		]
		return "".join(parts)
	def createXmlFooter(self):
		"Return the closing </Connector> tag."
		return "</Connector>"
	def sendError(self, number, text):
		"Return a complete, minimal XML document carrying a single error node."
		self.setHttpHeaders()
		return ('<?xml version="1.0" encoding="utf-8" ?>'
			+ "<Connector>"
			+ self.sendErrorNode(number, text)
			+ "</Connector>")
	def sendErrorNode(self, number, text):
		"Return one <Error> node with the numeric code and escaped message."
		return '<Error number="%s" text="%s" />' % (number, convertToXmlAttribute(text))
class BaseHtmlMixin(object):
	def sendUploadResults( self, errorNo = 0, fileUrl = '', fileName = '', customMsg = '' ):
		"This is the function that sends the results of the uploading process"
		# Fixed: the docstring above used to sit *after* the
		# setHttpHeaders() call, making it a no-op expression statement
		# instead of the method's documentation.
		self.setHttpHeaders("text/html")
		# The returned snippet relaxes document.domain until it can reach
		# the opener, then reports the result to the parent frame.
		return """<script type="text/javascript">
	(function()
	{
		var d = document.domain ;
		while ( true )
		{
			// Test if we can access a parent property.
			try
			{
				var test = window.top.opener.document.domain ;
				break ;
			}
			catch( e ) {}
			// Remove a domain part: www.mytest.example.com => mytest.example.com => example.com ...
			d = d.replace( /.*?(?:\.|$)/, '' ) ;
			if ( d.length == 0 )
				break ;		// It was not able to detect the domain.
			try
			{
				document.domain = d ;
			}
			catch (e)
			{
				break ;
			}
		}
	})() ;
	window.parent.OnUploadCompleted(%(errorNumber)s,"%(fileUrl)s","%(fileName)s","%(customMsg)s");
</script>""" % {
			'errorNumber': errorNo,
			'fileUrl': fileUrl.replace ('"', '\\"'),
			'fileName': fileName.replace ( '"', '\\"' ) ,
			'customMsg': customMsg.replace ( '"', '\\"' ),
		}
| Python |
#!/usr/bin/env python
"""
* FCKeditor - The text editor for Internet - http://www.fckeditor.net
* Copyright (C) 2003-2008 Frederico Caldeira Knabben
*
* == BEGIN LICENSE ==
*
* Licensed under the terms of any of the following licenses at your
* choice:
*
* - GNU General Public License Version 2 or later (the "GPL")
* http://www.gnu.org/licenses/gpl.html
*
* - GNU Lesser General Public License Version 2.1 or later (the "LGPL")
* http://www.gnu.org/licenses/lgpl.html
*
* - Mozilla Public License Version 1.1 or later (the "MPL")
* http://www.mozilla.org/MPL/MPL-1.1.html
*
* == END LICENSE ==
*
* Configuration file for the File Manager Connector for Python
"""
# INSTALLATION NOTE: You must set up your server environment accordingly to run
# python scripts. This connector requires Python 2.4 or greater.
#
# Supported operation modes:
# * WSGI (recommended): You'll need apache + mod_python + modpython_gateway
# or any web server capable of the WSGI python standard
# * Plain Old CGI: Any server capable of running standard python scripts
# (although mod_python is recommended for performance)
# This was the previous connector version operation mode
#
# If you're using the Apache web server, rename the htaccess.txt to .htaccess,
# and set the proper options and paths.
# For WSGI and mod_python, you may need to download modpython_gateway from:
# http://projects.amor.org/misc/svn/modpython_gateway.py and copy it in this
# directory.
# SECURITY: You must explicitly enable this "connector". (Set it to "True").
# WARNING: don't just set "ConfigIsEnabled = True", you must be sure that only
# authenticated users can access this file or use some kind of session checking.
# Master switch: the connector refuses every request while this is False.
Enabled = False
# Path to user files relative to the document root.
UserFilesPath = '/userfiles/'
# Fill the following value if you prefer to specify the absolute path for the
# user files directory. Useful if you are using a virtual directory, symbolic
# link or alias. Examples: 'C:\\MySite\\userfiles\\' or '/root/mysite/userfiles/'.
# Attention: The above 'UserFilesPath' must point to the same directory.
# WARNING: GetRootPath may not work in virtual or mod_python configurations, and
# may not be thread safe. Use this configuration parameter instead.
UserFilesAbsolutePath = ''
# Due to security issues with Apache modules, it is recommended to leave the
# following setting enabled.
ForceSingleExtension = True
# What the user can do with this connector
ConfigAllowedCommands = [ 'QuickUpload', 'FileUpload', 'GetFolders', 'GetFoldersAndFiles', 'CreateFolder' ]
# Allowed Resource Types
ConfigAllowedTypes = ['File', 'Image', 'Flash', 'Media']
# After file is uploaded, sometimes it is required to change its permissions
# so that it was possible to access it at the later time.
# If possible, it is recommended to set more restrictive permissions, like 0755.
# Set to 0 to disable this feature.
# Note: not needed on Windows-based servers.
# NOTE: 0755 is a Python 2 octal literal (would be 0o755 in Python 3).
ChmodOnUpload = 0755
# See comments above.
# Used when creating folders that does not exist.
ChmodOnFolderCreate = 0755
# Do not touch this 3 lines, see "Configuration settings for each Resource Type"
AllowedExtensions = {}; DeniedExtensions = {};
FileTypesPath = {}; FileTypesAbsolutePath = {};
QuickUploadPath = {}; QuickUploadAbsolutePath = {};
# Configuration settings for each Resource Type
#
# - AllowedExtensions: the possible extensions that can be allowed.
# If it is empty then any file type can be uploaded.
# - DeniedExtensions: The extensions that won't be allowed.
# If it is empty then no restrictions are done here.
#
# For a file to be uploaded it has to fulfill both the AllowedExtensions
# and DeniedExtensions (that's it: not being denied) conditions.
#
# - FileTypesPath: the virtual folder relative to the document root where
# these resources will be located.
# Attention: It must start and end with a slash: '/'
#
# - FileTypesAbsolutePath: the physical path to the above folder. It must be
# an absolute path.
# If it's an empty string then it will be autocalculated.
# Useful if you are using a virtual directory, symbolic link or alias.
# Examples: 'C:\\MySite\\userfiles\\' or '/root/mysite/userfiles/'.
# Attention: The above 'FileTypesPath' must point to the same directory.
# Attention: It must end with a slash: '/'
#
#
# - QuickUploadPath: the virtual folder relative to the document root where
# these resources will be uploaded using the Upload tab in the resources
# dialogs.
# Attention: It must start and end with a slash: '/'
#
# - QuickUploadAbsolutePath: the physical path to the above folder. It must be
# an absolute path.
# If it's an empty string then it will be autocalculated.
# Useful if you are using a virtual directory, symbolic link or alias.
# Examples: 'C:\\MySite\\userfiles\\' or '/root/mysite/userfiles/'.
# Attention: The above 'QuickUploadPath' must point to the same directory.
# Attention: It must end with a slash: '/'
# --- 'File' resource type: generic documents/archives ---
AllowedExtensions['File'] = ['7z','aiff','asf','avi','bmp','csv','doc','fla','flv','gif','gz','gzip','jpeg','jpg','mid','mov','mp3','mp4','mpc','mpeg','mpg','ods','odt','pdf','png','ppt','pxd','qt','ram','rar','rm','rmi','rmvb','rtf','sdc','sitd','swf','sxc','sxw','tar','tgz','tif','tiff','txt','vsd','wav','wma','wmv','xls','xml','zip']
DeniedExtensions['File'] = []
FileTypesPath['File'] = UserFilesPath + 'file/'
# absolute path is derived only when UserFilesAbsolutePath is configured
FileTypesAbsolutePath['File'] = (not UserFilesAbsolutePath == '') and (UserFilesAbsolutePath + 'file/') or ''
QuickUploadPath['File'] = FileTypesPath['File']
QuickUploadAbsolutePath['File'] = FileTypesAbsolutePath['File']
# --- 'Image' resource type ---
AllowedExtensions['Image'] = ['bmp','gif','jpeg','jpg','png']
DeniedExtensions['Image'] = []
FileTypesPath['Image'] = UserFilesPath + 'image/'
FileTypesAbsolutePath['Image'] = (not UserFilesAbsolutePath == '') and UserFilesAbsolutePath + 'image/' or ''
QuickUploadPath['Image'] = FileTypesPath['Image']
QuickUploadAbsolutePath['Image']= FileTypesAbsolutePath['Image']
# --- 'Flash' resource type ---
AllowedExtensions['Flash'] = ['swf','flv']
DeniedExtensions['Flash'] = []
FileTypesPath['Flash'] = UserFilesPath + 'flash/'
FileTypesAbsolutePath['Flash'] = ( not UserFilesAbsolutePath == '') and UserFilesAbsolutePath + 'flash/' or ''
QuickUploadPath['Flash'] = FileTypesPath['Flash']
QuickUploadAbsolutePath['Flash']= FileTypesAbsolutePath['Flash']
# --- 'Media' resource type: audio/video (plus a few image formats) ---
AllowedExtensions['Media'] = ['aiff','asf','avi','bmp','fla', 'flv','gif','jpeg','jpg','mid','mov','mp3','mp4','mpc','mpeg','mpg','png','qt','ram','rm','rmi','rmvb','swf','tif','tiff','wav','wma','wmv']
DeniedExtensions['Media'] = []
FileTypesPath['Media'] = UserFilesPath + 'media/'
FileTypesAbsolutePath['Media'] = ( not UserFilesAbsolutePath == '') and UserFilesAbsolutePath + 'media/' or ''
QuickUploadPath['Media'] = FileTypesPath['Media']
QuickUploadAbsolutePath['Media']= FileTypesAbsolutePath['Media']
| Python |
#!/usr/bin/env python
"""
FCKeditor - The text editor for Internet - http://www.fckeditor.net
Copyright (C) 2003-2008 Frederico Caldeira Knabben
== BEGIN LICENSE ==
Licensed under the terms of any of the following licenses at your
choice:
- GNU General Public License Version 2 or later (the "GPL")
http://www.gnu.org/licenses/gpl.html
- GNU Lesser General Public License Version 2.1 or later (the "LGPL")
http://www.gnu.org/licenses/lgpl.html
- Mozilla Public License Version 1.1 or later (the "MPL")
http://www.mozilla.org/MPL/MPL-1.1.html
== END LICENSE ==
Base Connector for Python (CGI and WSGI).
See config.py for configuration settings
"""
import cgi, os
from fckutil import *
from fckcommands import * # default command's implementation
from fckoutput import * # base http, xml and html output mixins
import config as Config
class FCKeditorConnectorBase( object ):
	"The base connector class. Subclass it to extend functionality (see Zope example)"
	def __init__(self, environ=None):
		"Parse the incoming request and start with an empty header list."
		self.request = FCKeditorRequest(environ) # Parse request
		self.headers = [] # Clean Headers
		# WSGI hands us an environ dict; plain CGI falls back to os.environ
		self.environ = environ or os.environ
	# local functions
	def setHeader(self, key, value):
		"Queue one (key, value) response header for later emission."
		self.headers.append((key, value))
class FCKeditorRequest(object):
	"A wrapper around the request object"
	def __init__(self, environ):
		if environ: # WSGI
			self.request = cgi.FieldStorage(fp=environ['wsgi.input'],
					environ=environ,
					keep_blank_values=1)
			self.environ = environ
		else: # plain old cgi
			self.environ = os.environ
			self.request = cgi.FieldStorage()
		# Fixed: always initialize get_request. It used to be assigned only
		# inside the branch below, so any request that lacked
		# REQUEST_METHOD/QUERY_STRING in environ raised AttributeError
		# later in has_key()/get().
		self.get_request = {}
		if 'REQUEST_METHOD' in self.environ and 'QUERY_STRING' in self.environ:
			if self.environ['REQUEST_METHOD'].upper()=='POST':
				# we are in a POST, but a GET query string exists too;
				# cgi parses only the POST data by default, so parse the
				# GET QUERY_STRING separately
				self.get_request = cgi.FieldStorage(fp=None,
						environ={
							'REQUEST_METHOD':'GET',
							'QUERY_STRING':self.environ['QUERY_STRING'],
						},
						)
	def has_key(self, key):
		"True when the key appears in either the POST data or the query string."
		# 'in' works on both FieldStorage and dict
		return (key in self.request) or (key in self.get_request)
	def get(self, key, default=None):
		"Return the field's value (or the FieldStorage item itself for uploads)."
		if key in self.request.keys():
			field = self.request[key]
		elif key in self.get_request.keys():
			field = self.get_request[key]
		else:
			return default
		if hasattr(field,"filename") and field.filename: #file upload, do not convert return value
			return field
		else:
			return field.value
| Python |
#!/usr/bin/env python
"""
FCKeditor - The text editor for Internet - http://www.fckeditor.net
Copyright (C) 2003-2008 Frederico Caldeira Knabben
== BEGIN LICENSE ==
Licensed under the terms of any of the following licenses at your
choice:
- GNU General Public License Version 2 or later (the "GPL")
http://www.gnu.org/licenses/gpl.html
- GNU Lesser General Public License Version 2.1 or later (the "LGPL")
http://www.gnu.org/licenses/lgpl.html
- Mozilla Public License Version 1.1 or later (the "MPL")
http://www.mozilla.org/MPL/MPL-1.1.html
== END LICENSE ==
Connector for Python (CGI and WSGI).
See config.py for configuration settings
"""
import os
from fckutil import *
from fckcommands import * # default command's implementation
from fckoutput import * # base http, xml and html output mixins
from fckconnector import FCKeditorConnectorBase # import base connector
import config as Config
class FCKeditorConnector( FCKeditorConnectorBase,
        GetFoldersCommandMixin,
        GetFoldersAndFilesCommandMixin,
        CreateFolderCommandMixin,
        UploadFileCommandMixin,
        BaseHttpMixin, BaseXmlMixin, BaseHtmlMixin ):
    "The Standard connector class."

    def doResponse(self):
        """Main function. Process the request, set headers and return a string as response.

        Returns the XML document for the browsing commands, the HTML produced
        by uploadFile() for "FileUpload", or an XML error document when
        validation fails (None when mandatory parameters are missing).
        """
        s = ""
        # Refuse to serve anything while the connector is switched off in config.py.
        if not(Config.Enabled):
            return self.sendError(1, "This connector is disabled. Please check the connector configurations in \"editor/filemanager/connectors/py/config.py\" and try again.")
        # All three parameters are mandatory; bail out when any is missing.
        for key in ("Command","Type","CurrentFolder"):
            if not self.request.has_key (key):
                return
        # Get command, resource type and current folder
        command = self.request.get("Command")
        resourceType = self.request.get("Type")
        currentFolder = getCurrentFolder(self.request.get("CurrentFolder"))
        # getCurrentFolder() returns None for traversal attempts ("..", "\\").
        if currentFolder is None:
            return self.sendError(102, "")
        # Only whitelisted commands and resource types may run.
        if ( not command in Config.ConfigAllowedCommands ):
            return self.sendError( 1, 'The %s command isn\'t allowed' % command )
        if ( not resourceType in Config.ConfigAllowedTypes ):
            return self.sendError( 1, 'Invalid type specified' )
        # Setup paths: QuickUpload has its own target directories.
        if command == "QuickUpload":
            self.userFilesFolder = Config.QuickUploadAbsolutePath[resourceType]
            self.webUserFilesFolder = Config.QuickUploadPath[resourceType]
        else:
            self.userFilesFolder = Config.FileTypesAbsolutePath[resourceType]
            self.webUserFilesFolder = Config.FileTypesPath[resourceType]
        if not self.userFilesFolder: # no absolute path given (dangerous...)
            self.userFilesFolder = mapServerPath(self.environ,
                self.webUserFilesFolder)
        # Ensure that the target directory exists.
        if not os.path.exists(self.userFilesFolder):
            try:
                # BUG FIX: was "self.createServerFoldercreateServerFolder(...)",
                # a duplicated-name typo that raised AttributeError and turned
                # every missing-folder case into the access error below.
                self.createServerFolder( self.userFilesFolder )
            except:
                return self.sendError(1, "This connector couldn\'t access to local user\'s files directories. Please check the UserFilesAbsolutePath in \"editor/filemanager/connectors/py/config.py\" and try again. ")
        # File upload doesn't have to return XML, so intercept here
        if (command == "FileUpload"):
            return self.uploadFile(resourceType, currentFolder)
        # Public URL of the current folder.
        url = combinePaths( self.webUserFilesFolder, currentFolder )
        # Begin XML
        s += self.createXmlHeader(command, resourceType, currentFolder, url)
        # Dispatch the XML-producing commands (all validated above).
        selector = {"GetFolders": self.getFolders,
                    "GetFoldersAndFiles": self.getFoldersAndFiles,
                    "CreateFolder": self.createFolder,
                    }
        s += selector[command](resourceType, currentFolder)
        s += self.createXmlFooter()
        return s
# Running from command line (plain old CGI)
if __name__ == '__main__':
 try:
  # Create a Connector Instance
  conn = FCKeditorConnector()
  # doResponse() also fills conn.headers as a side effect.
  data = conn.doResponse()
  # Emit the CGI headers, a blank separator line, then the body
  # (Python 2 print statements).
  for header in conn.headers:
   print '%s: %s' % header
  print
  print data
 except:
  # Last-resort CGI error page: plain-text traceback via the cgi module.
  print "Content-Type: text/plain"
  print
  import cgi
  cgi.print_exception()
| Python |
#!/usr/bin/env python
"""
FCKeditor - The text editor for Internet - http://www.fckeditor.net
Copyright (C) 2003-2008 Frederico Caldeira Knabben
== BEGIN LICENSE ==
Licensed under the terms of any of the following licenses at your
choice:
- GNU General Public License Version 2 or later (the "GPL")
http://www.gnu.org/licenses/gpl.html
- GNU Lesser General Public License Version 2.1 or later (the "LGPL")
http://www.gnu.org/licenses/lgpl.html
- Mozilla Public License Version 1.1 or later (the "MPL")
http://www.mozilla.org/MPL/MPL-1.1.html
== END LICENSE ==
Utility functions for the File Manager Connector for Python
"""
import string, re
import os
import config as Config
# Generic manipulation functions
def removeExtension(fileName):
    "Return *fileName* without its last extension (ValueError if it has no dot)."
    return fileName[:fileName.rindex(".")]
def getExtension(fileName):
    "Return the text after the last dot of *fileName* (ValueError if no dot)."
    return fileName[fileName.rindex(".") + 1:]
def removeFromStart(string, char):
    "Strip every leading character of *string* found in *char* (lstrip semantics)."
    result = string
    while result and result[0] in char:
        result = result[1:]
    return result
def removeFromEnd(string, char):
    "Strip every trailing character of *string* found in *char* (rstrip semantics)."
    result = string
    while result and result[-1] in char:
        result = result[:-1]
    return result
# Path functions
def combinePaths( basePath, folder ):
    "Join *basePath* and *folder* with exactly one slash between them."
    # Equivalent to removeFromEnd(basePath,'/') + '/' + removeFromStart(folder,'/'),
    # with the two trivial helpers inlined as the str methods they wrap.
    return basePath.rstrip('/') + '/' + folder.lstrip('/')
def getFileName(filename):
    " Purpose: helper function to extrapolate the filename "
    # Keep only what follows the last separator, covering both unix ("/")
    # and windows ("\\") style paths.
    for separator in ("/", "\\"):
        filename = filename.rsplit(separator, 1)[-1]
    return filename
def sanitizeFolderName( newFolderName ):
    "Do a cleanup of the folder name to avoid possible problems"
    # Remove . \ / | : ? * " < > and control characters (C0 \x00-\x1f, C1 \x7f-\x9f).
    # BUG FIX: the original character class was negated ("[^...]"), which
    # replaced every *printable* character with "_" and left the control
    # characters untouched -- the opposite of the documented intent.
    return re.sub( r'(?u)[.\\/|:?*"<>\x00-\x1f\x7f-\x9f]', '_', newFolderName )
def sanitizeFileName( newFileName ):
    "Do a cleanup of the file name to avoid possible problems"
    # Replace dots in the name with underscores (only one dot can be there... security issue).
    if ( Config.ForceSingleExtension ): # remove dots
        # BUG FIX: the pattern kept its PHP "/.../" delimiters, so it required a
        # literal slash and never matched a plain file name; the inner dots were
        # left in place, defeating ForceSingleExtension.
        newFileName = re.sub ( r'\.(?![^.]*$)', '_', newFileName )
    newFileName = newFileName.replace('\\','/') # convert windows to unix path
    newFileName = os.path.basename (newFileName) # strip directories
    # Remove \ / | : ? * " < > and control characters.
    # BUG FIX: PHP delimiters and the negated character class removed here too
    # (see sanitizeFolderName); the class now matches the forbidden characters.
    return re.sub ( r'(?u)[\\/|:?*"<>\x00-\x1f\x7f-\x9f]', '_', newFileName )
def getCurrentFolder(currentFolder):
    """Normalise the requested folder to the canonical '/.../' form.

    Returns '/' for empty input, and None when the path looks like a
    traversal attempt ('..') or uses windows separators ('\\').
    MODERNISATION: the removed-in-Python-3 '<>' operator is replaced with
    the equivalent startswith/endswith tests; behavior is unchanged.
    """
    if not currentFolder:
        currentFolder = '/'
    # Check the current folder syntax (must begin and end with a slash).
    if not currentFolder.endswith("/"):
        currentFolder += "/"
    if not currentFolder.startswith("/"):
        currentFolder = "/" + currentFolder
    # Ensure the folder path has no double-slashes
    while '//' in currentFolder:
        currentFolder = currentFolder.replace('//','/')
    # Check for invalid folder paths (..)
    if '..' in currentFolder or '\\' in currentFolder:
        return None
    return currentFolder
def mapServerPath( environ, url):
    " Emulate the asp Server.mapPath function. Given an url path return the physical directory that it corresponds to "
    # This isn't correct but for the moment there's no other solution
    # If this script is under a virtual directory or symlink it will detect the problem and stop
    root = getRootPath(environ)
    return combinePaths( root, url )
def mapServerFolder(resourceTypePath, folderPath):
    "Join a resource-type root with a folder path, single slash in between."
    # Same contract as combinePaths(), written out with the underlying
    # str operations.
    return resourceTypePath.rstrip('/') + '/' + folderPath.lstrip('/')
def getRootPath(environ):
    "Purpose: returns the root path on the server"
    # WARNING: this may not be thread safe, and doesn't work w/ VirtualServer/mod_python
    # Use Config.UserFilesAbsolutePath instead
    if 'DOCUMENT_ROOT' in environ:
        return environ['DOCUMENT_ROOT']
    else:
        realPath = os.path.realpath( './' )
        selfPath = environ['SCRIPT_FILENAME']
        selfPath = selfPath [ : selfPath.rfind( '/' ) ]
        selfPath = selfPath.replace( '/', os.path.sep)
        position = realPath.find(selfPath)
        # This can check only that this script isn't run from a virtual dir
        # But it avoids the problems that arise if it isn't checked
        # BUG FIX: a stray debugging statement "raise realPath" sat here and
        # unconditionally aborted this branch before the check below could run.
        # (Also: dict.has_key() and "<>" replaced by "in" and "!=".)
        if ( position < 0 or position != len(realPath) - len(selfPath) or realPath[ : position ]==''):
            raise Exception('Sorry, can\'t map "UserFilesPath" to a physical path. You must set the "UserFilesAbsolutePath" value in "editor/filemanager/connectors/py/config.py".')
        return realPath[ : position ]
| Python |
#!/usr/bin/env python
"""
FCKeditor - The text editor for Internet - http://www.fckeditor.net
Copyright (C) 2003-2008 Frederico Caldeira Knabben
== BEGIN LICENSE ==
Licensed under the terms of any of the following licenses at your
choice:
- GNU General Public License Version 2 or later (the "GPL")
http://www.gnu.org/licenses/gpl.html
- GNU Lesser General Public License Version 2.1 or later (the "LGPL")
http://www.gnu.org/licenses/lgpl.html
- Mozilla Public License Version 1.1 or later (the "MPL")
http://www.mozilla.org/MPL/MPL-1.1.html
== END LICENSE ==
Connector for Python (CGI and WSGI).
"""
from time import gmtime, strftime
import string
def escape(text, replace=None):
    """
    Converts the special characters '<', '>', '&' and '"' into the HTML
    entities &lt;, &gt;, &amp; and &quot; as specified by RFC 1866.

    BUG FIX: the replacement strings had themselves been HTML-unescaped at
    some point, turning every call into a no-op ('&' -> '&'); the entities
    are restored here.  The default also no longer relies on the long-gone
    string.replace() function while still accepting a custom *replace*
    callable with the same (text, old, new) signature.
    """
    if replace is None:
        replace = lambda s, old, new: s.replace(old, new)
    text = replace(text, '&', '&amp;') # must be done 1st
    text = replace(text, '<', '&lt;')
    text = replace(text, '>', '&gt;')
    text = replace(text, '"', '&quot;')
    return text
def convertToXmlAttribute(value):
    "Escape *value* for use in an XML attribute; None becomes the empty string."
    return escape("" if value is None else value)
class BaseHttpMixin(object):
    "Mixin emitting the anti-cache HTTP headers shared by every response."

    def setHttpHeaders(self, content_type='text/xml'):
        "Purpose: to prepare the headers for the xml to return"
        # Prevent the browser from caching the result.
        anti_cache = (
            ('Expires', 'Mon, 26 Jul 1997 05:00:00 GMT'),                        # date in the past
            ('Last-Modified', strftime("%a, %d %b %Y %H:%M:%S GMT", gmtime())),  # always modified
            ('Cache-Control', 'no-store, no-cache, must-revalidate'),            # HTTP/1.1
            ('Cache-Control', 'post-check=0, pre-check=0'),
            ('Pragma', 'no-cache'),                                              # HTTP/1.0
        )
        for name, value in anti_cache:
            self.setHeader(name, value)
        # Set the response format.
        self.setHeader( 'Content-Type', content_type + '; charset=utf-8' )
        return
class BaseXmlMixin(object):
    "Mixin building the XML documents returned by the connector."

    def createXmlHeader(self, command, resourceType, currentFolder, url):
        "Purpose: returns the xml header"
        self.setHttpHeaders()
        # XML prolog, root <Connector> node and the <CurrentFolder> element.
        parts = [
            """<?xml version="1.0" encoding="utf-8" ?>""",
            """<Connector command="%s" resourceType="%s">""" % (
                command,
                resourceType
            ),
            """<CurrentFolder path="%s" url="%s" />""" % (
                convertToXmlAttribute(currentFolder),
                convertToXmlAttribute(url),
            ),
        ]
        return "".join(parts)

    def createXmlFooter(self):
        "Purpose: returns the xml footer"
        return """</Connector>"""

    def sendError(self, number, text):
        "Purpose: in the event of an error, return an xml based error"
        self.setHttpHeaders()
        error_node = self.sendErrorNode(number, text)
        return ("""<?xml version="1.0" encoding="utf-8" ?>""" +
                """<Connector>""" + error_node + """</Connector>""")

    def sendErrorNode(self, number, text):
        "Build a single <Error> element with an escaped message."
        return """<Error number="%s" text="%s" />""" % (number, convertToXmlAttribute(text))
class BaseHtmlMixin(object):
 "Mixin building the HTML/JavaScript response for file uploads."
 def sendUploadResults( self, errorNo = 0, fileUrl = '', fileName = '', customMsg = '' ):
  # Respond as HTML so the hidden upload <iframe> executes the script below.
  self.setHttpHeaders("text/html")
  # NOTE(review): the string below is a no-op statement, not a docstring --
  # it appears after the function's first statement.
  "This is the function that sends the results of the uploading process"
  # The returned script relaxes document.domain step by step until it can
  # reach the opener window, then notifies the editor via
  # window.parent.OnUploadCompleted(error, url, name, message).
  # Double quotes in the interpolated values are backslash-escaped so the
  # generated JavaScript string literals stay well-formed.
  return """<script type="text/javascript">
(function()
{
var d = document.domain ;
while ( true )
{
// Test if we can access a parent property.
try
{
var test = window.top.opener.document.domain ;
break ;
}
catch( e ) {}
// Remove a domain part: www.mytest.example.com => mytest.example.com => example.com ...
d = d.replace( /.*?(?:\.|$)/, '' ) ;
if ( d.length == 0 )
break ; // It was not able to detect the domain.
try
{
document.domain = d ;
}
catch (e)
{
break ;
}
}
})() ;
window.parent.OnUploadCompleted(%(errorNumber)s,"%(fileUrl)s","%(fileName)s","%(customMsg)s");
</script>""" % {
  'errorNumber': errorNo,
  'fileUrl': fileUrl.replace ('"', '\\"'),
  'fileName': fileName.replace ( '"', '\\"' ) ,
  'customMsg': customMsg.replace ( '"', '\\"' ),
  }
| Python |
#!/usr/bin/env python
"""
FCKeditor - The text editor for Internet - http://www.fckeditor.net
Copyright (C) 2003-2008 Frederico Caldeira Knabben
== BEGIN LICENSE ==
Licensed under the terms of any of the following licenses at your
choice:
- GNU General Public License Version 2 or later (the "GPL")
http://www.gnu.org/licenses/gpl.html
- GNU Lesser General Public License Version 2.1 or later (the "LGPL")
http://www.gnu.org/licenses/lgpl.html
- Mozilla Public License Version 1.1 or later (the "MPL")
http://www.mozilla.org/MPL/MPL-1.1.html
== END LICENSE ==
Connector/QuickUpload for Python (WSGI wrapper).
See config.py for configuration settings
"""
from connector import FCKeditorConnector
from upload import FCKeditorQuickUpload
import cgitb
from cStringIO import StringIO
# Running from WSGI capable server (recomended)
def App(environ, start_response):
    """WSGI entry point. Run the connector.

    Dispatches on SCRIPT_NAME to the browse connector or the quick-upload
    handler, yields the response body, and renders a cgitb traceback page
    on failure.
    """
    if environ['SCRIPT_NAME'].endswith("connector.py"):
        conn = FCKeditorConnector(environ)
    elif environ['SCRIPT_NAME'].endswith("upload.py"):
        conn = FCKeditorQuickUpload(environ)
    else:
        start_response ("200 Ok", [('Content-Type','text/html')])
        yield "Unknown page requested: "
        yield environ['SCRIPT_NAME']
        return
    try:
        # run the connector (also fills conn.headers as a side effect)
        data = conn.doResponse()
        # Start WSGI response:
        start_response ("200 Ok", conn.headers)
        # Send response text
        yield data
    except Exception:
        # BUG FIX: was a bare "except:", which inside a generator also
        # swallows GeneratorExit/SystemExit; narrowed to Exception.
        start_response("500 Internal Server Error",[("Content-type","text/html")])
        # ("file" renamed: it shadowed the Python 2 builtin of that name.)
        error_buffer = StringIO()
        cgitb.Hook(file = error_buffer).handle()
        yield error_buffer.getvalue()
| Python |
Subsets and Splits
SQL Console for ajibawa-2023/Python-Code-Large
Provides a useful breakdown of language distribution in the training data, showing which languages have the most samples and helping identify potential imbalances across different language groups.