text stringlengths 0 1.05M | meta dict |
|---|---|
from functools import wraps
import logging
from peewee import create_model_tables
from peewee import drop_model_tables
logger = logging.getLogger('peewee')
class test_database(object):
    """Context manager that temporarily binds a set of models to a database.

    While active, every model in ``models`` points at ``db``; on exit the
    previous database bindings are restored.  Tables can optionally be
    created on entry and dropped on exit.
    """

    def __init__(self, db, models, create_tables=True, drop_tables=True,
                 fail_silently=False):
        self.db = db
        self.models = models
        self.create_tables = create_tables
        self.drop_tables = drop_tables
        self.fail_silently = fail_silently

    def __enter__(self):
        # Remember each model's current database so it can be restored later.
        self.orig = [model._meta.database for model in self.models]
        for model in self.models:
            model._meta.database = self.db
        if self.create_tables:
            create_model_tables(self.models, fail_silently=self.fail_silently)

    def __exit__(self, exc_type, exc_val, exc_tb):
        # Only drop tables that this context manager created itself.
        if self.create_tables and self.drop_tables:
            drop_model_tables(self.models, fail_silently=self.fail_silently)
        for model, original_db in zip(self.models, self.orig):
            model._meta.database = original_db
class _QueryLogHandler(logging.Handler):
def __init__(self, *args, **kwargs):
self.queries = []
logging.Handler.__init__(self, *args, **kwargs)
def emit(self, record):
self.queries.append(record)
class count_queries(object):
    """Context manager that counts queries logged by peewee while active."""

    def __init__(self, only_select=False):
        # When only_select is set, just SELECT statements are counted.
        self.only_select = only_select
        self.count = 0

    def get_queries(self):
        return self._handler.queries

    def __enter__(self):
        self._handler = _QueryLogHandler()
        logger.setLevel(logging.DEBUG)
        logger.addHandler(self._handler)
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        logger.removeHandler(self._handler)
        records = self._handler.queries
        if self.only_select:
            # record.msg is a (sql, params) tuple; the SQL text comes first.
            records = [record for record in records
                       if record.msg[0].startswith('SELECT ')]
        self.count = len(records)
class assert_query_count(count_queries):
    """count_queries variant that asserts an exact number of queries ran.

    Usable both as a context manager and as a function decorator.
    """

    def __init__(self, expected, only_select=False):
        super(assert_query_count, self).__init__(only_select=only_select)
        self.expected = expected

    def __call__(self, f):
        @wraps(f)
        def decorated(*args, **kwds):
            with self:
                result = f(*args, **kwds)
            self._assert_count()
            return result
        return decorated

    def _assert_count(self):
        assert self.count == self.expected, \
            '%s != %s' % (self.count, self.expected)

    def __exit__(self, exc_type, exc_val, exc_tb):
        super(assert_query_count, self).__exit__(exc_type, exc_val, exc_tb)
        self._assert_count()
| {
"repo_name": "zhang625272514/peewee",
"path": "playhouse/test_utils.py",
"copies": "24",
"size": "2675",
"license": "mit",
"hash": -1213632178259833000,
"line_mean": 29.3977272727,
"line_max": 78,
"alpha_frac": 0.5914018692,
"autogenerated": false,
"ratio": 3.7570224719101124,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.00039184952978056425,
"num_lines": 88
} |
from functools import wraps
import logging
import jwt
default_parse_fmt = "%d-%m-%Y %H:%M:%S"
def get_secret():
    """Return the JWT signing secret.

    SECURITY NOTE(review): the secret was hard-coded in source.  It can now
    be overridden via the ``APP_SECRET`` environment variable; the original
    hard-coded value remains the default for backward compatibility, but
    deployments should set APP_SECRET instead of relying on it.
    """
    import os
    return os.environ.get('APP_SECRET', 'sflkjsdjkfd')
def token_verify(token):
    """Decode and verify a JWT, returning its payload, or False when invalid.

    Bug fix: ``jwt.decode`` takes an ``algorithms`` *list*; the previous
    ``algorithm='HS256'`` keyword was silently swallowed by ``**kwargs``,
    leaving the set of accepted algorithms unpinned (algorithm-confusion
    risk), and it is rejected outright by PyJWT >= 2.0.
    """
    secret = get_secret()
    try:
        return jwt.decode(token, secret, algorithms=['HS256'])
    except jwt.DecodeError:
        # Signature/structure invalid: report failure rather than raising.
        return False
def log_function_entry_and_exit(decorated_function):
    '''
    Function decorator logging time spent.
    Logging entry + exit (as logging.info),
    and parameters (as logging.debug) of functions.
    '''
    @wraps(decorated_function)
    def wrapper(*dec_fn_args, **dec_fn_kwargs):
        name = decorated_function.__name__
        logging.info(f"Entering {name}()...")
        # Pair positional arguments with their declared parameter names.
        declared_names = decorated_function.__code__.co_varnames
        call_params = {
            'args': dict(zip(declared_names, dec_fn_args)),
            'kwargs': dec_fn_kwargs,
        }
        logging.debug(
            "\t" + ', '.join(f"{k}={v}" for k, v in call_params.items())
        )
        # Execute the wrapped function and log completion.
        result = decorated_function(*dec_fn_args, **dec_fn_kwargs)
        logging.info(f"Done running {name}()!")
        return result
    return wrapper
| {
"repo_name": "x10an14/overtime-calculator",
"path": "overtime_calculator/__init__.py",
"copies": "1",
"size": "1295",
"license": "mit",
"hash": -7032534723760607000,
"line_mean": 24.3921568627,
"line_max": 63,
"alpha_frac": 0.5853281853,
"autogenerated": false,
"ratio": 3.7,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.47853281852999996,
"avg_score": null,
"num_lines": null
} |
from functools import wraps
import logging
logger = logging.getLogger('peewee')
class _QueryLogHandler(logging.Handler):
def __init__(self, *args, **kwargs):
self.queries = []
logging.Handler.__init__(self, *args, **kwargs)
def emit(self, record):
self.queries.append(record)
class count_queries(object):
    """Counts queries issued through the peewee logger while active."""

    def __init__(self, only_select=False):
        # If only_select is true, only SELECT statements contribute to count.
        self.only_select = only_select
        self.count = 0

    def get_queries(self):
        return self._handler.queries

    def __enter__(self):
        self._handler = _QueryLogHandler()
        logger.setLevel(logging.DEBUG)
        logger.addHandler(self._handler)
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        logger.removeHandler(self._handler)
        if not self.only_select:
            self.count = len(self._handler.queries)
        else:
            # record.msg is a (sql, params) pair; inspect the SQL text.
            selects = [q for q in self._handler.queries
                       if q.msg[0].startswith('SELECT ')]
            self.count = len(selects)
class assert_query_count(count_queries):
    """Context manager / decorator asserting exactly ``expected`` queries."""

    def __init__(self, expected, only_select=False):
        super(assert_query_count, self).__init__(only_select=only_select)
        self.expected = expected

    def __call__(self, f):
        @wraps(f)
        def decorated(*args, **kwds):
            # Count queries around the wrapped call, then verify the total.
            with self:
                outcome = f(*args, **kwds)
            self._assert_count()
            return outcome
        return decorated

    def _assert_count(self):
        message = '%s != %s' % (self.count, self.expected)
        assert self.count == self.expected, message

    def __exit__(self, exc_type, exc_val, exc_tb):
        super(assert_query_count, self).__exit__(exc_type, exc_val, exc_tb)
        self._assert_count()
| {
"repo_name": "coleifer/peewee",
"path": "playhouse/test_utils.py",
"copies": "2",
"size": "1737",
"license": "mit",
"hash": -3265926478779265000,
"line_mean": 27.0161290323,
"line_max": 75,
"alpha_frac": 0.5785837651,
"autogenerated": false,
"ratio": 3.8092105263157894,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.538779429141579,
"avg_score": null,
"num_lines": null
} |
from functools import wraps
import math
import types
from django.utils.encoding import force_unicode
from django.utils.translation import ugettext_lazy as _
LOGARITHMIC, LINEAR = 1, 2
def parse_tags(tagstring):
    """
    Parses tag input, with multiple word input being activated and
    delineated by commas and double quotes. Quotes take precedence, so
    they may contain commas.
    Returns a sorted list of unique tag names.

    NOTE(review): Python 2 only code (``i.next()``, ``force_unicode``,
    ``u''`` literals) -- will not run unmodified on Python 3.
    """
    if not tagstring:
        return []
    tagstring = force_unicode(tagstring)
    # Special case - if there are no commas or double quotes in the
    # input, we don't *do* a recall... I mean, we know we only need to
    # split on spaces.
    if u',' not in tagstring and u'"' not in tagstring:
        #words = list(set(split_strip(tagstring, u' ')))
        #words.sort()
        #return words
        return [tagstring]
    words = []
    buffer = []
    # Defer splitting of non-quoted sections until we know if there are
    # any unquoted commas.
    to_be_split = []
    saw_loose_comma = False
    open_quote = False
    i = iter(tagstring)
    try:
        while True:
            c = i.next()
            if c == u'"':
                # Opening quote: flush any pending unquoted text first.
                if buffer:
                    to_be_split.append(u''.join(buffer))
                    buffer = []
                # Find the matching quote
                open_quote = True
                c = i.next()
                while c != u'"':
                    buffer.append(c)
                    c = i.next()
                # Quoted text becomes a word verbatim (stripped).
                if buffer:
                    word = u''.join(buffer).strip()
                    if word:
                        words.append(word)
                    buffer = []
                open_quote = False
            else:
                if not saw_loose_comma and c == u',':
                    saw_loose_comma = True
                buffer.append(c)
    except StopIteration:
        # If we were parsing an open quote which was never closed treat
        # the buffer as unquoted.
        if buffer:
            if open_quote and u',' in buffer:
                saw_loose_comma = True
            to_be_split.append(u''.join(buffer))
    if to_be_split:
        # A loose comma anywhere means commas delimit; otherwise spaces do.
        if saw_loose_comma:
            delimiter = u','
        else:
            delimiter = u' '
        for chunk in to_be_split:
            words.extend(split_strip(chunk, delimiter))
    # De-duplicate and return in sorted order.
    words = list(set(words))
    words.sort()
    return words
def split_strip(input, delimiter=u','):
    """
    Splits ``input`` on ``delimiter``, stripping each resulting string
    and returning a list of non-empty strings.
    """
    if not input:
        return []
    stripped = (piece.strip() for piece in input.split(delimiter))
    return [piece for piece in stripped if piece]
def edit_string_for_tags(tags):
    """
    Given list of ``Tag`` instances, creates a string representation of
    the list suitable for editing by the user, such that submitting the
    given string representation back without changing it will give the
    same list of tags.
    Tag names which contain commas will be double quoted.
    If any tag name which isn't being quoted contains whitespace, the
    resulting string of tag names will be comma-delimited, otherwise
    it will be space-delimited.
    """
    rendered = []
    for tag in tags:
        name = tag.name
        # Quote names that would otherwise be split on commas or spaces.
        if u',' in name or u' ' in name:
            rendered.append('"%s"' % name)
        else:
            rendered.append(name)
    return u', '.join(sorted(rendered))
def get_tag(tag):
    """
    Utility function for accepting single tag input in a flexible
    manner.
    If a ``Tag`` object is given it will be returned as-is; if a
    string or integer are given, they will be used to lookup the
    appropriate ``Tag``.
    If no matching tag can be found, ``None`` will be returned.

    NOTE(review): Python 2 only (``types.StringTypes``, ``types.IntType``,
    ``types.LongType`` were removed in Python 3).
    """
    # Imported lazily to avoid a circular import with xtags.models.
    from xtags.models import Tag
    if isinstance(tag, Tag):
        return tag
    try:
        if isinstance(tag, types.StringTypes):
            # Strings are treated as tag names.
            return Tag.objects.get(name=tag)
        elif isinstance(tag, (types.IntType, types.LongType)):
            # Integers are treated as primary keys.
            return Tag.objects.get(id=tag)
    except Tag.DoesNotExist:
        pass
    return None
def merge(to_tag, from_tag, ctype = None):
    """ Merge items with given tags together.
    If there are no any items with tag 'from_tag' and
    other content types, then 'from_tag' becomes a synonym for 'to_tag'.
    """
    # Resolve flexible inputs (Tag instance, name or id) to Tag objects.
    to_tag = get_tag(to_tag)
    from_tag = get_tag(from_tag)
    from_items = from_tag.items.all()
    if ctype is not None:
        from_items = from_items.filter(content_type = ctype)
    to_items = to_tag.items.all()
    if ctype is not None:
        to_items = to_items.filter(content_type = ctype)
    # Object ids already tagged with the destination tag.
    to_obj_ids = [item.object_id for item in to_items]
    for item in from_items:
        if item.object_id in to_obj_ids:
            # Already tagged with the destination tag: drop the duplicate.
            item.delete()
        else:
            # Re-point the tagged item at the destination tag.
            item.tag = to_tag
            item.save()
    # NOTE(review): from_tag is deleted unconditionally here, which does not
    # match the docstring's "becomes a synonym" claim -- confirm intent.
    from_tag.delete()
def _calculate_thresholds(min_weight, max_weight, steps):
delta = (max_weight - min_weight) / float(steps)
return [min_weight + i * delta for i in range(1, steps + 1)]
def _calculate_tag_weight(weight, max_weight, distribution):
    """
    Logarithmic tag weight calculation is based on code from the
    `Tag Cloud`_ plugin for Mephisto, by Sven Fuchs.
    .. _`Tag Cloud`: http://www.artweb-design.de/projects/mephisto-plugin-tag-cloud
    """
    # Linear distribution (or degenerate max weight) leaves weights untouched.
    if distribution == LINEAR or max_weight == 1:
        return weight
    if distribution == LOGARITHMIC:
        return math.log(weight) * max_weight / math.log(max_weight)
    raise ValueError(_('Invalid distribution algorithm specified: %(alg)s.') % {'alg': distribution})
def calculate_cloud(tags, steps=4, distribution=LOGARITHMIC):
    """
    Add a ``font_size`` attribute to each tag according to the
    frequency of its use, as indicated by its ``count``
    attribute.
    ``steps`` defines the range of font sizes - ``font_size`` will
    be an integer between 1 and ``steps`` (inclusive).
    ``distribution`` defines the type of font size distribution
    algorithm which will be used - logarithmic or linear. It must be
    one of ``tagging.utils.LOGARITHMIC`` or ``tagging.utils.LINEAR``.
    """
    if len(tags) > 0:
        counts = [tag.count for tag in tags]
        min_weight = float(min(counts))
        max_weight = float(max(counts))
        thresholds = _calculate_thresholds(min_weight, max_weight, steps)
        for tag in tags:
            tag_weight = _calculate_tag_weight(tag.count, max_weight, distribution)
            # Assign the first bucket whose threshold covers this weight.
            # Rounding works around float representation issues:
            # http://docs.python.org/tutorial/floatingpoint.html
            for bucket, threshold in enumerate(thresholds):
                if round(tag_weight, 10) <= round(threshold, 10):
                    tag.font_size = bucket + 1
                    break
    return tags
def require_instance_manager(func):
    """Decorator that rejects calls on managers without an attached instance."""
    @wraps(func)
    def inner(self, *args, **kwargs):
        if self.instance is not None:
            return func(self, *args, **kwargs)
        raise TypeError("Can't call %s with a non-instance manager" % func.__name__)
    return inner
| {
"repo_name": "vosi/django-xtags",
"path": "xtags/utils.py",
"copies": "1",
"size": "7135",
"license": "bsd-3-clause",
"hash": 6918574786971700000,
"line_mean": 31.7293577982,
"line_max": 101,
"alpha_frac": 0.5920112123,
"autogenerated": false,
"ratio": 3.9771460423634335,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.0025447787937573465,
"num_lines": 218
} |
from functools import wraps
import math
from flask import Response, g, jsonify, request
from backend.app import app
from backend.model import Session, User
@app.errorhandler(Exception)
def handle_exception(ex):
    # Catch-all error handler: log the full traceback server-side and
    # return an opaque 500 so internals are not leaked to clients.
    app.logger.exception(ex)
    return make_error_response('Internal server error', 500)
@app.before_request
def check_auth():
    """Populate ``g.current_session`` / ``g.current_user`` for each request.

    Two schemes are supported: an ``X-Auth-Token`` header (session token)
    or HTTP basic auth (email/username + password).  Anonymous requests
    leave both globals set to ``None``; endpoints enforce authentication
    via the ``auth_required`` decorator.
    """
    session = None
    user = None
    token = request.headers.get('X-Auth-Token')
    if token:
        # Token auth: resolve the session and take its user.
        session = Session.query.filter_by(token=token).first()
        if not session:
            return make_error_response('Invalid session token', 401)
        user = session.user
    else:
        auth = request.authorization
        if auth:
            user = User.find_by_email_or_username(auth.username)
            # NOTE(review): plaintext password comparison -- confirm that
            # User.password is not supposed to be a salted hash.
            if not (user and user.password == auth.password):
                return make_error_response('Invalid username/password combination', 401)
    g.current_session = session
    g.current_user = user
def make_error_response(message, status_code=400, **kwargs):
    """Build a JSON error response carrying ``message`` plus extra fields."""
    error_response = jsonify(message=message, **kwargs)
    error_response.status_code = status_code
    return error_response
def auth_required(f):
    """View decorator that rejects requests lacking an authenticated user."""
    @wraps(f)
    def wrapper(*args, **kwargs):
        if g.current_user:
            return f(*args, **kwargs)
        return make_error_response('Auth token required', 403)
    return wrapper
def inject_context(context):
    """Merge the optional schema context with the current request user."""
    merged = dict(context) if context else {}
    merged['current_user'] = g.current_user
    return merged
def dump_with_schema(schema_cls, many=False, paged=False,
                     context=None, status_code=None,
                     default_per_page=30, max_per_page=100,
                     **schema_kwargs):
    """Decorator serializing a view's return value with a marshmallow schema.

    When ``paged`` is set, the view must return an SQLAlchemy query; the
    response is a pagination envelope (items/count/page/per_page/total_pages)
    driven by the ``page``/``per_page`` request args.  A ``Response`` returned
    by the view is passed through untouched.

    Bug fix: the paged payload used to be built and then unconditionally
    overwritten by ``data = schema.dump(result).data`` -- the non-paged dump
    now only runs when ``paged`` is false.
    """
    def outer(f):
        @wraps(f)
        def wrapper(*args, **kwargs):
            result = f(*args, **kwargs)
            if isinstance(result, Response):
                # The view produced its own response (e.g. an error).
                return result
            schema = schema_cls(many=(many or paged), context=inject_context(context), **schema_kwargs)
            if paged:
                # result should be an SQLAlchemy query
                page = int(request.args.get('page', 1))
                per_page = int(request.args.get('per_page', default_per_page))
                if per_page > max_per_page:
                    return make_error_response('Exceeded max per page limit', 400)
                count = result.count()
                items = result.offset((page - 1) * per_page).limit(per_page)
                data = {
                    'items': schema.dump(items).data,
                    'count': count,
                    'page': page,
                    'per_page': per_page,
                    'total_pages': math.ceil(count / per_page),
                }
            else:
                data = schema.dump(result).data
            response = jsonify(data)
            if status_code:
                response.status_code = status_code
            return response
        return wrapper
    return outer
def load_with_schema(schema_cls, **schema_kwargs):
    """Decorator deserializing the request JSON body through a schema.

    On validation failure a 422 response is returned; otherwise the view is
    invoked with the deserialized payload as the ``data`` keyword argument.
    """
    def outer(f):
        @wraps(f)
        def wrapper(*args, **kwargs):
            schema = schema_cls(**schema_kwargs)
            payload = request.get_json(force=True)
            data, errors = schema.load(payload)
            if errors:
                return make_error_response('Validation failed', errors=errors, status_code=422)
            return f(data=data, *args, **kwargs)
        return wrapper
    return outer
from backend.routes import sessions, user # noqa
| {
"repo_name": "ianunruh/flask-api-skeleton",
"path": "backend/routes/__init__.py",
"copies": "1",
"size": "3512",
"license": "mit",
"hash": -4963749487858540000,
"line_mean": 26.4375,
"line_max": 103,
"alpha_frac": 0.5731776765,
"autogenerated": false,
"ratio": 4.1562130177514796,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.0006572853565619347,
"num_lines": 128
} |
from functools import wraps
import multiprocessing
import numpy
import logging
import geodat.units
logging.basicConfig(level=logging.DEBUG)
logger = logging.getLogger(__name__)
try:
import pyferret
PYFERRET_INSTALLED = True
_IMPORT_PYFERRET_ERROR = None
except ImportError:
logger.warning("Failed to load pyferret.")
PYFERRET_INSTALLED = False
_IMPORT_PYFERRET_ERROR = ImportError("Failed to load pyferret")
def num2fer(data, coords, dimunits,
            varname="UNKNOWN", data_units=None, missing_value=None,
            cartesian_axes=None, dimnames=None):
    ''' Create a dictionary that resemble the Ferret
    data variable structure to be passed to pyferret.putdata
    Args:
        data (numpy.ndarray)
        coords (a list of numpy.ndarray)
        dimunits (a list of str): dimension units (e.g. ['months','degrees_N'])
        varname (str, optional)
        data_units (str, optional)
        missing_value (numeric)
        cartesian_axes (a list of characters): specifies the cartesian axes
            e.g. ['T','Y','X']. If this is not specified, guesses will be made
            using the dimension units (say unit month will be interpreted as a
            [T]IME axis. Specifying cartesian_axes overwirtes the guesses.
        dimnames (a list of str) - dimension names (e.g. ['time','lat','lon'])
    Return:
        dict
    Length of cartesian_axes, dimnames, dimunits and coords need
    to agree with the number of dimensions of data
    '''
    if not PYFERRET_INSTALLED:
        raise _IMPORT_PYFERRET_ERROR
    # Validate that per-dimension metadata matches the data's rank.
    if len(dimunits) != data.ndim:
        raise Exception("Number of dimunits does not match data.ndim")
    if len(coords) != data.ndim:
        raise Exception("Number of coords does not match data.ndim")
    fer_var = {}
    # Define the variable (copied so Ferret cannot mutate the caller's array)
    fer_var['data'] = data.copy()
    # Variable name
    fer_var['name'] = varname
    # Dataset
    fer_var['dset'] = None
    # Title = variable name
    fer_var['title'] = fer_var['name']
    # Set missing value
    if missing_value is not None:
        fer_var['missing_value'] = missing_value
    # Set data unit
    if data_units is not None:
        fer_var['data_unit'] = data_units
    # Determine the axis type
    cax2ax_type = {'X': pyferret.AXISTYPE_LONGITUDE,
                   'Y': pyferret.AXISTYPE_LATITUDE,
                   'Z': pyferret.AXISTYPE_LEVEL,
                   'T': pyferret.AXISTYPE_CUSTOM}
    # Make guessses for the axis type
    if cartesian_axes is None:
        cartesian_axes = [geodat.units.assign_caxis(dimunit)
                          for dimunit in dimunits]
    if len(cartesian_axes) != data.ndim:
        raise Exception("Number of cartesian_axes/dimunits does"+\
                        " not match data.ndim")
    # Convert it to PyFerret convention; unknown axes become NORMAL.
    fer_var['axis_types'] = [cax2ax_type[cax]
                             if cax in cax2ax_type.keys()
                             else pyferret.AXISTYPE_NORMAL
                             for cax in cartesian_axes]
    if dimnames is not None:
        if len(dimnames) != data.ndim:
            raise Exception("Number of dimnames does not match data.ndim")
        fer_var['axis_names'] = dimnames
    fer_var['axis_units'] = dimunits
    fer_var['axis_coords'] = coords
    # This will be used as the second argument to pyferret.putdata
    axis_pos_dict = {'X': pyferret.X_AXIS,
                     'Y': pyferret.Y_AXIS,
                     'Z': pyferret.Z_AXIS,
                     'T': pyferret.T_AXIS}
    # Force axis position; unknown axes fall back to their list position.
    fer_var['axis_pos'] = [axis_pos_dict[cax]
                           if cax in axis_pos_dict.keys()
                           else cartesian_axes.index(cax)
                           for cax in cartesian_axes]
    return fer_var
def fer2num(var):
    ''' Filter the dictionary returned by pyferret.getdata
    PyFerret usually returns data with extra singlet dimension
    Need to filter those
    Args:
        var (dict): as is returned by pyferret.getdata
    Returns:
        dict: {'data': a numpy ndarray, 'varname': the name of the variable,\n
        'coords': a list of numpy ndarrays for the dimensions,
        'dimunits': a list of strings, the units for the dimensions,
        'dimnames': a list of strings, the names for the dimensions}
    '''
    if not PYFERRET_INSTALLED:
        raise _IMPORT_PYFERRET_ERROR
    results = {}
    # Keep only axes that actually carry coordinates (drop singlet axes).
    results['coords'] = [ax for ax in var['axis_coords']
                         if ax is not None]
    if var['axis_names'] is not None:
        results['dimnames'] = [var['axis_names'][i]
                               for i in range(len(var['axis_names']))
                               if var['axis_coords'][i] is not None]
    # If the axis_type is TIME, the axis_unit is the calendar type which
    # is not considered yet
    if pyferret.AXISTYPE_TIME in var['axis_types']:
        raise Exception("Immature function: axis_type from Ferret is TIME,"+\
                        "not CUSTOM; a situation not taken into yet.")
    results['dimunits'] = [var['axis_units'][i]
                           for i in range(len(var['axis_units']))
                           if var['axis_coords'][i] is not None]
    # Index 0 on coordinate-less axes to squeeze PyFerret's extra dimensions.
    sliceobj = [0 if ax is None else slice(None)
                for ax in var['axis_coords']]
    # NOTE(review): indexing with a Python *list* of mixed ints/slices is
    # deprecated/removed in newer NumPy -- tuple(sliceobj) may be required.
    results['data'] = var['data'][sliceobj]
    results['varname'] = var['title']
    return results
def run_worker(f):
    ''' A workaround for clearing memory used by PyFerret

    Runs ``f`` in a throwaway single-process pool so that any memory the
    call allocates is released when the child process exits.

    Fix: the pool is now shut down in a ``finally`` block, so the worker
    process no longer leaks when ``f`` raises.
    '''
    @wraps(f)
    def run_func(*args, **kwargs):
        pool = multiprocessing.Pool(1)
        try:
            return pool.apply(f, args, kwargs)
        finally:
            pool.close()
            pool.terminate()
            pool.join()
    return run_func
def regrid_once_primitive(var, ref_var, axis,
                          verbose=False, prerun=None, transform='@ave'):
    ''' A generic function that regrids a variable without the dependence of
    geodat.nc.Variable
    Args:
        var (dict) : arguments for num2fer
                     Required keys: data,coords,dimunits
        ref_var (dict) : arguments for num2fer.
                         This supplies the grid for regridding
                         Required keys: coords,dimunits
        axis (str) : the axis for regridding e.g. 'X'/'Y'/'XY'/"YX"
        verbose (bool) : whether to print progress (default: False)
        prerun (a list of str) : commands to be run at the start (default: None)
        transform (str): "@ave" (Conserve area average),
                         "@lin" (Linear interpolation),...see Ferret doc
    Returns:
        dict

    NOTE(review): Python 2 only -- this function uses ``print`` statements
    and will not even compile under Python 3.
    '''
    if not PYFERRET_INSTALLED:
        raise _IMPORT_PYFERRET_ERROR
    pyferret.start(quiet=True, journal=verbose,
                   verify=False, server=True)
    # commands to run before regridding
    if prerun is not None:
        if type(prerun) is str:
            pyferret.run(prerun)
        elif type(prerun) is list:
            for s in prerun:
                if type(s) is str:
                    # NOTE(review): this runs `prerun` (the whole list), not
                    # `s` -- looks like a bug; confirm intended behavior.
                    pyferret.run(prerun)
                else:
                    raise Exception("prerun has to be either a string or "+\
                                    "a list of string")
        else:
            raise Exception("prerun has to be either a string or a list of "+\
                            "string")
    assert isinstance(axis, str)
    axis = axis.upper()
    # Make sure axis is a string denoting X or Y axis
    #if axis not in ['X', 'Y', 'XY', 'YX']:
    #    raise Exception("Currently axis can only be X/Y/XY")
    # Construct the source data read by pyferret.putdata
    source_fer = num2fer(**var)
    source_fer["name"] = "source"
    # Fill in unnecessary input for Ferret
    if "data" not in ref_var:
        ref_var['data'] = numpy.zeros((1,)*len(ref_var['coords']))
    # Construct the destination data read by pyferret.putdata
    dest_fer = num2fer(**ref_var)
    dest_fer["name"] = "dest"
    if verbose:
        print source_fer
        print dest_fer
    pyferret.putdata(source_fer, axis_pos=source_fer['axis_pos'])
    if verbose:
        print "Put source variable"
        pyferret.run('show grid source')
    pyferret.putdata(dest_fer, axis_pos=dest_fer['axis_pos'])
    if verbose:
        print "Put destination variable"
        pyferret.run('show grid dest')
    # Regrid onto the destination grid using the requested transform.
    pyfer_command = 'let result = source[g'+axis.lower()+'=dest'+transform+']'
    pyferret.run(pyfer_command)
    if verbose:
        print "Regridded in FERRET"
        pyferret.run('show grid result')
    # Get results
    result_ref = pyferret.getdata('result')
    if verbose: print "Get data from FERRET"
    # Convert from ferret data structure to geodat.nc.Variable
    tmp_result = fer2num(result_ref)
    if 'varname' in var:
        tmp_result['varname'] = var['varname']
    tmp_caxes = [geodat.units.assign_caxis(dimunit)
                 for dimunit in tmp_result['dimunits']]
    var_caxes = [geodat.units.assign_caxis(dimunit)
                 for dimunit in var['dimunits']]
    # Preserve dimension order (Ferret reverts the order)
    neworder = [tmp_caxes.index(cax)
                for cax in var_caxes]
    # Change the dimension order of the result to match with the input
    tmp_result['coords'] = [tmp_result['coords'][iax] for iax in neworder]
    tmp_result['dimunits'] = [tmp_result['dimunits'][iax] for iax in neworder]
    if 'dimnames' in tmp_result:
        tmp_result['dimnames'] = [tmp_result['dimnames'][iax]
                                  for iax in neworder]
    tmp_result['data'] = tmp_result['data'].transpose(neworder).astype(
        var['data'].dtype)
    # Return the input var with the data and dimensions replaced by
    # the regridded ones
    var.update(tmp_result)
    result = var
    status = pyferret.stop()
    if verbose:
        if status:
            print "PyFerret stopped."
        else:
            print "PyFerret failed to stop."
    return result
# Memory-isolated variant: runs the regridding in a child process so the
# memory PyFerret allocates is reclaimed on completion (see run_worker).
regrid_primitive = run_worker(regrid_once_primitive)
if __name__ == '__main__':
    # Ad-hoc smoke test: regrid a high-resolution land mask onto the
    # low-resolution grid.  Requires both NetCDF files in the working
    # directory.  NOTE(review): scipy.io.netcdf is deprecated in modern
    # SciPy -- confirm the target SciPy version.
    import scipy.io.netcdf as netcdf
    ncfile_low = netcdf.netcdf_file("land_mask_lowres.nc")
    newvar = dict(data=ncfile_low.variables['land_mask'].data,
                  coords=[ncfile_low.variables[dim].data
                          for dim in ncfile_low.variables['land_mask'].\
                          dimensions],
                  dimunits=[ncfile_low.variables[dim].units
                            for dim in ncfile_low.variables['land_mask'].\
                            dimensions])
    ncfile_high = netcdf.netcdf_file("land_mask_highres.nc")
    var_high = dict(data=ncfile_high.variables['land_mask'].data,
                    coords=[ncfile_high.variables[dim].data
                            for dim in ncfile_high.variables['land_mask'].\
                            dimensions],
                    dimunits=[ncfile_high.variables[dim].units
                              for dim in ncfile_high.variables['land_mask'].\
                              dimensions])
    regridded = regrid_primitive(var_high, newvar, 'XY')
| {
"repo_name": "kitchoi/geodat",
"path": "geodat/pyferret_func.py",
"copies": "1",
"size": "11096",
"license": "mit",
"hash": 8668421425789902000,
"line_mean": 36.110367893,
"line_max": 80,
"alpha_frac": 0.5852559481,
"autogenerated": false,
"ratio": 3.7844474761255116,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.48697034242255116,
"avg_score": null,
"num_lines": null
} |
from functools import wraps
import NodeDefender
from NodeDefender.mqtt.message.report.sensor import info
def verify_sensor_and_class(func):
    """Decorator ensuring the sensor referenced by an MQTT topic exists.

    Creates the sensor record on first sight before invoking the wrapped
    handler with the original (topic, payload) arguments.
    """
    @wraps(func)
    def wrapper(topic, payload):
        if not NodeDefender.db.sensor.get(topic['mac_address'], topic['node']):
            NodeDefender.db.sensor.create(topic['mac_address'], topic['node'])
        # NOTE(review): this command-class check is a no-op (`pass`) --
        # presumably a missing command class should be created here too;
        # confirm intended behavior.
        if not NodeDefender.db.commandclass.get(
                topic['mac_address'], topic['node'],
                classname = topic['commandClass']):
            pass
        return func(topic, payload)
    return wrapper
@verify_sensor_and_class
def event(topic, payload):
    """Dispatch a sensor report MQTT message to the appropriate handler.

    Security fix: the handler lookup previously used
    ``eval('info.' + topic['action'])``; an attacker-influenced action string
    could execute arbitrary expressions.  ``getattr`` performs the same
    attribute dispatch safely (raising AttributeError for unknown actions).
    """
    if topic['commandClass'] == 'info':
        return getattr(info, topic['action'])(topic, payload)
    elif topic['subFunction']:
        if topic['subFunction'] == 'sup':
            return info.sup(topic, payload)
        elif topic['subFunction'] == 'evtsup':
            return info.evtsup(topic, payload)
    # Fall through to the generic iCPE sensor event handler.
    return NodeDefender.icpe.sensor.event(topic['mac_address'], topic['node'],
                                          topic['commandClass'], **payload)
| {
"repo_name": "CTSNE/NodeDefender",
"path": "NodeDefender/mqtt/message/report/sensor/__init__.py",
"copies": "1",
"size": "1139",
"license": "mit",
"hash": -6210535532792508000,
"line_mean": 41.1851851852,
"line_max": 81,
"alpha_frac": 0.5926251097,
"autogenerated": false,
"ratio": 4.156934306569343,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5249559416269343,
"avg_score": null,
"num_lines": null
} |
from functools import wraps
import numbers
import uuid
import six
from decent.error import Error, Invalid
## Helpers
def All(*validators):
    """
    Combines all the given validator callables into one, running all the
    validators in sequence on the given value.
    """
    @wraps(All)
    def built(value):
        # Feed each validator the previous validator's (possibly mutated) output.
        result = value
        for check in validators:
            result = check(result)
        return result
    return built
def Any(*validators):
    """
    Combines all the given validator callables into one, running the given
    value through them in sequence until a valid result is given.
    """
    @wraps(Any)
    def built(value):
        last_error = None
        for check in validators:
            try:
                return check(value)
            except Error as err:
                last_error = err
        # Every validator failed: re-raise the last error seen.
        raise last_error
    return built
def Maybe(validator):
    """
    Wraps the given validator callable, only using it for the given value if it
    is not ``None``.  ``None`` passes through unchanged.

    Fix: uses an identity check (``is not None``) -- the old ``!= None``
    comparison could invoke a custom ``__ne__`` on the value being validated.
    """
    @wraps(Maybe)
    def built(value):
        if value is not None:
            return validator(value)
        return None
    return built
def Msg(validator, message):
    """
    Wraps the given validator callable, replacing any error messages raised.
    """
    @wraps(Msg)
    def built(value):
        try:
            result = validator(value)
        except Error as err:
            # Swap in the caller-supplied message, keeping the same error.
            err.message = message
            raise err
        return result
    return built
def Default(default):
    """
    Creates a validator callable that replaces ``None`` with the specified
    default value.

    Fix: compares with ``is None`` -- the old ``== None`` could trigger a
    custom ``__eq__`` on the incoming value.
    """
    @wraps(Default)
    def built(value):
        return default if value is None else value
    return built
## Basics
def Eq(value, message="Not equal to {!s}"):
    """
    Creates a validator that compares the equality of the given value to
    ``value``.
    A custom message can be specified with ``message``. It will be formatted
    with ``value``.
    """
    @wraps(Eq)
    def built(candidate):
        if candidate == value:
            return candidate
        raise Error(message.format(value))
    return built
def Type(expected, message="Not of type {}"):
    """
    Creates a validator that compares the type of the given value to
    ``expected``. This is a direct type() equality check. Also see
    ``Instance``, which is an isinstance() check.
    A custom message can be specified with ``message``.
    """
    @wraps(Type)
    def built(value):
        # Deliberately strict: subclasses of `expected` do NOT pass.
        if type(value) == expected:
            return value
        raise Error(message.format(expected.__name__))
    return built
def Instance(expected, message="Not an instance of {}"):
    """
    Creates a validator that checks if the given value is an instance of
    ``expected``.
    A custom message can be specified with ``message``.
    """
    @wraps(Instance)
    def built(value):
        if isinstance(value, expected):
            return value
        raise Error(message.format(expected.__name__))
    return built
def Coerce(type, message="Not a valid {} value"):
    """
    Creates a validator that attempts to coerce the given value to the
    specified ``type``. Will raise an error if the coercion fails.
    A custom message can be specified with ``message``.
    """
    @wraps(Coerce)
    def built(value):
        try:
            return type(value)
        except (TypeError, ValueError):
            # Coercion failed: surface a schema error instead.
            raise Error(message.format(type.__name__))
    return built
## Collections
def List(validator):
    """
    Creates a validator that runs the given validator on every item in a list
    or other collection. The validator can mutate the values.
    Any raised errors will be collected into a single ``Invalid`` error. Their
    paths will be replaced with the index of the item. Will raise an error if
    the input value is not iterable.
    """
    @wraps(List)
    def built(value):
        if not hasattr(value, '__iter__'):
            raise Error("Must be a list")
        collected = Invalid()
        for index, item in enumerate(value):
            try:
                value[index] = validator(item)
            except Invalid as group:
                # Prefix every nested error's path with this item's index.
                for error in group:
                    error.path.insert(0, index)
                    collected.append(error)
            except Error as error:
                error.path.insert(0, index)
                collected.append(error)
        if len(collected):
            raise collected
        return value
    return built
## Booleans
def Boolean():
    """
    Creates a validator that attempts to convert the given value to a boolean
    or raises an error. The following rules are used:
    ``None`` is converted to ``False``.
    ``int`` values are ``True`` except for ``0``.
    ``str`` values converted in lower- and uppercase:
    * ``y, yes, t, true``
    * ``n, no, f, false``

    Fix: the ``None`` test now uses ``is None`` -- the old ``== None`` could
    trigger a custom ``__eq__`` on the incoming value.
    """
    @wraps(Boolean)
    def built(value):
        # Already a boolean?
        if isinstance(value, bool):
            return value
        # None
        if value is None:
            return False
        # Integers (bool was handled above, so no overlap)
        if isinstance(value, int):
            return value != 0
        # Strings
        if isinstance(value, str):
            if value.lower() in { 'y', 'yes', 't', 'true' }:
                return True
            elif value.lower() in { 'n', 'no', 'f', 'false' }:
                return False
        # Nope
        raise Error("Not a boolean value.")
    return built
## Numbers
def Range(min=None, max=None, min_message="Must be at least {min}", max_message="Must be at most {max}"):
    """
    Creates a validator that checks if the given numeric value is in the
    specified range, inclusive.
    Accepts values specified by ``numbers.Number`` only, excluding booleans.
    The error messages raised can be customized with ``min_message`` and
    ``max_message``. The ``min`` and ``max`` arguments are formatted.
    """
    @wraps(Range)
    def built(value):
        # Booleans are ints in Python, so exclude them explicitly.
        is_number = isinstance(value, numbers.Number) and not isinstance(value, bool)
        if not is_number:
            raise Error("Not a number")
        if min is not None and value < min:
            raise Error(min_message.format(min=min, max=max))
        if max is not None and max < value:
            raise Error(max_message.format(min=min, max=max))
        return value
    return built
def Length(min=None, max=None, min_message="Must have a length of at least {min}", max_message="Must have a length of at most {max}"):
    """
    Creates a validator that checks if the given value's length is in the
    specified range, inclusive. (Returns the original value.)
    See :func:`.Range`.
    """
    # Delegate the numeric bounds check to Range, applied to len(value).
    range_check = Range(min, max, min_message, max_message)
    @wraps(Length)
    def built(value):
        if not hasattr(value, '__len__'):
            raise Error("Does not have a length")
        range_check(len(value))
        return value
    return built
## Strings
def _string_function(value, name):
    # Shared guard used by the string validators below: require a string,
    # then call the named zero-argument string method on it.
    if not isinstance(value, six.string_types):
        raise Error("Must be a string")
    method = getattr(value, name)
    return method()
def Lower():
    """
    Creates a validator that converts the input string to lowercase. Will raise
    an error for non-string types.
    """
    @wraps(Lower)
    def validate(value):
        # Delegate the type guard and method call to the shared helper.
        return _string_function(value, 'lower')
    return validate
def Upper():
    """
    Creates a validator that converts the input string to UPPERCASE. Will raise
    an error for non-string types.
    """
    @wraps(Upper)
    def validate(value):
        # Delegate the type guard and method call to the shared helper.
        return _string_function(value, 'upper')
    return validate
def Strip():
    """
    Creates a validator that strips the input string of whitespace. Will raise
    an error for non-string types.
    """
    @wraps(Strip)
    def built(value):
        # Delegates type checking and the method call to _string_function.
        return _string_function(value, 'strip')
    return built
def NotEmpty():
    """
    Build a validator that rejects empty strings and non-string values;
    a non-empty string is returned unchanged.
    """
    @wraps(NotEmpty)
    def built(value):
        # Accept only truthy (non-empty) strings.
        if isinstance(value, six.string_types) and value:
            return value
        raise Error("Must not be empty")
    return built
## String conversions
def Uuid(to_uuid=True):
"""
Creates a UUID validator. Will raise an error for non-string types and
non-UUID values.
The given value will be converted to an instance of ``uuid.UUID`` unless
``to_uuid`` is ``False``.
"""
@wraps(Uuid)
def built(value):
invalid = Error("Not a valid UUID")
if isinstance(value, uuid.UUID):
return value
elif not isinstance(value, six.string_types):
raise invalid
try:
as_uuid = uuid.UUID(value)
except (ValueError, AttributeError) as e:
raise invalid
if to_uuid:
return as_uuid
return value
return built
| {
"repo_name": "veeti/decent",
"path": "decent/validators.py",
"copies": "1",
"size": "8820",
"license": "mit",
"hash": -7574167442853030000,
"line_mean": 26.1384615385,
"line_max": 134,
"alpha_frac": 0.5987528345,
"autogenerated": false,
"ratio": 4.323529411764706,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5422282246264706,
"avg_score": null,
"num_lines": null
} |
from functools import wraps
import numpy as np
import xarray as xr
def xarray_loop_vars_over_dim(vars_to_loop, loop_dim):
    """
    A decorator to feed CML data with multiple channels as xarray.DataArrays to CML processing functions
    Parameters
    ----------
    vars_to_loop: list of strings
        List of the names of the variables used as kwargs in the decorated function
        which should have a dimension `loop_dim` for which the decorated function is
        then applied individually to each item when looping over `loop_dim`.
    loop_dim: basestring
        Name of the dimension which all variables in `vars_to_loop` must have in common
        and which will be looped over to apply the decorated function.
    Examples
    --------
    Here is an example for how this decorator is used for the WAA Schleiss function::
        @xarray_loop_vars_over_dim(vars_to_loop=["rsl", "baseline", "wet"], loop_dim="channel_id")
        def waa_schleiss_2013(rsl, baseline, wet, waa_max, delta_t, tau):
            # function body...
    Here, `delta_t` and `tau` are not CML data xarray.DataArrays and hence do not
    have to be looped over.
    """
    def decorator(func):
        @wraps(func)
        def inner(*args, **kwargs):
            # TODO: Maybe check if all args or kwargs are the same type
            # The case with numpy array as arg
            if len(args) > 0 and isinstance(args[0], np.ndarray):
                return func(*args, **kwargs)
            # The case with numpy array as kwarg
            if len(kwargs.keys()) > 0 and isinstance(
                kwargs[vars_to_loop[0]], np.ndarray
            ):
                return func(*args, **kwargs)
            # The dummy case where nothing is passed. This is just to get the
            # functions error message here instead of continuing to the loop below
            if len(args) == 0 and len(kwargs) == 0:
                return func(*args, **kwargs)
            # From here on the looped kwargs are assumed to be xarray.DataArrays
            # carrying the `loop_dim` dimension and a `time` coordinate
            # — TODO confirm against callers.
            loop_dim_id_list = list(
                np.atleast_1d(kwargs[vars_to_loop[0]][loop_dim].values)
            )
            if len(loop_dim_id_list) > 1:
                # Pop the DataArray kwargs so `func` can be called with
                # plain numpy arrays, one `loop_dim` entry at a time.
                kwargs_vars_to_loop = {var: kwargs.pop(var) for var in vars_to_loop}
                data_list = []
                for loop_dim_id in loop_dim_id_list:
                    for var in vars_to_loop:
                        kwargs[var] = kwargs_vars_to_loop[var].sel(
                            {loop_dim: loop_dim_id}
                        ).values
                    data_list.append(func(**kwargs))
                # Stack per-channel results back into a (loop_dim, time)
                # DataArray. NOTE(review): assumes `func` returns a 1-D
                # time series per channel — verify for new uses.
                return xr.DataArray(
                    data=np.stack(data_list),
                    dims=(loop_dim, "time"),
                    coords={
                        loop_dim: kwargs_vars_to_loop[vars_to_loop[0]][loop_dim].values,
                        "time": kwargs_vars_to_loop[vars_to_loop[0]].time,
                    },
                )
            else:
                # Single entry along loop_dim: call once and return a plain
                # time-indexed DataArray.
                return xr.DataArray(
                    data=func(**kwargs),
                    dims=("time"),
                    coords={"time": kwargs[vars_to_loop[0]].time},
                )
        return inner
    return decorator
| {
"repo_name": "pycomlink/pycomlink",
"path": "pycomlink/processing/xarray_wrapper.py",
"copies": "2",
"size": "3162",
"license": "bsd-3-clause",
"hash": 178416684743740800,
"line_mean": 39.5384615385,
"line_max": 103,
"alpha_frac": 0.541429475,
"autogenerated": false,
"ratio": 4.127937336814622,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5669366811814622,
"avg_score": null,
"num_lines": null
} |
from functools import wraps
import operator as op
class TypeCheckError(TypeError):
    """Raised (via type_assertion) when a runtime type contract declared
    with assert_type is violated."""
    pass
def type_assertion(cond, error, msg=''):
    """Raise ``error(msg)`` unless ``cond`` holds; otherwise return True."""
    if cond:
        return True
    raise error(msg)
def assert_type(*types, **kw):
    """
    `*tps`: types that function should accept
    keywords:
    @ret_: return type that function should have
    @at_least: minimum args decorated function should have
    TODO: any other: types that kwargs should have
    """
    # NOTE(review): Python 2 only — `map` must return a list here because
    # several of the results below are consumed more than once.
    raw_types = map(type, types)
    all_raw = map(lambda x: x == type, raw_types)
    # all_raw = map(lambda x: isinstance(x, type), raw_types)
    w = 'Either all of `types` should be raw, or none should be'
    type_assertion(all(all_raw) or not any(all_raw), TypeCheckError, w)
    # all_typed = map(lambda t: isinstance(t, Type), raw_types)
    all_typed = map(lambda t: isinstance(t, Type), types)
    w = 'Either all of `types` should be instance of Type, or none should be'
    type_assertion(all(all_typed) or not any(all_typed), TypeCheckError, w)
    if not all(all_typed):
        # Wrap bare types in Type so they compare uniformly below.
        types = map(get_type, types)
    # Arity bounds: `nlt` = no less than, `ngt` = no greater than.
    at_least = kw.pop('nlt', 0)
    at_most = kw.pop('ngt', float('inf'))
    exp_ret_type = kw.pop('ret_', None)
    if exp_ret_type is not None:
        exp_ret_type = get_type(exp_ret_type) if not isinstance(exp_ret_type, Type) else exp_ret_type
    def deco(f):
        @wraps(f)
        def wrapped(*a, **kw):
            # Check arity first, then positional argument types.
            w = '{} needs at least {} args. {} passed.'.format(f.__name__, at_least, len(a))
            type_assertion(len(a) >= at_least, TypeCheckError, w)
            w = '{} needs no more than {} args. {} passed.'.format(f.__name__, at_most, len(a))
            type_assertion(len(a) <= at_most, TypeCheckError, w)
            in_types = tuple(map(get_type, a))
            eqs = map(op.eq, types, in_types)
            w = 'Required types {} != {}'.format(types, in_types)
            type_assertion(all(eqs), TypeCheckError, w)
            res = f(*a, **kw)
            # Check return type
            if exp_ret_type is not None:
                ret_type = get_type(res)
                assert exp_ret_type == ret_type, 'Required return type {} != {}'.format(exp_ret_type, ret_type)
            return res
        # Expose the declared signature on the wrapper for introspection.
        wrapped.types = types
        wrapped.outtypes = exp_ret_type
        return wrapped
    return deco
class Any(type):
def __eq__(self, other):
return True
def __ne__(self, other):
return False
class Type(object):
    """
    Value-semantics wrapper around a bare Python type (equality, hashing,
    display) for the type checker. The first wrapper built for a given
    type is registered in the shared ``_type_list``.
    """
    def __init__(self, typ):
        # Accept bare types and the Any wildcard only.
        assert isinstance(typ, type) or typ == Any
        self.typ = typ
        if typ not in Type._type_list:
            Type._type_list[typ] = self
    def __hash__(self):
        return hash(self.typ)
    def __cmp__(self, other):
        # Python 2 ordering hook; `cmp` does not exist on Python 3.
        return cmp(self.typ, other.typ)
    def __repr__(self):
        # E.g. Type(int) -> "Int'".
        if hasattr(self.typ, '__name__'):
            return self.typ.__name__.capitalize() + "'"
        return self.__class__.__name__ + "'"
    def type_repr(self, x):
        if isinstance(x, type):
            return x.__name__
        return repr(x)
    def __eq__(self, other):
        # Equal when both sides are the same wrapper class and either the
        # wrapped types match or one of them is the Any wildcard.
        # (An unreachable leftover copy of __repr__'s body that followed
        # the return statement has been removed.)
        same_type = type(self) == type(other)
        anys = self.typ == other.typ or Any in {self.typ, other.typ}
        return same_type and anys
    def __ne__(self, other):
        return not self.__eq__(other)
    _type_list = {}  # TODO: Function to register new items on the fly
class TypeContainer(Type):
    """Type wrapper for container types; element types participate in
    equality, hashing and display."""
    def __init__(self, typ):
        super(self.__class__, self).__init__(typ)
        # Tuples and Pairs are position-sensitive; all other containers
        # store their element types as a sorted, de-duplicated list.
        self.ordered = typ in {tuple, Pair}
        self.cont_holder = tuple if self.ordered else (lambda x: sorted(set(x)))
    def copy(self):
        new_self = self.__class__(self.typ)
        return new_self
    def __call__(self, *args):  # TODO: return copy of self that isn't callable
        # Calling a container wrapper with element types yields a new
        # wrapper carrying those element types.
        assert all([isinstance(a, (type, Type)) for a in args]), 'Must only pass bare types'
        ensure_type = lambda x: Type(x) if not isinstance(x, Type) else x
        new_self = self.copy()
        new_self.cont_types = self.cont_holder(map(ensure_type, args))
        return new_self
    def __repr__(self):
        conts = getattr(self, 'cont_types', [])
        if self.typ == Pair:
            k, v = conts
            return '{}=>{}'.format(k, v)
        if self.typ == tuple:
            return '({})'.format(', '.join(map(self.type_repr, conts)))
        base_r = super(self.__class__, self).__repr__()
        return '{} [{}]'.format(base_r, ' '.join(map(self.type_repr, conts)))
    def __eq__(self, other):
        # Same container type AND same element types.
        base_eq = super(self.__class__, self).__eq__(other)
        return base_eq and (self.cont_types == other.cont_types)
    def __ne__(self, other):
        return not self.__eq__(other)
    def __hash__(self):
        return hash(self.typ) + hash(tuple(self.cont_types))
    # Class-level default; instances get their own value via __call__.
    cont_types = []
class Pair(tuple):
    """2-tuple used to represent dict (key, value) entries as types."""
    def __new__(cls, *args):
        # Accept either Pair(k, v) or Pair((k, v)).
        if len(args) == 2:
            pair = args
        else:
            pair = args[0]
        try:
            first, second = pair
        except ValueError:
            raise ValueError('Pairs only allow for 2 values')
        return super(Pair, cls).__new__(cls, (first, second))
def get_type(x):
    """Map a value or bare type to its Type wrapper, recursing into
    containers. Python 2 only (`unicode` and `file` builtins)."""
    if isinstance(x, Type):
        return x
    if isinstance(x, (str, unicode, file)):
        # Strings/files are iterable but must not be treated as containers.
        return Type(type(x))
    if isinstance(x, type):
        return Type(x)
    if not hasattr(x, '__iter__') and x == Any:
        return Type(x)
    try: # container type
        # Dicts are represented as containers of Pair(key, value) types.
        _dct = isinstance(x, dict)
        iter_ = (lambda x: map(Pair, x.items())) if _dct else iter
        type_wrapper = TypeContainer(type(x))
        cont_types = type_wrapper.cont_holder(map(get_type, iter_(x)))
        # TODO: special `iter` infinite lists
        return type_wrapper(*cont_types)
    except (TypeError, KeyError): # KeyError: redis.client tries to iterate by giving self wrong key
        return Type(type(x))
| {
"repo_name": "d10genes/typecheck.py",
"path": "typechecker.py",
"copies": "1",
"size": "6018",
"license": "bsd-3-clause",
"hash": 4801108309648688000,
"line_mean": 30.5078534031,
"line_max": 111,
"alpha_frac": 0.5579926886,
"autogenerated": false,
"ratio": 3.5567375886524824,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.46147302772524823,
"avg_score": null,
"num_lines": null
} |
from functools import wraps
import operator
def err(f):
    """Decorator: swallow any exception raised by ``f`` and return an
    empty DotDict instead."""
    @wraps(f)
    def wrapper(*args, **kwargs):
        try:
            return f(*args, **kwargs)
        except Exception:
            return DotDict()
    return wrapper
class DotDict(dict):
    """
    dict with attribute-style access. Nested mappings are wrapped
    recursively, and reading a missing key auto-creates an empty DotDict
    (enabling chains like ``d.a.b = 1``).

    Note: because ``__getattr__`` is ``dict.__getitem__``, a missing
    attribute raises KeyError, not AttributeError.
    """
    __getattr__ = dict.__getitem__
    __setattr__ = dict.__setitem__
    __delattr__ = dict.__delitem__
    def __init__(self, dct=None):
        # `dct=None` avoids the shared-mutable-default pitfall of the
        # previous `dct={}` signature; behavior is unchanged for callers.
        if dct is None:
            dct = {}
        for key, value in dct.items():
            if hasattr(value, 'keys'):
                value = DotDict(value)
            self[key] = value
    def __missing__(self, key):
        # Reading an absent key creates and stores a nested DotDict.
        self[key] = DotDict()
        return self[key]
def dotset(obj, keys, value):
    """Set a possibly-dotted attribute path on ``obj`` to ``value``."""
    parts = keys.split('.')
    if len(parts) > 1:
        # Resolve everything but the final segment, then set on that.
        target = dotget(obj, '.'.join(parts[:-1]))
    else:
        target = obj
    setattr(target, parts[-1], value)
def dotget(obj, attr):
    """Resolve a dotted attribute path (e.g. 'a.b.c') against ``obj``."""
    target = obj
    for name in attr.split('.'):
        target = getattr(target, name)
    return target
def compare_object_values(a, b, attrs, cmp=lambda x,y: x==y, cmp_value=lambda x: x, flat=True, allow_empty_values=False):
    """Collect into a DotDict the attrs whose values differ between ``a``
    and ``b``; reported values are taken from ``a``.

    cmp: equality predicate; cmp_value: maps an attr name before reading
    the reported value; flat: store under the literal key instead of via
    dotset; allow_empty_values: also report falsy values.
    """
    ac = DotDict()
    bc = DotDict()
    changes = DotDict()
    # Snapshot the compared attributes of both objects.
    for key in attrs:
        setattr(ac, key, dotget(a, key))
        setattr(bc, key, dotget(b, key))
    for key in attrs:
        if not cmp(dotget(ac, key), dotget(bc, key)):
            value = dotget(ac, cmp_value(key))
            # Skip empty/falsy differences unless explicitly allowed.
            if not allow_empty_values and not value:
                continue
            if flat:
                setattr(changes, key, dotget(ac, key))
            else:
                dotset(changes, key, dotget(ac, key))
    return changes
| {
"repo_name": "futurice/google-apps-contacts-copier",
"path": "shared/dots.py",
"copies": "1",
"size": "1527",
"license": "bsd-3-clause",
"hash": -3392198599149352400,
"line_mean": 28.3653846154,
"line_max": 121,
"alpha_frac": 0.540929928,
"autogenerated": false,
"ratio": 3.592941176470588,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4633871104470588,
"avg_score": null,
"num_lines": null
} |
from functools import wraps
import operator
def opsame(op):
    """Guard a binary operator: yield NotImplemented on a type mismatch
    between the two operands, otherwise apply ``op``."""
    @wraps(op)
    def checked(self, other):
        if isinstance(other, type(self)):
            return op(self, other)
        return NotImplemented
    return checked
def get_op(name):
    """Resolve an operator name ('add' or '__add__') to a
    (callable, dunder_name) pair from the operator module."""
    if '_' not in name:
        name = '__%s__' % name
    return getattr(operator, name), name
def mkop_wraped(cls, name, by=None):
    """
    Build a forwarding operator whose first argument is wrapped in a
    ``cls`` instance before the operator is applied.

    Parameters
    ----------
    cls : type
        wrapper class used to wrap the first argument
    name : str
        operator name (with or without dunder underscores)
    by : str, optional
        attribute to read the wrapped object from instead of calling cls

    Return
    ------
    op : method
        forward operator implementation
    """
    op, name = get_op(name)
    @wraps(getattr(cls, name))
    def fwd_op(self, *args):
        if by:
            wrapped = getattr(self, by)
        else:
            wrapped = cls(self)
        return op(wrapped, *args)
    return fwd_op
def mkop_reflect(cls, name, by=None):
    """
    Build a reflected forwarding operator whose second argument is wrapped
    in a ``cls`` instance before the (non-reflected) operator is applied.

    Parameters
    ----------
    cls : type
        wrapper class used to wrap the second argument
    name : str
        name of the non-reflected operator
    by : str, optional
        attribute to read the wrapped object from instead of calling cls

    Return
    ------
    op : method
        reflected operator implementation
    """
    op, name = get_op(name)
    @wraps(getattr(cls, name))
    def reflect_op(self, other):
        if by:
            wrapped = getattr(self, by)
        else:
            wrapped = cls(self)
        return op(other, wrapped)
    return reflect_op
def autowraped_ops(cls, by=None, reflect=True):
    """
    Creates a dynamic mixin with operator forwarding to wraped instances
    Parameters
    ----------
    cls : type
        class that the object
    by : str
        instance attribute that is used to constructed wrapped objects
    reflect : bool
        also create reflected operator wrappings
    Return
    ------
    mixin : type
        dynamic mixin class with operator definitions
    """
    ops = {}
    # Skip dunders that every object already has (e.g. __eq__ on object),
    # forwarding only operator dunders that `cls` actually implements.
    special = set(dir(object))
    for name in dir(operator):
        if (name in special or
            not name.startswith('__') or not name.endswith('__')):
            continue
        # Derive the reflected name, e.g. __add__ -> __radd__.
        rname = '__r{}__'.format(name.strip('_'))
        if hasattr(cls, name):
            ops[name] = mkop_wraped(cls, name, by=by)
            if reflect:
                ops[rname] = mkop_reflect(cls, name, by=by)
    return type('Fwd' + cls.__name__, (object,), ops)
| {
"repo_name": "wabu/pyadds",
"path": "pyadds/ops.py",
"copies": "1",
"size": "2460",
"license": "mit",
"hash": -2893495618175582700,
"line_mean": 21.7777777778,
"line_max": 78,
"alpha_frac": 0.5845528455,
"autogenerated": false,
"ratio": 4.183673469387755,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5268226314887755,
"avg_score": null,
"num_lines": null
} |
from functools import wraps
import os, json
from flask import Flask, render_template, request, g, session, flash, \
redirect, url_for, abort, send_from_directory, request, Response
from flask.ext.openid import OpenID
from flask.ext.autoindex import AutoIndex
app = Flask(__name__)
# NOTE(review): session secret is hard-coded in source; should come from
# the environment in production.
app.secret_key = '\xa5\x10\xbfN3\x1f\t\xd0ec\xa1\xe8\xe7B\x1dU4!\xa1N@\xcf\xfe\xa2'
# Directory-listing helper rooted at ./templates (URL rules added manually).
idx=AutoIndex(app, add_url_rules=False, browse_root="templates")
#idx=AutoIndex(app, add_url_rules=False, browse_root="../..")
oid = OpenID(app)
@app.before_request
def before_request():
    # Reset the per-request user. The OpenID branch is a placeholder and
    # currently does nothing with the stored identity.
    g.user = None
    if 'openid' in session:
        pass
def check_auth():
    """Authentication is required only when DOMAIN is configured; in that
    case an OpenID identity must be present in the session."""
    if 'DOMAIN' not in os.environ:
        # No domain configured: the app is open.
        return True
    return 'openid' in session
def requires_auth(f):
    """View decorator: redirect unauthenticated requests to the login
    page, preserving the originally requested URL in ``next``."""
    @wraps(f)
    def decorated(*args, **kwargs):
        # The previous version read `request.authorization` into an unused
        # local; HTTP basic auth is not consulted here, only check_auth().
        if not check_auth():
            return redirect(url_for('login', next=request.url))
        return f(*args, **kwargs)
    return decorated
@app.route('/login', methods=['GET', 'POST'])
@oid.loginhandler
def login():
    """Start Google-hosted OpenID login for the configured DOMAIN."""
    if 'DOMAIN' not in os.environ:
        # `url_for` expects an endpoint name, not a path: the previous
        # `redirect(url_for('/'))` raised BuildError. Redirect to the
        # root URL directly.
        return redirect('/')
    domain = os.environ['DOMAIN']
    return oid.try_login("https://www.google.com/accounts/o8/site-xrds?hd=%s" % domain )
@oid.after_login
def create_or_login(resp):
    """This is called when login with OpenID succeeded and it's not
    necessary to figure out if this is the users's first login or not.
    This function has to redirect otherwise the user will be presented
    with a terrible URL which we certainly don't want.
    """
    # Persist the verified identity; check_auth() keys off this entry.
    session['openid'] = resp.identity_url
    return redirect(oid.get_next_url())
@app.route('/logout')
def logout():
    # Drop the OpenID identity (if any) and return to the next URL.
    session.pop('openid', None)
    return redirect(oid.get_next_url())
@app.route('/')
@app.route('/<path:path>')
@requires_auth
def autoindex(path="."):
    # Render a browsable directory listing for `path` (auth enforced).
    return idx.render_autoindex(path)
if __name__ == '__main__':
    port = int(os.environ.get("PORT", 5000))
    # Debug mode only when the DEBUG env var is exactly the string 'True'.
    DEBUG = os.environ.get('DEBUG') == 'True'
    app.run(debug=DEBUG, host='0.0.0.0', port=port)
| {
"repo_name": "baloo/sbt-repo-buildpack",
"path": "src/app/app.py",
"copies": "2",
"size": "2239",
"license": "mit",
"hash": -6654188274304083000,
"line_mean": 26.9875,
"line_max": 88,
"alpha_frac": 0.6400178651,
"autogenerated": false,
"ratio": 3.3417910447761194,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.498180890987612,
"avg_score": null,
"num_lines": null
} |
from functools import wraps
import os
from contextlib import contextmanager
import platform
from django.core.exceptions import ImproperlyConfigured
from django.utils.importlib import import_module
class DefaultedAttributes(object):
    """Attribute proxy: lookups fall back to the `defaults` mapping when
    the underlying object lacks the attribute. (Python 2 syntax.)"""
    def __init__(self, underlying, defaults):
        self.underlying = underlying
        self.defaults = defaults
    def __getattr__(self, name):
        if hasattr(self.underlying, name):
            return getattr(self.underlying, name)
        try:
            return self.defaults[name]
        except KeyError, error:
            # Re-raise as AttributeError so the proxy behaves like a
            # normal attribute lookup to callers.
            raise AttributeError("'%s' object has no attribute '%s'" % (self.underlying.__class__.__name__, name))
# WARNING: This is a near copy from django.template.loader.find_template_loader. Maybe I'm blind, but despite django's
# heavy use of string imports I couldn't find an exposed utility function like this in django's source.
def get_callable(callable):
    """Resolve a dotted-path string to the object it names; values that
    are not strings are returned unchanged. Near copy of django's
    template-loader resolution. (Python 2 syntax.)"""
    if isinstance(callable, basestring):
        module, attr = callable.rsplit('.', 1)
        try:
            mod = import_module(module)
        except ImportError, e:
            raise ImproperlyConfigured('Error importing ajaxerrors callable %s: "%s"' % (callable, e))
        try:
            callable = getattr(mod, attr)
        except AttributeError, e:
            raise ImproperlyConfigured('Error importing ajaxerrors callable %s: "%s"' % (callable, e))
    return callable
def only_on(system):
    """Decorator factory: the wrapped function only runs when
    ``platform.system()`` matches ``system``; otherwise it returns None."""
    def decorator(func):
        @wraps(func)
        def callable(*args, **kwargs):
            if platform.system() == system:
                return func(*args, **kwargs)
            return None
        return callable
    return decorator
@contextmanager
def altered_umask(umask):
    """Temporarily set the process umask for the duration of a with-block."""
    previous = os.umask(umask)
    try:
        yield
    finally:
        # Always restore the prior umask, even on error.
        os.umask(previous)
| {
"repo_name": "yaniv-aknin/django-ajaxerrors",
"path": "ajaxerrors/utils.py",
"copies": "1",
"size": "1815",
"license": "mit",
"hash": 5499710926605902000,
"line_mean": 33.9038461538,
"line_max": 118,
"alpha_frac": 0.6429752066,
"autogenerated": false,
"ratio": 4.448529411764706,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5591504618364705,
"avg_score": null,
"num_lines": null
} |
from functools import wraps
import os
from moto.xray import xray_backends
import aws_xray_sdk.core
from aws_xray_sdk.core.context import Context as AWSContext
from aws_xray_sdk.core.emitters.udp_emitter import UDPEmitter
class MockEmitter(UDPEmitter):
    """
    Replaces the code that sends UDP to local X-Ray daemon
    """
    def __init__(self, daemon_address='127.0.0.1:2000'):
        # Deliberately reads a nonsense env var so a real
        # AWS_XRAY_DAEMON_ADDRESS never overrides the default under test.
        address = os.getenv('AWS_XRAY_DAEMON_ADDRESS_YEAH_NOT_TODAY_MATE', daemon_address)
        self._ip, self._port = self._parse_address(address)
    def _xray_backend(self, region):
        # Look up the moto backend for a region (currently unused below).
        return xray_backends[region]
    def send_entity(self, entity):
        # Hack to get region
        # region = entity.subsegments[0].aws['region']
        # xray = self._xray_backend(region)
        # TODO store X-Ray data, pretty sure X-Ray needs refactor for this
        pass
    def _send_data(self, data):
        # Nothing should hit the raw UDP path while mocked.
        raise RuntimeError('Should not be running this')
def mock_xray_client(f):
    """
    Mocks the X-Ray sdk by pwning its evil singleton with our methods
    The X-Ray SDK has normally been imported and `patched()` called long before we start mocking.
    This means the Context() will be very unhappy if an env var isn't present, so we set that, save
    the old context, then supply our new context.
    We also patch the Emitter by subclassing the UDPEmitter class replacing its methods and pushing
    that into the recorder instance.
    """
    @wraps(f)
    def _wrapped(*args, **kwargs):
        print("Starting X-Ray Patch")
        # Remember the original env var value so it can be restored exactly.
        old_xray_context_var = os.environ.get('AWS_XRAY_CONTEXT_MISSING')
        os.environ['AWS_XRAY_CONTEXT_MISSING'] = 'LOG_ERROR'
        old_xray_context = aws_xray_sdk.core.xray_recorder._context
        old_xray_emitter = aws_xray_sdk.core.xray_recorder._emitter
        aws_xray_sdk.core.xray_recorder._context = AWSContext()
        aws_xray_sdk.core.xray_recorder._emitter = MockEmitter()
        try:
            return f(*args, **kwargs)
        finally:
            # Restore the recorder singletons and env var even on failure.
            if old_xray_context_var is None:
                del os.environ['AWS_XRAY_CONTEXT_MISSING']
            else:
                os.environ['AWS_XRAY_CONTEXT_MISSING'] = old_xray_context_var
            aws_xray_sdk.core.xray_recorder._emitter = old_xray_emitter
            aws_xray_sdk.core.xray_recorder._context = old_xray_context
    return _wrapped
class XRaySegment(object):
    """
    XRay is request oriented, when a request comes in, normally middleware like django (or automatically in lambda) will mark
    the start of a segment, this stay open during the lifetime of the request. During that time subsegments may be generated
    by calling other SDK aware services or using some boto functions. Once the request is finished, middleware will also stop
    the segment, thus causing it to be emitted via UDP.
    During testing we're going to have to control the start and end of a segment via context managers.
    """
    def __enter__(self):
        # sampling=1 forces the segment to actually be recorded.
        aws_xray_sdk.core.xray_recorder.begin_segment(name='moto_mock', traceid=None, parent_id=None, sampling=1)
        return self
    def __exit__(self, exc_type, exc_val, exc_tb):
        aws_xray_sdk.core.xray_recorder.end_segment()
| {
"repo_name": "Affirm/moto",
"path": "moto/xray/mock_client.py",
"copies": "8",
"size": "3225",
"license": "apache-2.0",
"hash": 1833060008155643100,
"line_mean": 37.8554216867,
"line_max": 125,
"alpha_frac": 0.6737984496,
"autogenerated": false,
"ratio": 3.5833333333333335,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.8257131782933333,
"avg_score": null,
"num_lines": null
} |
from functools import wraps
import os
from os.path import dirname
from threading import Thread
from fabric.api import *
from flask import flash
from pbkdf2 import crypt
from vagrant import Vagrant
from MAdmin import db
from MAdmin.sql.ORM import Device
def hash_password(password):
    """Return a salted PBKDF2 crypt hash of ``password``."""
    return crypt(password)
def verify_password(password_hash, guessed_password):
    """True when ``guessed_password`` re-hashes (with the stored salt) to
    ``password_hash``."""
    # NOTE(review): plain == comparison; not constant-time.
    return password_hash == crypt(guessed_password, password_hash)
def flash_errors(form):
    """Flash every validation error message attached to ``form``."""
    # Only the messages are used, so iterate values() rather than items()
    # (the field names were previously fetched and discarded).
    for errors in form.errors.values():
        for error in errors:
            flash(u"%s" % error)
def async(f):
    # Decorator: run `f` in a background thread.
    # NOTE(review): `async` became a reserved keyword in Python 3.7; this
    # module is Python 2 only.
    @wraps(f)
    def wrapper(*args, **kwargs):
        # Fire-and-forget: the thread is never joined and the wrapper
        # returns None; exceptions in `f` die in the thread.
        thr = Thread(target=f, args=args, kwargs=kwargs)
        thr.start()
    return wrapper
@async
def make_box(hostname, device_os):
    """Provision a Vagrant box for `hostname` in a background thread and
    mark the device online in the database once it is up."""
    # Work inside <repo>/boxes/<hostname>, creating it if needed.
    os.chdir(dirname(dirname(dirname(__file__))) + '/boxes')
    if not os.path.exists(hostname):
        os.mkdir(hostname)
    os.chdir(hostname)
    file_location = os.getcwd()
    box = Vagrant(quiet_stderr=False, quiet_stdout=False, root=file_location)
    try:
        box.box_add('chef/centos-7.0', 'https://atlas.hashicorp.com/chef/boxes/centos-7.0')
        box.box_add('ubuntu/trusty64', 'https://atlas.hashicorp.com/ubuntu/boxes/trusty64')
    except:
        # NOTE(review): bare except treats "box already added" as success,
        # but it also hides real download failures.
        print 'Box probably already exists'
    box.init(box_name=device_os)
    box.up()
    # Point Fabric at the freshly booted box.
    env.hosts = [box.user_hostname_port()]
    env.key_filename = box.keyfile()
    env.disable_known_hosts = True
    # Flag the device record as booted and reachable.
    device = Device.query.filter(Device.hostname == hostname).first()
    device.box_online = True
    device.pending_boot = False
    db.session.commit()
| {
"repo_name": "MattTuri/M-Admin",
"path": "MAdmin/utils/__init__.py",
"copies": "1",
"size": "1707",
"license": "mit",
"hash": -7899169037185777000,
"line_mean": 23.7391304348,
"line_max": 91,
"alpha_frac": 0.6672524897,
"autogenerated": false,
"ratio": 3.476578411405295,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4643830901105295,
"avg_score": null,
"num_lines": null
} |
from functools import wraps
import os
from platform import system as _curos
import re
import subprocess
CUR_OS = _curos()  # platform.system() result, e.g. 'Linux' or 'Windows'
IS_WIN = CUR_OS in ['Windows', 'cli']
IS_NIX = (not IS_WIN) and any(
    CUR_OS.startswith(i) for i in
    ['CYGWIN', 'MSYS', 'Linux', 'Darwin', 'SunOS',
     'FreeBSD', 'NetBSD', 'OpenBSD'])
# Matches ANSI escape sequences (e.g. for stripping terminal styling).
RE_ANSI = re.compile(r"\x1b\[[;\d]*[A-Za-z]")
# Py2/3 compat. Empty conditional to avoid coverage
if True:  # pragma: no cover
    # Prefer the Python 2 builtins, falling back to their Py3 names.
    try:
        _range = xrange
    except NameError:
        _range = range
    try:
        _unich = unichr
    except NameError:
        _unich = chr
    try:
        _unicode = unicode
    except NameError:
        _unicode = str
    # colorama is only needed for ANSI support on Windows consoles.
    try:
        if IS_WIN:
            import colorama
        else:
            raise ImportError
    except ImportError:
        colorama = None
    else:
        # strip=False keeps ANSI codes; older colorama lacks the kwarg.
        try:
            colorama.init(strip=False)
        except TypeError:
            colorama.init()
    # WeakSet fallback: a plain set keeps strong references (py<2.7).
    try:
        from weakref import WeakSet
    except ImportError:
        WeakSet = set
    try:
        _basestring = basestring
    except NameError:
        _basestring = str
try:  # py>=2.7,>=3.1
    from collections import OrderedDict as _OrderedDict
except ImportError:
    try:  # older Python versions with backported ordereddict lib
        from ordereddict import OrderedDict as _OrderedDict
    except ImportError:  # older Python versions without ordereddict lib
        # Py2.6,3.0 compat, from PEP 372
        from collections import MutableMapping
        class _OrderedDict(dict, MutableMapping):
            # Insertion order is tracked in the side list `_keys`.
            # Methods with direct access to underlying attributes
            def __init__(self, *args, **kwds):
                if len(args) > 1:
                    raise TypeError('expected at 1 argument, got %d',
                                    len(args))
                if not hasattr(self, '_keys'):
                    self._keys = []
                self.update(*args, **kwds)
            def clear(self):
                del self._keys[:]
                dict.clear(self)
            def __setitem__(self, key, value):
                # Record only first insertion; overwrites keep position.
                if key not in self:
                    self._keys.append(key)
                dict.__setitem__(self, key, value)
            def __delitem__(self, key):
                dict.__delitem__(self, key)
                self._keys.remove(key)
            def __iter__(self):
                return iter(self._keys)
            def __reversed__(self):
                return reversed(self._keys)
            def popitem(self):
                # LIFO: remove and return the most recently added pair.
                if not self:
                    raise KeyError
                key = self._keys.pop()
                value = dict.pop(self, key)
                return key, value
            def __reduce__(self):
                # Pickle support: rebuild from ordered (key, value) pairs.
                items = [[k, self[k]] for k in self]
                inst_dict = vars(self).copy()
                inst_dict.pop('_keys', None)
                return self.__class__, (items,), inst_dict
            # Methods with indirect access via the above methods
            setdefault = MutableMapping.setdefault
            update = MutableMapping.update
            pop = MutableMapping.pop
            keys = MutableMapping.keys
            values = MutableMapping.values
            items = MutableMapping.items
            def __repr__(self):
                pairs = ', '.join(map('%r: %r'.__mod__, self.items()))
                return '%s({%s})' % (self.__class__.__name__, pairs)
            def copy(self):
                return self.__class__(self)
            @classmethod
            def fromkeys(cls, iterable, value=None):
                d = cls()
                for key in iterable:
                    d[key] = value
                return d
class FormatReplace(object):
    """
    Stand-in object whose formatted representation is always ``replace``,
    no matter what format spec is supplied.

    >>> a = FormatReplace('something')
    >>> "{:5d}".format(a)
    'something'
    """
    def __init__(self, replace=''):
        self.replace = replace
        self.format_called = 0

    def __format__(self, _):
        # Count how many times formatting was requested.
        self.format_called = self.format_called + 1
        return self.replace
class Comparable(object):
    """Mixin deriving all rich comparisons from ``self._comparable``
    (assumed to be provided by the subclass as an attr/@property)."""
    def __eq__(self, other):
        return self._comparable == other._comparable

    def __ne__(self, other):
        return not self == other

    def __lt__(self, other):
        return self._comparable < other._comparable

    def __le__(self, other):
        return (self < other) or (self == other)

    def __gt__(self, other):
        return not self <= other

    def __ge__(self, other):
        return not self < other
class ObjectWrapper(object):
    """Transparent proxy: plain attribute reads and writes are forwarded
    to the wrapped object; use wrapper_getattr/wrapper_setattr to touch
    the wrapper itself."""
    def __getattr__(self, name):
        return getattr(self._wrapped, name)
    def __setattr__(self, name, value):
        return setattr(self._wrapped, name, value)
    def wrapper_getattr(self, name):
        """Actual `self.getattr` rather than self._wrapped.getattr"""
        try:
            return object.__getattr__(self, name)
        except AttributeError:  # py2
            return getattr(self, name)
    def wrapper_setattr(self, name, value):
        """Actual `self.setattr` rather than self._wrapped.setattr"""
        return object.__setattr__(self, name, value)
    def __init__(self, wrapped):
        """
        Thin wrapper around a given object
        """
        # Must bypass our own __setattr__, which would otherwise forward
        # to the (not yet stored) wrapped object.
        self.wrapper_setattr('_wrapped', wrapped)
class SimpleTextIOWrapper(ObjectWrapper):
    """
    Change only `.write()` of the wrapped object by encoding the passed
    value and passing the result to the wrapped object's `.write()` method.
    """
    # pylint: disable=too-few-public-methods
    def __init__(self, wrapped, encoding):
        super(SimpleTextIOWrapper, self).__init__(wrapped)
        # Stored on the wrapper itself, not forwarded to the stream.
        self.wrapper_setattr('encoding', encoding)
    def write(self, s):
        """
        Encode `s` and pass to the wrapped object's `.write()` method.
        """
        return self._wrapped.write(s.encode(self.wrapper_getattr('encoding')))
class CallbackIOWrapper(ObjectWrapper):
    """Stream proxy reporting the length of each read/write to a callback
    (e.g. for progress reporting)."""
    def __init__(self, callback, stream, method="read"):
        """
        Wrap a given `file`-like object's `read()` or `write()` to report
        lengths to the given `callback`
        """
        super(CallbackIOWrapper, self).__init__(stream)
        func = getattr(stream, method)
        if method == "write":
            @wraps(func)
            def write(data, *args, **kwargs):
                res = func(data, *args, **kwargs)
                # Report the number of items submitted for writing.
                callback(len(data))
                return res
            self.wrapper_setattr('write', write)
        elif method == "read":
            @wraps(func)
            def read(*args, **kwargs):
                data = func(*args, **kwargs)
                # Report the number of items actually read.
                callback(len(data))
                return data
            self.wrapper_setattr('read', read)
        else:
            raise KeyError("Can only wrap read/write methods")
def _is_utf(encoding):
try:
u'\u2588\u2589'.encode(encoding)
except UnicodeEncodeError: # pragma: no cover
return False
except Exception: # pragma: no cover
try:
return encoding.lower().startswith('utf-') or ('U8' == encoding)
except:
return False
else:
return True
def _supports_unicode(fp):
    """Whether the stream declares an encoding that supports UTF output."""
    try:
        encoding = fp.encoding
    except AttributeError:
        # No declared encoding on the stream: assume not.
        return False
    return _is_utf(encoding)
def _is_ascii(s):
if isinstance(s, str):
for c in s:
if ord(c) > 255:
return False
return True
return _supports_unicode(s)
def _environ_cols_wrapper():  # pragma: no cover
    """
    Return a function which gets width and height of console
    (linux,osx,windows,cygwin).
    """
    _environ_cols = None
    if IS_WIN:
        _environ_cols = _environ_cols_windows
        # NOTE(review): `_environ_cols_windows` is a function object and
        # never None, so this tput fallback branch is dead code.
        if _environ_cols is None:
            _environ_cols = _environ_cols_tput
    if IS_NIX:
        _environ_cols = _environ_cols_linux
    return _environ_cols
def _environ_cols_windows(fp):  # pragma: no cover
    # Query the Win32 console screen buffer for the window width of the
    # handle corresponding to `fp`; returns None on any failure.
    try:
        from ctypes import windll, create_string_buffer
        import struct
        from sys import stdin, stdout
        # Map the stream to its standard-handle id (-10/-11/-12).
        io_handle = -12  # assume stderr
        if fp == stdin:
            io_handle = -10
        elif fp == stdout:
            io_handle = -11
        h = windll.kernel32.GetStdHandle(io_handle)
        csbi = create_string_buffer(22)
        res = windll.kernel32.GetConsoleScreenBufferInfo(h, csbi)
        if res:
            # Unpack CONSOLE_SCREEN_BUFFER_INFO; only the window edges
            # are used here.
            (_bufx, _bufy, _curx, _cury, _wattr, left, _top, right, _bottom,
             _maxx, _maxy) = struct.unpack("hhhhHhhhhhh", csbi.raw)
            # nlines = bottom - top + 1
            return right - left  # +1
    except:
        pass
    return None
def _environ_cols_tput(*_):  # pragma: no cover
    """cygwin xterm (windows): terminal width via `tput cols`."""
    try:
        import shlex
        # `check_call` returns the process exit status (0 on success),
        # not its output, so the old code always yielded 0 columns.
        # `check_output` captures the count that tput prints.
        cols = int(subprocess.check_output(shlex.split('tput cols')))
        # rows = int(subprocess.check_output(shlex.split('tput lines')))
        return cols
    except Exception:
        pass
    return None
def _environ_cols_linux(fp):  # pragma: no cover
    # Terminal width via the TIOCGWINSZ ioctl; falls back to the COLUMNS
    # env var, then None.
    try:
        from termios import TIOCGWINSZ
        from fcntl import ioctl
        from array import array
    except ImportError:
        return None
    else:
        try:
            # TIOCGWINSZ fills (rows, cols, xpixel, ypixel) as shorts;
            # index 1 is the column count.
            return array('h', ioctl(fp, TIOCGWINSZ, '\0' * 8))[1]
        except:
            try:
                return int(os.environ["COLUMNS"]) - 1
            except KeyError:
                return None
def _term_move_up(): # pragma: no cover
return '' if (os.name == 'nt') and (colorama is None) else '\x1b[A'
try:
    # TODO consider using wcswidth third-party package for 0-width characters
    from unicodedata import east_asian_width
except ImportError:
    # Without unicodedata, fall back to counting code points.
    _text_width = len
else:
    def _text_width(s):
        # Full-width (F) and wide (W) East-Asian characters occupy two
        # terminal columns; everything else counts as one.
        return sum(
            2 if east_asian_width(ch) in 'FW' else 1 for ch in _unicode(s))
| {
"repo_name": "olafhauk/mne-python",
"path": "mne/externals/tqdm/_tqdm/utils.py",
"copies": "14",
"size": "10090",
"license": "bsd-3-clause",
"hash": -7324371763762669000,
"line_mean": 28.2463768116,
"line_max": 78,
"alpha_frac": 0.5325074331,
"autogenerated": false,
"ratio": 4.280865507000424,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.0008454106280193236,
"num_lines": 345
} |
from functools import wraps
import os
import logging
import shutil
log = logging.getLogger(__name__)
def restoring_chdir(fn):
    """Decorator: restore the current working directory after ``fn``
    returns or raises."""
    # XXX:dc: This would be better off in a neutral module
    @wraps(fn)
    def decorator(*args, **kw):
        # Capture the cwd BEFORE entering the try block: previously it was
        # assigned inside `try`, so if os.getcwd() raised, the `finally`
        # clause hit an unbound `path`.
        path = os.getcwd()
        try:
            return fn(*args, **kw)
        finally:
            os.chdir(path)
    return decorator
class BaseBuilder(object):
"""
The Base for all Builders. Defines the API for subclasses.
Expects subclasses to define ``old_artifact_path``,
which points at the directory where artifacts should be copied from.
"""
_force = False
# old_artifact_path = ..
    def __init__(self, version, force=False):
        # `version` is a project-version model; presumably provides
        # `.project`, `.slug` and `.project.artifact_path` — TODO confirm.
        # `self.type` is expected to be set by the subclass.
        self.version = version
        self._force = force
        self.target = self.version.project.artifact_path(version=self.version.slug, type=self.type)
def force(self, **kwargs):
"""
An optional step to force a build even when nothing has changed.
"""
log.info("Forcing a build")
self._force = True
def build(self, id=None, **kwargs):
"""
Do the actual building of the documentation.
"""
raise NotImplementedError
def move(self, **kwargs):
"""
Move the documentation from it's generated place to its artifact directory.
"""
if os.path.exists(self.old_artifact_path):
if os.path.exists(self.target):
shutil.rmtree(self.target)
log.info("Copying %s on the local filesystem" % self.type)
shutil.copytree(self.old_artifact_path, self.target)
else:
log.warning("Not moving docs, because the build dir is unknown.")
def clean(self, **kwargs):
"""
Clean the path where documentation will be built
"""
if os.path.exists(self.old_artifact_path):
shutil.rmtree(self.old_artifact_path)
log.info("Removing old artifact path: %s" % self.old_artifact_path)
def docs_dir(self, docs_dir=None, **kwargs):
"""
Handle creating a custom docs_dir if it doesn't exist.
"""
if not docs_dir:
checkout_path = self.version.project.checkout_path(self.version.slug)
for possible_path in ['docs', 'doc', 'Doc', 'book']:
if os.path.exists(os.path.join(checkout_path, '%s' % possible_path)):
docs_dir = possible_path
break
if not docs_dir:
# Fallback to defaulting to '.'
docs_dir = '.'
return docs_dir
def create_index(self, extension='md', **kwargs):
"""
Create an index file if it needs it.
"""
docs_dir = self.docs_dir()
index_filename = os.path.join(docs_dir, 'index.{ext}'.format(ext=extension))
if not os.path.exists(index_filename):
readme_filename = os.path.join(docs_dir, 'README.{ext}'.format(ext=extension))
if os.path.exists(readme_filename):
os.system('mv {readme} {index}'.format(index=index_filename, readme=readme_filename))
else:
index_file = open(index_filename, 'w+')
index_text = """
Welcome to Read the Docs
------------------------
This is an autogenerated index file.
Please create a ``{dir}/index.{ext}`` or ``{dir}/README.{ext}`` file with your own content.
If you want to use another markup, choose a different builder in your settings.
"""
index_file.write(index_text.format(dir=docs_dir, ext=extension))
index_file.close()
| {
"repo_name": "raven47git/readthedocs.org",
"path": "readthedocs/doc_builder/base.py",
"copies": "1",
"size": "3646",
"license": "mit",
"hash": 4686593517819269000,
"line_mean": 30.4310344828,
"line_max": 101,
"alpha_frac": 0.5787164015,
"autogenerated": false,
"ratio": 4.0874439461883405,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.516616034768834,
"avg_score": null,
"num_lines": null
} |
from functools import wraps
import os
import logging
import shutil
log = logging.getLogger(__name__)
def restoring_chdir(fn):
    """Decorator: run *fn*, then restore the original working directory,
    even if *fn* raised."""
    #XXX:dc: This would be better off in a neutral module
    @wraps(fn)
    def decorator(*args, **kw):
        # Capture the cwd BEFORE the try block: if os.getcwd() raised
        # inside `try`, the `finally` clause would hit an unbound `path`
        # and mask the real error with a NameError.
        path = os.getcwd()
        try:
            return fn(*args, **kw)
        finally:
            os.chdir(path)
    return decorator
class BaseBuilder(object):
    """
    The Base for all Builders. Defines the API for subclasses.
    Expects subclasses to define ``old_artifact_path``,
    which points at the directory where artifacts should be copied from.
    """

    # Whether a rebuild was explicitly requested (see force()).
    _force = False
    # old_artifact_path = ..

    def __init__(self, version, force=False):
        # NOTE(review): reads `self.type`, which must be supplied by the
        # subclass before this __init__ runs -- confirm in subclasses.
        self.version = version
        self._force = force
        self.target = self.version.project.artifact_path(version=self.version.slug, type=self.type)

    def force(self, **kwargs):
        """
        An optional step to force a build even when nothing has changed.
        """
        log.info("Forcing a build")
        self._force = True

    def build(self, id=None, **kwargs):
        """
        Do the actual building of the documentation.
        """
        raise NotImplementedError

    def move(self, **kwargs):
        """
        Move the documentation from its generated place to its artifact directory.
        """
        if os.path.exists(self.old_artifact_path):
            if os.path.exists(self.target):
                # Remove any previous artifact tree so copytree can recreate it.
                shutil.rmtree(self.target)
            log.info("Copying %s on the local filesystem" % self.type)
            shutil.copytree(self.old_artifact_path, self.target)
        else:
            log.warning("Not moving docs, because the build dir is unknown.")
| {
"repo_name": "vincentbernat/readthedocs.org",
"path": "readthedocs/doc_builder/base.py",
"copies": "2",
"size": "1720",
"license": "mit",
"hash": 2842243231134400500,
"line_mean": 27.6666666667,
"line_max": 99,
"alpha_frac": 0.6040697674,
"autogenerated": false,
"ratio": 4.226044226044226,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5830113993444226,
"avg_score": null,
"num_lines": null
} |
from functools import wraps
import os
import logging
log = logging.getLogger(__name__)
def restoring_chdir(fn):
    """Decorator: run *fn*, then restore the original working directory,
    even if *fn* raised."""
    #XXX:dc: This would be better off in a neutral module
    @wraps(fn)
    def decorator(*args, **kw):
        # Capture the cwd BEFORE the try block: if os.getcwd() raised
        # inside `try`, the `finally` clause would hit an unbound `path`
        # and mask the real error with a NameError.
        path = os.getcwd()
        try:
            return fn(*args, **kw)
        finally:
            os.chdir(path)
    return decorator
class BaseBuilder(object):
    """
    The Base for all Builders. Defines the API for subclasses.
    All workflow steps need to return true, otherwise it is assumed something
    went wrong and the Builder will stop
    """

    # Ordered steps executed by run(); each must return a truthy value.
    workflow = ['clean', 'build', 'move']
    # NOTE(review): this class attribute is dead -- the `force` method
    # defined below shadows it in the class namespace, so `self.force`
    # is the (truthy) bound method until force() is called.
    force = False

    def __init__(self, version):
        self.version = version

    def run(self, **kwargs):
        # Execute each workflow step in order, aborting on a falsy result.
        # NOTE(review): `assert` is stripped under `python -O`; an explicit
        # check-and-raise would be more robust.
        for step in self.workflow:
            fn = getattr(self, step)
            result = fn()
            assert result

    @restoring_chdir
    def force(self, **kwargs):
        """
        An optional step to force a build even when nothing has changed.
        """
        log.info("Forcing a build")
        # NOTE(review): rebinds the instance attribute from the bound
        # method to True, so a second self.force() call on the same
        # instance raises TypeError. Later revisions of this class renamed
        # the flag to `_force` to avoid the clash.
        self.force = True

    def clean(self, **kwargs):
        """
        Clean up the version so it's ready for usage.
        This is used to add RTD specific stuff to Sphinx, and to
        implement whitelists on projects as well.
        It is guaranteed to be called before your project is built.
        """
        raise NotImplementedError

    def build(self, id=None, **kwargs):
        """
        Do the actual building of the documentation.
        """
        raise NotImplementedError

    def move(self, **kwargs):
        """
        Move the documentation from it's generated place to its final home.
        This needs to understand both a single server dev environment,
        as well as a multi-server environment.
        """
        raise NotImplementedError

    @property
    def changed(self):
        """
        Says whether the documentation has changed, and requires further action.
        This is mainly used to short-circuit more expensive builds of other
        output formats if the project docs didn't change on an update.
        Subclasses are recommended to override for more efficient builds.
        Defaults to `True`
        """
        return True
| {
"repo_name": "ojii/readthedocs.org",
"path": "readthedocs/doc_builder/base.py",
"copies": "1",
"size": "2260",
"license": "mit",
"hash": -8554999937035007000,
"line_mean": 26.2289156627,
"line_max": 80,
"alpha_frac": 0.6066371681,
"autogenerated": false,
"ratio": 4.737945492662474,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5844582660762475,
"avg_score": null,
"num_lines": null
} |
from functools import wraps
import os
import logging
log = logging.getLogger(__name__)
def restoring_chdir(fn):
    """Decorator: run *fn*, then restore the original working directory,
    even if *fn* raised."""
    #XXX:dc: This would be better off in a neutral module
    @wraps(fn)
    def decorator(*args, **kw):
        # Capture the cwd BEFORE the try block: if os.getcwd() raised
        # inside `try`, the `finally` clause would hit an unbound `path`
        # and mask the real error with a NameError.
        path = os.getcwd()
        try:
            return fn(*args, **kw)
        finally:
            os.chdir(path)
    return decorator
class BaseBuilder(object):
    """
    The Base for all Builders. Defines the API for subclasses.
    All workflow steps need to return true, otherwise it is assumed something
    went wrong and the Builder will stop
    """

    # Ordered steps executed by run(); each must return a truthy value.
    workflow = ['clean', 'build', 'move']
    # NOTE(review): this class attribute is dead -- the `force` method
    # defined below shadows it in the class namespace, so `self.force`
    # is the (truthy) bound method until force() is called.
    force = False

    def __init__(self, version):
        self.version = version

    def run(self, **kwargs):
        # Execute each workflow step in order, aborting on a falsy result.
        # NOTE(review): `assert` is stripped under `python -O`; an explicit
        # check-and-raise would be more robust.
        for step in self.workflow:
            fn = getattr(self, step)
            result = fn()
            assert result

    @restoring_chdir
    def force(self, **kwargs):
        """
        An optional step to force a build even when nothing has changed.
        """
        log.info("Forcing a build")
        # NOTE(review): rebinds the instance attribute from the bound
        # method to True, so a second self.force() call on the same
        # instance raises TypeError. Later revisions of this class renamed
        # the flag to `_force` to avoid the clash.
        self.force = True

    def clean(self, **kwargs):
        """
        Clean up the version so it's ready for usage.
        This is used to add RTD specific stuff to Sphinx, and to
        implement whitelists on projects as well.
        It is guaranteed to be called before your project is built.
        """
        raise NotImplementedError

    def build(self, id=None, **kwargs):
        """
        Do the actual building of the documentation.
        """
        raise NotImplementedError

    def move(self, **kwargs):
        """
        Move the documentation from it's generated place to its final home.
        This needs to understand both a single server dev environment,
        as well as a multi-server environment.
        """
        raise NotImplementedError

    @property
    def changed(self):
        """Says whether the documentation has changed, and requires further
        action.
        This is mainly used to short-circuit more expensive builds of other
        output formats if the project docs didn't change on an update.
        Subclasses are recommended to override for more efficient builds.
        Defaults to `True`
        """
        return True
| {
"repo_name": "1suming/readthedocs.org",
"path": "readthedocs/doc_builder/base.py",
"copies": "3",
"size": "2261",
"license": "mit",
"hash": -3299602246900728300,
"line_mean": 25.6,
"line_max": 77,
"alpha_frac": 0.6063688633,
"autogenerated": false,
"ratio": 4.740041928721174,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.6846410792021174,
"avg_score": null,
"num_lines": null
} |
from functools import wraps
import os
import pickle
import six
from .types import Command
# Shared writable sink for discarding subprocess output. Opened once at
# import time and kept open for the life of the process (never closed).
DEVNULL = open(os.devnull, 'w')
def which(program):
    """Return the full path to `program`, or `None` when it cannot be found
    as an executable regular file."""
    def _executable(candidate):
        # A usable hit must be a regular file with the execute bit set.
        return os.path.isfile(candidate) and os.access(candidate, os.X_OK)

    directory, _ = os.path.split(program)
    if directory:
        # An explicit path was given: accept it only if directly executable.
        return program if _executable(program) else None
    # Bare command name: scan every PATH entry, tolerating quoted components.
    for entry in os.environ["PATH"].split(os.pathsep):
        candidate = os.path.join(entry.strip('"'), program)
        if _executable(candidate):
            return candidate
    return None
def wrap_settings(params):
    """Decorator factory: merge default values from *params* into the
    ``settings`` argument before calling the wrapped rule.

    Usage:

        @wrap_settings({'apt': '/usr/bin/apt'})
        def match(command, settings):
            print(settings.apt)
    """
    def decorator(fn):
        @wraps(fn)
        def wrapper(command, settings):
            merged = settings.update(**params)
            return fn(command, merged)
        return wrapper
    return decorator
def sudo_support(fn):
    """Removes sudo before calling fn and adds it after."""
    @wraps(fn)
    def wrapper(command, settings):
        script = command.script
        if not script.startswith('sudo '):
            # No sudo prefix: pass the command straight through.
            return fn(command, settings)
        # Strip the leading 'sudo ' (5 chars) and re-run the rule.
        stripped = Command(script[5:],
                           command.stdout,
                           command.stderr)
        result = fn(stripped, settings)
        # Only string results get the prefix restored; other truthy or
        # falsy results are returned unchanged.
        if result and isinstance(result, six.string_types):
            return u'sudo {}'.format(result)
        return result
    return wrapper
def memoize(fn):
    """Caches previous calls to the function."""
    cache = {}

    @wraps(fn)
    def wrapper(*args, **kwargs):
        # pickle yields a hashable key even for unhashable arguments
        # (lists, dicts), which lru_cache could not handle.
        key = pickle.dumps((args, kwargs))
        try:
            return cache[key]
        except KeyError:
            cache[key] = result = fn(*args, **kwargs)
            return result
    return wrapper
| {
"repo_name": "ytjiang/thefuck",
"path": "thefuck/utils.py",
"copies": "1",
"size": "1895",
"license": "mit",
"hash": -1633204491955198200,
"line_mean": 22.6875,
"line_max": 66,
"alpha_frac": 0.5646437995,
"autogenerated": false,
"ratio": 4.164835164835165,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0,
"num_lines": 80
} |
from functools import wraps
import os
import pkgutil
from threading import local
import warnings
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from django.utils.functional import cached_property
from django.utils.importlib import import_module
from django.utils.module_loading import import_by_path
from django.utils._os import upath
from django.utils import six
# Alias of the default entry in settings.DATABASES.
DEFAULT_DB_ALIAS = 'default'


# PEP 249-style exception hierarchy, re-exported by Django so callers can
# catch database errors without importing backend-specific modules.
# (StandardError only exists on Python 2, hence the six.PY3 switch.)
class Error(Exception if six.PY3 else StandardError):
    pass


class InterfaceError(Error):
    pass


class DatabaseError(Error):
    pass


class DataError(DatabaseError):
    pass


class OperationalError(DatabaseError):
    pass


class IntegrityError(DatabaseError):
    pass


class InternalError(DatabaseError):
    pass


class ProgrammingError(DatabaseError):
    pass


class NotSupportedError(DatabaseError):
    pass
class DatabaseErrorWrapper(object):
    """
    Context manager and decorator that re-throws backend-specific database
    exceptions using Django's common wrappers.
    """

    def __init__(self, wrapper):
        """
        wrapper is a database wrapper.
        It must have a Database attribute defining PEP-249 exceptions.
        """
        self.wrapper = wrapper

    def __enter__(self):
        pass

    def __exit__(self, exc_type, exc_value, traceback):
        if exc_type is None:
            return
        # Ordered most-specific first: the first matching Django type wins,
        # with DatabaseError / InterfaceError / Error as catch-alls last.
        for dj_exc_type in (
                DataError,
                OperationalError,
                IntegrityError,
                InternalError,
                ProgrammingError,
                NotSupportedError,
                DatabaseError,
                InterfaceError,
                Error,
        ):
            db_exc_type = getattr(self.wrapper.Database, dj_exc_type.__name__)
            if issubclass(exc_type, db_exc_type):
                # Under Python 2.6, exc_value can still be a string.
                try:
                    args = tuple(exc_value.args)
                except AttributeError:
                    args = (exc_value,)
                dj_exc_value = dj_exc_type(*args)
                # Chain the original backend exception for debugging.
                dj_exc_value.__cause__ = exc_value
                # Only set the 'errors_occurred' flag for errors that may make
                # the connection unusable.
                if dj_exc_type not in (DataError, IntegrityError):
                    self.wrapper.errors_occurred = True
                six.reraise(dj_exc_type, dj_exc_value, traceback)

    def __call__(self, func):
        # Decorator form: run func's body inside this context manager.
        @wraps(func)
        def inner(*args, **kwargs):
            with self:
                return func(*args, **kwargs)
        return inner
def load_backend(backend_name):
    """
    Import and return the ``base`` module of the given database backend.

    ``backend_name`` is a dotted path such as 'django.db.backends.sqlite3'.
    On ImportError: if the name is not one of the built-in backends, raise
    ImproperlyConfigured listing the valid choices; otherwise re-raise,
    since the failure is then a bug inside a known backend.
    """
    # Look for a fully qualified database backend name
    try:
        return import_module('.base', backend_name)
    except ImportError as e_user:
        # The database backend wasn't found. Display a helpful error message
        # listing all possible (built-in) database backends.
        backend_dir = os.path.join(os.path.dirname(upath(__file__)), 'backends')
        try:
            builtin_backends = [
                name for _, name, ispkg in pkgutil.iter_modules([backend_dir])
                if ispkg and name != 'dummy']
        except EnvironmentError:
            # Could not list the backends directory; report with no choices.
            builtin_backends = []
        if backend_name not in ['django.db.backends.%s' % b for b in
                                builtin_backends]:
            backend_reprs = map(repr, sorted(builtin_backends))
            error_msg = ("%r isn't an available database backend.\n"
                         "Try using 'django.db.backends.XXX', where XXX "
                         "is one of:\n %s\nError was: %s" %
                         (backend_name, ", ".join(backend_reprs), e_user))
            raise ImproperlyConfigured(error_msg)
        else:
            # If there's some other error, this must be an error in Django
            raise
class ConnectionDoesNotExist(Exception):
    """Raised when an unknown database alias is requested."""
    pass
class ConnectionHandler(object):
    """Dict-like registry of database connections, keyed by alias.

    Connections are created lazily on first access and stored per-thread.
    """

    def __init__(self, databases=None):
        """
        databases is an optional dictionary of database definitions (structured
        like settings.DATABASES).
        """
        self._databases = databases
        # Thread-local storage: each thread gets its own connection objects.
        self._connections = local()

    @cached_property
    def databases(self):
        # Lazily fall back to settings.DATABASES; an empty dict receives a
        # dummy default entry so the rest of Django can assume one exists.
        if self._databases is None:
            self._databases = settings.DATABASES
        if self._databases == {}:
            self._databases = {
                DEFAULT_DB_ALIAS: {
                    'ENGINE': 'django.db.backends.dummy',
                },
            }
        if DEFAULT_DB_ALIAS not in self._databases:
            raise ImproperlyConfigured("You must define a '%s' database" % DEFAULT_DB_ALIAS)
        return self._databases

    def ensure_defaults(self, alias):
        """
        Puts the defaults into the settings dictionary for a given connection
        where no settings is provided.

        Mutates the settings dict for *alias* in place.
        """
        try:
            conn = self.databases[alias]
        except KeyError:
            raise ConnectionDoesNotExist("The connection %s doesn't exist" % alias)
        conn.setdefault('ATOMIC_REQUESTS', False)
        if settings.TRANSACTIONS_MANAGED:
            warnings.warn(
                "TRANSACTIONS_MANAGED is deprecated. Use AUTOCOMMIT instead.",
                PendingDeprecationWarning, stacklevel=2)
            # Legacy behavior: managed transactions imply autocommit off.
            conn.setdefault('AUTOCOMMIT', False)
        # No-op when the deprecated branch above already set AUTOCOMMIT.
        conn.setdefault('AUTOCOMMIT', True)
        conn.setdefault('ENGINE', 'django.db.backends.dummy')
        if conn['ENGINE'] == 'django.db.backends.' or not conn['ENGINE']:
            conn['ENGINE'] = 'django.db.backends.dummy'
        conn.setdefault('CONN_MAX_AGE', 0)
        conn.setdefault('OPTIONS', {})
        conn.setdefault('TIME_ZONE', 'UTC' if settings.USE_TZ else settings.TIME_ZONE)
        for setting in ['NAME', 'USER', 'PASSWORD', 'HOST', 'PORT']:
            conn.setdefault(setting, '')
        for setting in ['TEST_CHARSET', 'TEST_COLLATION', 'TEST_NAME', 'TEST_MIRROR']:
            conn.setdefault(setting, None)

    def __getitem__(self, alias):
        # Reuse this thread's connection for the alias if already created.
        if hasattr(self._connections, alias):
            return getattr(self._connections, alias)
        self.ensure_defaults(alias)
        db = self.databases[alias]
        backend = load_backend(db['ENGINE'])
        conn = backend.DatabaseWrapper(db, alias)
        setattr(self._connections, alias, conn)
        return conn

    def __setitem__(self, key, value):
        setattr(self._connections, key, value)

    def __delitem__(self, key):
        delattr(self._connections, key)

    def __iter__(self):
        return iter(self.databases)

    def all(self):
        # Materializes a connection for every configured alias.
        return [self[alias] for alias in self]
class ConnectionRouter(object):
    """Delegates database-routing decisions to the configured router chain."""

    def __init__(self, routers=None):
        """
        If routers is not specified, will default to settings.DATABASE_ROUTERS.
        """
        self._routers = routers

    @cached_property
    def routers(self):
        if self._routers is None:
            self._routers = settings.DATABASE_ROUTERS
        routers = []
        for r in self._routers:
            # Entries may be dotted-path strings or ready router instances.
            if isinstance(r, six.string_types):
                router = import_by_path(r)()
            else:
                router = r
            routers.append(router)
        return routers

    def _router_func(action):
        # Method factory run at class-definition time: builds db_for_read /
        # db_for_write, which ask each router in turn for a database alias.
        def _route_db(self, model, **hints):
            chosen_db = None
            for router in self.routers:
                try:
                    method = getattr(router, action)
                except AttributeError:
                    # If the router doesn't have a method, skip to the next one.
                    pass
                else:
                    chosen_db = method(model, **hints)
                    if chosen_db:
                        return chosen_db
            # No router decided: prefer the instance's current database,
            # then the default alias.
            try:
                return hints['instance']._state.db or DEFAULT_DB_ALIAS
            except KeyError:
                return DEFAULT_DB_ALIAS
        return _route_db

    db_for_read = _router_func('db_for_read')
    db_for_write = _router_func('db_for_write')

    def allow_relation(self, obj1, obj2, **hints):
        for router in self.routers:
            try:
                method = router.allow_relation
            except AttributeError:
                # If the router doesn't have a method, skip to the next one.
                pass
            else:
                allow = method(obj1, obj2, **hints)
                if allow is not None:
                    return allow
        # Default: permit only when both objects live in the same database.
        return obj1._state.db == obj2._state.db

    def allow_syncdb(self, db, model):
        for router in self.routers:
            try:
                method = router.allow_syncdb
            except AttributeError:
                # If the router doesn't have a method, skip to the next one.
                pass
            else:
                allow = method(db, model)
                if allow is not None:
                    return allow
        # Default: sync every model to every database.
        return True
| {
"repo_name": "eltonsantos/django",
"path": "django/db/utils.py",
"copies": "3",
"size": "8980",
"license": "bsd-3-clause",
"hash": 4267677708260966000,
"line_mean": 30.9572953737,
"line_max": 92,
"alpha_frac": 0.5668151448,
"autogenerated": false,
"ratio": 4.574630667345899,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.66414458121459,
"avg_score": null,
"num_lines": null
} |
from functools import wraps
import os
import random
from django.core.cache import cache
from . import settings
def partial(func, *parameters, **kparms):
    """Bind *parameters* / *kparms* to *func*.

    NOTE: unlike functools.partial, the bound positionals are appended
    AFTER the call-time positionals, and bound keywords override any
    call-time keywords of the same name.
    """
    @wraps(func)
    def wrapped(*args, **kw):
        merged = dict(kw)
        merged.update(kparms)
        return func(*(args + parameters), **merged)
    return wrapped
def assets_dir(instance, filename):
    """Build the upload path for an asset file.

    When the instance asks for renaming, its slug replaces the original
    base name.  With settings.DIRECTORY configured, the file is placed
    inside that directory.
    """
    base, ext = os.path.splitext(filename)
    if instance.rename_file():
        base = instance.slug
    if settings.DIRECTORY:
        return '/'.join([settings.DIRECTORY, base + ext])
    # NOTE(review): without DIRECTORY the (possibly unrenamed) base is
    # discarded and the slug is always used -- presumably intentional;
    # confirm against callers.
    return instance.slug + ext
def get_size_filename(filename, size_name):
    """Insert ``_<size_name>`` just before the file extension."""
    base, ext = os.path.splitext(filename)
    return "{0}_{1}{2}".format(base, size_name, ext)
def get_cache_bust_version(url):
    """Return the current cache-busting version number for *url*,
    seeding a random one when none is cached yet."""
    key = "cbversion.{0}".format(url)
    value = cache.get(key)
    if not value:
        # We could look it up but for now just make it up
        # NOTE(review): randint may return 0, which update_cache_bust_version
        # treats as "no value supplied" and falls back to cache-or-1.
        value = update_cache_bust_version(url, random.randint(0, 60))
    return value
def update_cache_bust_version(url, value=None):
    """Set (or increment) the cache-busting version for *url*.

    With a truthy *value* it is stored as-is; otherwise the cached value
    is incremented, starting from 1.  Entries expire after 60 days.
    """
    key = "cbversion.{0}".format(url)
    if not value:
        value = cache.get(key)
        if value:
            value = int(value) + 1
        else:
            value = 1
    # 60*60*24*60 seconds == 60 days.
    cache.set(key, value, 60*60*24*60)
    return value
| {
"repo_name": "ff0000/scarlet",
"path": "scarlet/assets/utils.py",
"copies": "1",
"size": "1235",
"license": "mit",
"hash": 1964094179717079600,
"line_mean": 22.75,
"line_max": 69,
"alpha_frac": 0.6315789474,
"autogenerated": false,
"ratio": 3.569364161849711,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4700943109249711,
"avg_score": null,
"num_lines": null
} |
from functools import wraps
import os
import re
import inspect
try:
import unittest.mock as mock
except ImportError:
import mock
# Path separators and dots collapse to '__' when plugin modules register
# themselves in sys.modules.
norm_pattern = re.compile(r'[/|.]')


def patch_plugin_file(*patch_args, **patch_kwargs):
    """Patch an attribute of an already-imported plugin module located by
    its file path.

    ``patch_args[0]`` is the plugin file path (e.g. ``plugins/foo/bar.py``);
    the remaining positional/keyword arguments are forwarded to
    ``mock.patch.object`` against the matching module.

    Raises NameError unless exactly one module in ``sys.modules`` contains
    the normalized namespace.
    """
    import sys
    root, filename = os.path.split(patch_args[0])
    module_name, _ = os.path.splitext(filename)
    # e.g. root 'plugins/foo' + name 'bar' -> 'plugins__foo_bar'.
    namespace = '_'.join([re.sub(norm_pattern, '__', root), module_name])
    found_modules = [key for key in sys.modules.keys() if namespace in key]
    if len(found_modules) != 1:
        # BUGFIX: the original message interpolated the two values in
        # swapped positions (the candidate list where the file belongs).
        raise NameError('Tried to find 1 module from file %s but found: %s' %
                        (namespace, found_modules))
    module = sys.modules[found_modules.pop()]

    def patch_decorator(func, *patch_decorator_args):
        if not inspect.isclass(func):
            @wraps(func)
            @mock.patch.object(module, *patch_args[1:], **patch_kwargs)
            def wrapper(*args, **kwargs):
                return func(*(args + patch_decorator_args), **kwargs)
            return wrapper
        else:
            # Classes get the patch applied to every test method.
            @mock.patch.object(module, *patch_args[1:], **patch_kwargs)
            class WrappedClass(func):
                pass
            return WrappedClass
    return patch_decorator
| {
"repo_name": "wongwill86/air-tasks",
"path": "tests/utils/mock_helpers.py",
"copies": "1",
"size": "1302",
"license": "mit",
"hash": 934084375931314800,
"line_mean": 30,
"line_max": 77,
"alpha_frac": 0.6044546851,
"autogenerated": false,
"ratio": 3.863501483679525,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4967956168779525,
"avg_score": null,
"num_lines": null
} |
from functools import wraps
import os
import re
import sys
from unittest.case import _ExpectedFailure, _UnexpectedSuccess
import sublime
from unittesting import DeferrableTestCase, AWAIT_WORKER
from GitSavvy.tests.mockito import mock, unstub, verify, when
from GitSavvy.tests.parameterized import parameterized as p
import GitSavvy.core.commands.diff as module
from GitSavvy.core.commands.diff import gs_diff, gs_diff_refresh
def isiterable(obj):
    """Return True when *obj* exposes an ``__iter__`` attribute."""
    return getattr(obj, '__iter__', None) is not None
def expectedFailure(func):
    """Variant of unittest's expectedFailure for deferrable (generator
    based) test methods: the wrapper drives the returned iterator to
    completion before judging success/failure.

    NOTE(review): relies on the private unittest internals
    _ExpectedFailure/_UnexpectedSuccess (removed in CPython 3.4+) -- tied
    to Sublime Text's bundled Python version.
    """
    @wraps(func)
    def wrapper(*args, **kwargs):
        try:
            deferred = func(*args, **kwargs)
            if isiterable(deferred):
                # Deferrable test: run it to completion.
                yield from deferred
        except Exception:
            # Any exception counts as the expected failure.
            raise _ExpectedFailure(sys.exc_info())
        # Reaching here means the test passed although failure was expected.
        raise _UnexpectedSuccess
    return wrapper
# Directory containing this test file; used by fixture() to locate data.
THIS_DIRNAME = os.path.dirname(os.path.realpath(__file__))
# Travis CI exports TRAVIS_OS_NAME; some UI-dependent tests are flaky there.
RUNNING_ON_LINUX_TRAVIS = os.environ.get('TRAVIS_OS_NAME') == 'linux'
# Mark known-flaky tests as expected failures on Linux Travis; no-op elsewhere.
expectedFailureOnLinuxTravis = expectedFailure if RUNNING_ON_LINUX_TRAVIS else lambda f: f
def fixture(name):
    """Return the text content of ``fixtures/<name>`` next to this file."""
    path = os.path.join(THIS_DIRNAME, 'fixtures', name)
    with open(path) as fh:
        return fh.read()
class TestDiffViewInternalFunctions(DeferrableTestCase):
    """Tests for pure helpers of the diff module (shrink_list_sym,
    find_hunk_in_view), run against scratch views in a throwaway window."""

    @classmethod
    def setUpClass(cls):
        sublime.run_command("new_window")
        cls.window = sublime.active_window()
        s = sublime.load_settings("Preferences.sublime-settings")
        # Keep the test window alive while its scratch views get closed.
        s.set("close_windows_when_empty", False)

    @classmethod
    def tearDownClass(self):
        self.window.run_command('close_window')

    @p.expand([
        ([1, 2, 3, 4, 5], [[1, 2, 3, 4, 5], [2, 3, 4], [3]]),
        ([1, 2, 3, 4], [[1, 2, 3, 4], [2, 3]]),
        ([1, 2, 3], [[1, 2, 3], [2]]),
        ([1, 2], [[1, 2]]),
        ([1], [[1]]),
        ([], [])
    ])
    def test_shrink_list(self, IN, expected):
        # shrink_list_sym yields the list repeatedly stripped of its
        # outermost elements, symmetrically, until nothing remains.
        actual = module.shrink_list_sym(IN)
        actual = list(actual)
        self.assertEqual(actual, expected)

    @p.expand([
        ("@@ 1\n1234\n1567\n1890", (41, 46)),
        ("@@ 1\n1234\n1567", (41, 46)),
        ("@@ 1\n1567\n1890", (41, 46)),
        ("@@ 1\n1234", (41, 46)),
        ("@@ 1\n1567", (41, 46)),
        ("@@ 1\n1890", (41, 46)),
        ("@@ 1\n1XXX\n1XXX", (41, 46)),
        ("@@ X\n1234\n1567\n1890", (41, 46)),
        ("@@ X\n1567\n1890", (41, 46)),
        ("@@ X\n1234\n1567", (41, 46)),
        ("@@ X\n1234", (41, 46)),
        ("@@ X\n1567", (41, 46)),
        ("@@ X\n1890", (41, 46)),
        ("@@ X\n1XXX\n1567\n1890", (41, 46)),
        ("@@ X\n1234\n1567\n1XXX", (41, 46)),
        ("@@ X\n1XXX\n1567\n1XXX", (41, 46)),
        ("@@ X\n1234\n1XXX\n1XXX", None),
        ("@@ X\n1XXX\n1XXX\n1890", None),
        ("@@ X\n1XXX\n1XXX\n1XXX", None),
        ("@@ X\n0123", None),
        # Only consider first hunk in input
        ("@@ X\n1234\n1567\n1890\n@@ 2\n2345\n2678", (41, 46)),
        ("@@ X\n1234\n@@ 2\n2345\n2678", (41, 46)),
        ("@@ X\n1234\n1567\n1890\n@@ X\n2XXX\n2678", (41, 46)),
        # Ensure invalid input doesn't throw
        ("@@ X\n", None),
        ("1234\n1567\n1890", None),
    ])
    def test_find_hunk_in_view(self, IN, expected):
        VIEW_CONTENT = """\
0123
diff --git a/barz b/fooz
+++ b/fooz
@@ 1
1234
1567
1890
@@ 2
2345
2678
"""
        view = self.window.new_file()
        self.addCleanup(view.close)
        view.run_command('append', {'characters': VIEW_CONTENT})
        view.set_scratch(True)
        actual = module.find_hunk_in_view(view, "diff --git a/barz b/fooz\n+++ b/fooz\n" + IN)
        # Compare region boundaries; None means the hunk was not found.
        actual = (actual.a, actual.b) if actual else actual
        self.assertEqual(actual, expected)
class TestDiffViewJumpingToFile(DeferrableTestCase):
    """Given a cursor position in a rendered diff view, verify that
    gs_diff_open_file_at_hunk computes the right (file, row, col) target."""

    @classmethod
    def setUpClass(cls):
        sublime.run_command("new_window")
        cls.window = sublime.active_window()
        s = sublime.load_settings("Preferences.sublime-settings")
        # Keep the test window alive while its scratch views get closed.
        s.set("close_windows_when_empty", False)

    @classmethod
    def tearDownClass(self):
        self.window.run_command('close_window')

    def tearDown(self):
        # Undo all mockito stubs so tests stay independent.
        unstub()

    @p.expand([
        (79, ('barz', 16, 1)),
        (80, ('barz', 16, 1)),
        (81, ('barz', 16, 2)),
        (85, ('barz', 17, 1)),
        (86, ('barz', 17, 2)),
        # on a '-' try to select next '+' line
        (111, ('barz', 20, 1)),  # jump to 'four'
        (209, ('boox', 17, 1)),  # jump to 'thr'
        (210, ('boox', 17, 2)),
        (211, ('boox', 17, 3)),
        (212, ('boox', 17, 4)),
        (213, ('boox', 17, 1)),
        (214, ('boox', 17, 1)),
        (223, ('boox', 19, 1)),  # all jump to 'sev'
        (228, ('boox', 19, 1)),
        (233, ('boox', 19, 1)),
        (272, ('boox', 25, 5)),
        (280, ('boox', 25, 5)),
        (319, ('boox', 30, 1)),  # but do not jump if indentation does not match
        # cursor on the hunk info line selects first diff line
        (58, ('barz', 16, 1)),
        (59, ('barz', 16, 1)),
        (89, ('barz', 20, 1)),
    ])
    def test_a(self, CURSOR, EXPECTED):
        VIEW_CONTENT = """\
prelude
--
diff --git a/fooz b/barz
--- a/fooz
+++ b/barz
@@ -16,1 +16,1 @@ Hi
one
+two
@@ -20,1 +20,1 @@ Ho
-three
context
+four
diff --git a/foxx b/boxx
--- a/foox
+++ b/boox
@@ -16,1 +16,1 @@ Hello
one
-two
+thr
fou
-fiv
-six
+sev
eig
@@ -24 +24 @@ Hello
one
- two
thr
@@ -30 +30 @@ Hello
one
- two
thr
"""
        view = self.window.new_file()
        self.addCleanup(view.close)
        view.run_command('append', {'characters': VIEW_CONTENT})
        view.set_scratch(True)
        cmd = module.gs_diff_open_file_at_hunk(view)
        # Stub out the actual navigation; we only verify the arguments.
        when(cmd).load_file_at_line(...)
        view.sel().clear()
        view.sel().add(CURSOR)
        cmd.run({'unused_edit'})
        # In all cases here "commit" is `None`
        verify(cmd).load_file_at_line(None, *EXPECTED)
class TestDiffViewHunking(DeferrableTestCase):
    """Verify gs_diff_stage_or_reset_hunk extracts the right patch text for
    the cursor position(s) and records it in the view's history setting."""

    @classmethod
    def setUpClass(cls):
        sublime.run_command("new_window")
        cls.window = sublime.active_window()
        s = sublime.load_settings("Preferences.sublime-settings")
        # Keep the test window alive while its scratch views get closed.
        s.set("close_windows_when_empty", False)

    @classmethod
    def tearDownClass(self):
        self.window.run_command('close_window')

    def tearDown(self):
        # Undo all mockito stubs so tests stay independent.
        unstub()

    # Expected patch when the cursor sits in the first file's hunk.
    HUNK1 = """\
diff --git a/fooz b/barz
--- a/fooz
+++ b/barz
@@ -16,1 +16,1 @@ Hi
one
two
"""
    # Expected patch for the second file's hunk.
    HUNK2 = """\
diff --git a/foxx b/boxx
--- a/foox
+++ b/boox
@@ -16,1 +16,1 @@ Hello
one
two
"""

    @p.expand([
        (58, HUNK1),
        (68, HUNK1),
        (79, HUNK1),
        (84, HUNK1),
        (88, HUNK1),
        (136, HUNK2),
        (146, HUNK2),
        (156, HUNK2),
        (166, HUNK2),
        (169, HUNK2),
        (170, HUNK2),  # at EOF should work
    ])
    def test_hunking_one_hunk(self, CURSOR, HUNK, IN_CACHED_MODE=False):
        # Docstring here to get verbose parameterized printing
        """"""
        VIEW_CONTENT = """\
prelude
--
diff --git a/fooz b/barz
--- a/fooz
+++ b/barz
@@ -16,1 +16,1 @@ Hi
one
two
diff --git a/foxx b/boxx
--- a/foox
+++ b/boox
@@ -16,1 +16,1 @@ Hello
one
two
"""
        view = self.window.new_file()
        self.addCleanup(view.close)
        view.run_command('append', {'characters': VIEW_CONTENT})
        view.set_scratch(True)
        view.settings().set('git_savvy.diff_view.in_cached_mode', IN_CACHED_MODE)
        view.settings().set('git_savvy.diff_view.history', [])
        cmd = module.gs_diff_stage_or_reset_hunk(view)
        # Stub git and the refresh command: we only inspect the history.
        when(cmd).git(...)
        when(cmd.view).run_command("gs_diff_refresh")
        view.sel().clear()
        view.sel().add(CURSOR)
        cmd.run({'unused_edit'})
        history = view.settings().get('git_savvy.diff_view.history')
        self.assertEqual(len(history), 1)
        actual = history.pop()
        expected = [['apply', None, '--cached', None, '-'], HUNK, [CURSOR], IN_CACHED_MODE]
        self.assertEqual(actual, expected)

    # Both hunks of the first file combined into one patch.
    HUNK3 = """\
diff --git a/fooz b/barz
--- a/fooz
+++ b/barz
@@ -16,1 +16,1 @@ Hi
one
two
@@ -20,1 +20,1 @@ Ho
three
four
"""
    # One hunk from each of the two files combined.
    HUNK4 = """\
diff --git a/fooz b/barz
--- a/fooz
+++ b/barz
@@ -20,1 +20,1 @@ Ho
three
four
diff --git a/foxx b/boxx
--- a/foox
+++ b/boox
@@ -16,1 +16,1 @@ Hello
one
two
"""

    @p.expand([
        # De-duplicate cursors in the same hunk
        ([58, 79], HUNK1),
        ([58, 79, 84], HUNK1),
        # Combine hunks
        ([58, 89], HUNK3),
        ([89, 170], HUNK4),
        # Ignore cursors not in a hunk
        ([2, 11, 58, 79], HUNK1),
        ([58, 89, 123], HUNK3),
        ([11, 89, 123, 170], HUNK4),
    ])
    def test_hunking_two_hunks(self, CURSORS, PATCH, IN_CACHED_MODE=False):
        VIEW_CONTENT = """\
prelude
--
diff --git a/fooz b/barz
--- a/fooz
+++ b/barz
@@ -16,1 +16,1 @@ Hi
one
two
@@ -20,1 +20,1 @@ Ho
three
four
diff --git a/foxx b/boxx
--- a/foox
+++ b/boox
@@ -16,1 +16,1 @@ Hello
one
two
"""
        view = self.window.new_file()
        self.addCleanup(view.close)
        view.run_command('append', {'characters': VIEW_CONTENT})
        view.set_scratch(True)
        view.settings().set('git_savvy.diff_view.in_cached_mode', IN_CACHED_MODE)
        view.settings().set('git_savvy.diff_view.history', [])
        cmd = module.gs_diff_stage_or_reset_hunk(view)
        when(cmd).git(...)
        when(cmd.view).run_command("gs_diff_refresh")
        # when(module.gs_diff_stage_or_reset_hunk).git(...)
        # when(module).refresh(view)
        view.sel().clear()
        for c in CURSORS:
            view.sel().add(c)
        cmd.run({'unused_edit'})
        history = view.settings().get('git_savvy.diff_view.history')
        self.assertEqual(len(history), 1)
        actual = history.pop()
        expected = [['apply', None, '--cached', None, '-'], PATCH, CURSORS, IN_CACHED_MODE]
        self.assertEqual(actual, expected)

    def test_sets_unidiff_zero_if_no_contextual_lines(self):
        VIEW_CONTENT = """\
prelude
--
diff --git a/fooz b/barz
--- a/fooz
+++ b/barz
@@ -16,1 +16,1 @@ Hi
one
two
"""
        CURSOR = 58
        view = self.window.new_file()
        self.addCleanup(view.close)
        view.run_command('append', {'characters': VIEW_CONTENT})
        view.set_scratch(True)
        # view.settings().set('git_savvy.diff_view.in_cached_mode', IN_CACHED_MODE)
        view.settings().set('git_savvy.diff_view.history', [])
        # A zero-context diff must be applied with --unidiff-zero.
        view.settings().set('git_savvy.diff_view.context_lines', 0)
        cmd = module.gs_diff_stage_or_reset_hunk(view)
        when(cmd).git(...)
        when(cmd.view).run_command("gs_diff_refresh")
        view.sel().clear()
        view.sel().add(CURSOR)
        cmd.run({'unused_edit'})
        history = view.settings().get('git_savvy.diff_view.history')
        self.assertEqual(len(history), 1)
        actual = history.pop()[0]
        expected = ['apply', None, '--cached', '--unidiff-zero', '-']
        self.assertEqual(actual, expected)

    def test_status_message_if_not_in_hunk(self):
        VIEW_CONTENT = """\
prelude
--
diff --git a/fooz b/barz
--- a/fooz
+++ b/barz
@@ -16,1 +16,1 @@ Hi
one
two
@@ -20,1 +20,1 @@ Ho
three
four
diff --git a/foxx b/boxx
--- a/foox
+++ b/boox
@@ -16,1 +16,1 @@ Hello
one
two
"""
        view = self.window.new_file()
        self.addCleanup(view.close)
        view.run_command('append', {'characters': VIEW_CONTENT})
        view.set_scratch(True)
        window = mock()
        when(view).window().thenReturn(window)
        when(window).status_message(...)
        # Cursor at 0 is in the prelude, outside any hunk.
        view.sel().clear()
        view.sel().add(0)
        # Manually instantiate the cmd so we can inject our known view
        cmd = module.gs_diff_stage_or_reset_hunk(view)
        cmd.run('_unused_edit')
        verify(window, times=1).status_message('Not within a hunk')
class TestZooming(DeferrableTestCase):
    """Verify the context-line zoom: the refresh command must receive the
    matching --unified flag and zooming must clamp/update the setting."""

    @classmethod
    def setUpClass(cls):
        sublime.run_command("new_window")
        cls.window = sublime.active_window()
        s = sublime.load_settings("Preferences.sublime-settings")
        # Keep the test window alive while its scratch views get closed.
        s.set("close_windows_when_empty", False)

    @classmethod
    def tearDownClass(self):
        self.window.run_command('close_window')

    @p.expand([
        (0, '--unified=0'),
        (1, '--unified=1'),
        (3, '--unified=3'),
        (5, '--unified=5'),
        (None, None)
    ])
    def test_adds_unified_flag_to_change_contextual_lines(self, CONTEXT_LINES, FLAG):
        view = self.window.new_file()
        self.addCleanup(view.close)
        view.set_scratch(True)
        view.settings().set("git_savvy.repo_path", "fake_repo_path")
        view.settings().set('git_savvy.diff_view.context_lines', CONTEXT_LINES)
        cmd = module.gs_diff_refresh(view)
        # Stub git; we only verify the flag passed through.
        when(cmd).git(...).thenReturn('NEW CONTENT')
        cmd.run({'unused_edit'})
        verify(cmd).git('diff', None, FLAG, ...)

    @p.expand([
        (0, 2, 2),
        (3, 2, 5),
        (3, -2, 1),
        (2, -2, 0),
        (1, -2, 0),
        (0, -2, 0),
    ])
    def test_updates_view_state_when_zooming(self, BEFORE, AMOUNT, EXPECTED):
        # Zooming adds AMOUNT to the current context lines, clamped at 0.
        view = self.window.new_file()
        self.addCleanup(view.close)
        view.set_scratch(True)
        view.settings().set('git_savvy.diff_view.context_lines', BEFORE)
        cmd = module.gs_diff_zoom(view)
        when(cmd.view).run_command("gs_diff_refresh")
        cmd.run({'unused_edit'}, AMOUNT)
        actual = view.settings().get('git_savvy.diff_view.context_lines')
        self.assertEqual(actual, EXPECTED)
class TestDiffView(DeferrableTestCase):
    """End-to-end tests for creating the diff view, its default settings,
    clickable result navigation, and diff parsing."""

    @classmethod
    def setUpClass(cls):
        sublime.run_command("new_window")
        cls.window = sublime.active_window()
        s = sublime.load_settings("Preferences.sublime-settings")
        s.set("close_windows_when_empty", False)

    @classmethod
    def tearDownClass(cls):
        # Fixed: a @classmethod receives the class object; the parameter was
        # previously misnamed ``self``.
        cls.window.run_command('close_window')

    def setUp(self):
        self.view = self.window.new_file()
        self.view.set_scratch(True)
        self.addCleanup(self.view.close)

    def tearDown(self):
        unstub()

    @p.expand([
        ('in_cached_mode', False),
        ('ignore_whitespace', False),
        ('base_commit', None),
        ('target_commit', None),
        ('show_diffstat', True),
        ('context_lines', 3),
        ('disable_stage', False),
        ('history', []),
        ('just_hunked', ''),
    ])
    def test_default_view_state(self, KEY, DEFAULT_VALUE):
        # A freshly created diff view carries these default settings.
        REPO_PATH = '/not/there'
        when(gs_diff_refresh).git('diff', ...).thenReturn('')
        cmd = gs_diff(self.window)
        when(cmd).get_repo_path().thenReturn(REPO_PATH)
        cmd.run()
        diff_view = self.window.active_view()
        self.addCleanup(diff_view.close)
        actual = diff_view.settings().get('git_savvy.diff_view.{}'.format(KEY))
        self.assertEqual(actual, DEFAULT_VALUE)

    def test_sets_repo_path(self):
        REPO_PATH = '/not/there'
        when(gs_diff_refresh).git('diff', ...).thenReturn('')
        cmd = gs_diff(self.window)
        when(cmd).get_repo_path().thenReturn(REPO_PATH)
        cmd.run()
        diff_view = self.window.active_view()
        self.addCleanup(diff_view.close)
        actual = diff_view.settings().get('git_savvy.repo_path')
        self.assertEqual(actual, REPO_PATH)

    @expectedFailureOnLinuxTravis
    def test_extract_clickable_lines(self):
        REPO_PATH = '/not/there'
        DIFF = fixture('diff_1.txt')
        when(gs_diff_refresh).git('diff', ...).thenReturn(DIFF)
        cmd = gs_diff(self.window)
        when(cmd).get_repo_path().thenReturn(REPO_PATH)
        cmd.run()
        yield AWAIT_WORKER  # await activated_async
        yield AWAIT_WORKER  # await refresh async
        diff_view = self.window.active_view()
        self.addCleanup(diff_view.close)
        actual = diff_view.find_all_results()
        # `find_all_results` only returns full filename-with-line matches.
        # These match clicking on `@@ -52,8 +XX,7` lines
        expected = [
            ('/not/there/core/commands/custom.py', 16, 0),
            ('/not/there/core/commands/diff.py', 52, 0),
            ('/not/there/core/commands/diff.py', 63, 0)
        ]
        self.assertEqual(actual, expected)

    @expectedFailureOnLinuxTravis
    def test_result_file_regex(self):
        REPO_PATH = '/not/there'
        DIFF = fixture('diff_1.txt')
        when(gs_diff_refresh).git('diff', ...).thenReturn(DIFF)
        cmd = gs_diff(self.window)
        when(cmd).get_repo_path().thenReturn(REPO_PATH)
        cmd.run()
        yield AWAIT_WORKER  # await activated_async
        yield AWAIT_WORKER  # await refresh async
        diff_view = self.window.active_view()
        self.addCleanup(diff_view.close)
        BUFFER_CONTENT = diff_view.substr(sublime.Region(0, diff_view.size()))
        self.assertEqual(
            BUFFER_CONTENT,
            '''
UNSTAGED CHANGES
--
''' + DIFF
        )
        # The regex stored on the view must match every filename occurrence
        # in the rendered buffer, in order.
        regex = diff_view.settings().get('result_file_regex')
        matches = re.findall(regex, BUFFER_CONTENT, re.M)
        expected = [
            'core/commands/custom.py',
            'core/commands/diff.py',
            'core/commands/custom.py',
            'core/commands/custom.py',
            'core/commands/custom.py',
            'core/commands/diff.py',
            'core/commands/diff.py',
            'core/commands/diff.py'
        ]
        self.assertEqual(matches, expected)

        PRELUDE_HEIGHT = 4
        matches = re.finditer(regex, BUFFER_CONTENT, re.M)
        actual = [
            (m.group(0), diff_view.rowcol(m.span(1)[0])[0] + 1 - PRELUDE_HEIGHT)
            # Oh boy, a oneliner.  ^^^^^^^^^^^^ start offset
            #                      ^^^^^^ convert to (row, col)
            #                      ^^^^^^^ only take row
            #                      but add 1 for convenience
            for m in matches
        ]
        expected = [
            (' core/commands/custom.py |', 1),
            (' core/commands/diff.py |', 2),
            ('diff --git a/core/commands/custom.py b/core/commands/custom.py', 5),
            ('--- a/core/commands/custom.py', 7),
            ('+++ b/core/commands/custom.py', 8),
            ('diff --git a/core/commands/diff.py b/core/commands/diff.py', 18),
            ('--- a/core/commands/diff.py', 20),
            ('+++ b/core/commands/diff.py', 21)
        ]
        self.assertEqual(actual, expected)

    def test_parse_diff(self):
        # A plain diff has no commit headers, so no commit is associated
        # with its hunks.
        DIFF = fixture('diff_1.txt')
        diff = module.SplittedDiff.from_string(DIFF)
        self.assertEqual(diff.commit_for_hunk(diff.hunks[0]), None)

    def test_parse_commit(self):
        # A `git log -p`-style diff: hunks map back to their file header and
        # to the commit they belong to.
        DIFF = fixture('diff_2.txt')
        diff = module.SplittedDiff.from_string(DIFF)
        self.assertEqual(len(diff.hunks), 2)
        self.assertEqual(len(diff.headers), 3)
        self.assertEqual(diff.head_for_hunk(diff.hunks[0]), diff.headers[-1])
        self.assertEqual(diff.head_for_hunk(diff.hunks[1]), diff.headers[-1])
        self.assertEqual(diff.commit_for_hunk(diff.hunks[0]), diff.commits[0])
        self.assertEqual(diff.commit_for_hunk(diff.hunks[1]), diff.commits[0])
| {
"repo_name": "divmain/GitSavvy",
"path": "tests/test_diff_view.py",
"copies": "1",
"size": "19220",
"license": "mit",
"hash": 1336568132340221700,
"line_mean": 26.7344877345,
"line_max": 94,
"alpha_frac": 0.5528616025,
"autogenerated": false,
"ratio": 3.155475291413561,
"config_test": true,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9207324898243969,
"avg_score": 0.00020239913391823053,
"num_lines": 693
} |
from functools import wraps
import os
import sys
# make sure gevent-socketio is not installed, as it conflicts with
# python-socketio
# Both packages install a top-level ``socketio`` module; only the old
# gevent-socketio one exposes ``socketio_manage``, so that import acts as
# the detection probe.
gevent_socketio_found = True
try:
    from socketio import socketio_manage  # noqa: F401
except ImportError:
    gevent_socketio_found = False
if gevent_socketio_found:
    # Abort the import entirely: continuing with the wrong socketio package
    # would produce confusing runtime failures later.
    print('The gevent-socketio package is incompatible with this version of '
          'the Flask-SocketIO extension. Please uninstall it, and then '
          'install the latest version of python-socketio in its place.')
    sys.exit(1)
import flask
from flask import _request_ctx_stack, has_request_context, json as flask_json
from flask.sessions import SessionMixin
import socketio
from socketio.exceptions import ConnectionRefusedError # noqa: F401
from werkzeug.debug import DebuggedApplication
from werkzeug._reloader import run_with_reloader
from .namespace import Namespace
from .test_client import SocketIOTestClient
class _SocketIOMiddleware(socketio.WSGIApp):
    """This WSGI middleware simply exposes the Flask application in the WSGI
    environment before executing the request.
    """
    def __init__(self, socketio_app, flask_app, socketio_path='socket.io'):
        # Keep a reference to the Flask app so __call__ can expose it to
        # downstream handlers via the WSGI environ.
        self.flask_app = flask_app
        super(_SocketIOMiddleware, self).__init__(socketio_app,
                                                  flask_app.wsgi_app,
                                                  socketio_path=socketio_path)

    def __call__(self, environ, start_response):
        # Copy the environ so concurrent requests do not see each other's
        # 'flask.app' key.
        environ = environ.copy()
        environ['flask.app'] = self.flask_app
        return super(_SocketIOMiddleware, self).__call__(environ,
                                                         start_response)
class _ManagedSession(dict, SessionMixin):
    """This class is used for user sessions that are managed by
    Flask-SocketIO. It is simple dict, expanded with the Flask session
    attributes."""
    # No behavior of its own: dict supplies storage, SessionMixin supplies
    # the attributes (``modified``, ``permanent``, ...) Flask expects.
    pass
class SocketIO(object):
"""Create a Flask-SocketIO server.
:param app: The flask application instance. If the application instance
isn't known at the time this class is instantiated, then call
``socketio.init_app(app)`` once the application instance is
available.
:param manage_session: If set to ``True``, this extension manages the user
session for Socket.IO events. If set to ``False``,
Flask's own session management is used. When using
Flask's cookie based sessions it is recommended that
you leave this set to the default of ``True``. When
using server-side sessions, a ``False`` setting
enables sharing the user session between HTTP routes
and Socket.IO events.
:param message_queue: A connection URL for a message queue service the
server can use for multi-process communication. A
message queue is not required when using a single
server process.
:param channel: The channel name, when using a message queue. If a channel
isn't specified, a default channel will be used. If
multiple clusters of SocketIO processes need to use the
same message queue without interfering with each other,
then each cluster should use a different channel.
:param path: The path where the Socket.IO server is exposed. Defaults to
``'socket.io'``. Leave this as is unless you know what you are
doing.
:param resource: Alias to ``path``.
:param kwargs: Socket.IO and Engine.IO server options.
The Socket.IO server options are detailed below:
:param client_manager: The client manager instance that will manage the
client list. When this is omitted, the client list
is stored in an in-memory structure, so the use of
multiple connected servers is not possible. In most
cases, this argument does not need to be set
explicitly.
:param logger: To enable logging set to ``True`` or pass a logger object to
use. To disable logging set to ``False``. The default is
``False``. Note that fatal errors will be logged even when
``logger`` is ``False``.
:param json: An alternative json module to use for encoding and decoding
packets. Custom json modules must have ``dumps`` and ``loads``
functions that are compatible with the standard library
versions. To use the same json encoder and decoder as a Flask
application, use ``flask.json``.
:param async_handlers: If set to ``True``, event handlers for a client are
executed in separate threads. To run handlers for a
client synchronously, set to ``False``. The default
is ``True``.
:param always_connect: When set to ``False``, new connections are
provisory until the connect handler returns
something other than ``False``, at which point they
are accepted. When set to ``True``, connections are
immediately accepted, and then if the connect
handler returns ``False`` a disconnect is issued.
Set to ``True`` if you need to emit events from the
connect handler and your client is confused when it
receives events before the connection acceptance.
In any other case use the default of ``False``.
The Engine.IO server configuration supports the following settings:
:param async_mode: The asynchronous model to use. See the Deployment
section in the documentation for a description of the
available options. Valid async modes are ``threading``,
``eventlet``, ``gevent`` and ``gevent_uwsgi``. If this
argument is not given, ``eventlet`` is tried first, then
``gevent_uwsgi``, then ``gevent``, and finally
``threading``. The first async mode that has all its
dependencies installed is then one that is chosen.
:param ping_interval: The interval in seconds at which the server pings
the client. The default is 25 seconds. For advanced
control, a two element tuple can be given, where
the first number is the ping interval and the second
is a grace period added by the server.
:param ping_timeout: The time in seconds that the client waits for the
server to respond before disconnecting. The default
is 5 seconds.
:param max_http_buffer_size: The maximum size of a message when using the
polling transport. The default is 1,000,000
bytes.
:param allow_upgrades: Whether to allow transport upgrades or not. The
default is ``True``.
:param http_compression: Whether to compress packages when using the
polling transport. The default is ``True``.
:param compression_threshold: Only compress messages when their byte size
is greater than this value. The default is
1024 bytes.
:param cookie: If set to a string, it is the name of the HTTP cookie the
server sends back to the client containing the client
session id. If set to a dictionary, the ``'name'`` key
contains the cookie name and other keys define cookie
attributes, where the value of each attribute can be a
string, a callable with no arguments, or a boolean. If set
to ``None`` (the default), a cookie is not sent to the
client.
:param cors_allowed_origins: Origin or list of origins that are allowed to
connect to this server. Only the same origin
is allowed by default. Set this argument to
``'*'`` to allow all origins, or to ``[]`` to
disable CORS handling.
:param cors_credentials: Whether credentials (cookies, authentication) are
allowed in requests to this server. The default is
``True``.
:param monitor_clients: If set to ``True``, a background task will ensure
inactive clients are closed. Set to ``False`` to
disable the monitoring task (not recommended). The
default is ``True``.
:param engineio_logger: To enable Engine.IO logging set to ``True`` or pass
a logger object to use. To disable logging set to
``False``. The default is ``False``. Note that
fatal errors are logged even when
``engineio_logger`` is ``False``.
"""
def __init__(self, app=None, **kwargs):
self.server = None
self.server_options = {}
self.wsgi_server = None
self.handlers = []
self.namespace_handlers = []
self.exception_handlers = {}
self.default_exception_handler = None
self.manage_session = True
# We can call init_app when:
# - we were given the Flask app instance (standard initialization)
# - we were not given the app, but we were given a message_queue
# (standard initialization for auxiliary process)
# In all other cases we collect the arguments and assume the client
# will call init_app from an app factory function.
if app is not None or 'message_queue' in kwargs:
self.init_app(app, **kwargs)
else:
self.server_options.update(kwargs)
def init_app(self, app, **kwargs):
    """Initialize the extension against a Flask application.

    Merges ``kwargs`` into the stored server options, builds the message
    queue client manager when configured, creates the python-socketio
    server, registers any handlers collected before initialization, and
    installs the WSGI middleware on the app.
    """
    if app is not None:
        if not hasattr(app, 'extensions'):
            app.extensions = {}  # pragma: no cover
        app.extensions['socketio'] = self
    self.server_options.update(kwargs)
    # 'manage_session' is an extension-level option, not a server option,
    # so it is popped before the options reach socketio.Server().
    self.manage_session = self.server_options.pop('manage_session',
                                                  self.manage_session)
    if 'client_manager' not in self.server_options:
        url = self.server_options.pop('message_queue', None)
        channel = self.server_options.pop('channel', 'flask-socketio')
        # with no app we are a write-only auxiliary process
        write_only = app is None
        if url:
            # pick the queue backend from the URL scheme
            if url.startswith(('redis://', "rediss://")):
                queue_class = socketio.RedisManager
            elif url.startswith(('kafka://')):
                queue_class = socketio.KafkaManager
            elif url.startswith('zmq'):
                queue_class = socketio.ZmqManager
            else:
                queue_class = socketio.KombuManager
            queue = queue_class(url, channel=channel,
                                write_only=write_only)
            self.server_options['client_manager'] = queue
    if 'json' in self.server_options and \
            self.server_options['json'] == flask_json:
        # flask's json module is tricky to use because its output
        # changes when it is invoked inside or outside the app context
        # so here to prevent any ambiguities we replace it with wrappers
        # that ensure that the app context is always present
        class FlaskSafeJSON(object):
            @staticmethod
            def dumps(*args, **kwargs):
                with app.app_context():
                    return flask_json.dumps(*args, **kwargs)

            @staticmethod
            def loads(*args, **kwargs):
                with app.app_context():
                    return flask_json.loads(*args, **kwargs)
        self.server_options['json'] = FlaskSafeJSON
    # 'path' wins over its alias 'resource'; normalize away a leading '/'
    resource = self.server_options.pop('path', None) or \
        self.server_options.pop('resource', None) or 'socket.io'
    if resource.startswith('/'):
        resource = resource[1:]
    if os.environ.get('FLASK_RUN_FROM_CLI'):
        # under `flask run` only the threading mode can work
        if self.server_options.get('async_mode') is None:
            self.server_options['async_mode'] = 'threading'
    self.server = socketio.Server(**self.server_options)
    self.async_mode = self.server.async_mode
    # register handlers/namespaces collected before the server existed
    for handler in self.handlers:
        self.server.on(handler[0], handler[1], namespace=handler[2])
    for namespace_handler in self.namespace_handlers:
        self.server.register_namespace(namespace_handler)
    if app is not None:
        # here we attach the SocketIO middlware to the SocketIO object so
        # it can be referenced later if debug middleware needs to be
        # inserted
        self.sockio_mw = _SocketIOMiddleware(self.server, app,
                                             socketio_path=resource)
        app.wsgi_app = self.sockio_mw
def on(self, message, namespace=None):
"""Decorator to register a SocketIO event handler.
This decorator must be applied to SocketIO event handlers. Example::
@socketio.on('my event', namespace='/chat')
def handle_my_custom_event(json):
print('received json: ' + str(json))
:param message: The name of the event. This is normally a user defined
string, but a few event names are already defined. Use
``'message'`` to define a handler that takes a string
payload, ``'json'`` to define a handler that takes a
JSON blob payload, ``'connect'`` or ``'disconnect'``
to create handlers for connection and disconnection
events.
:param namespace: The namespace on which the handler is to be
registered. Defaults to the global namespace.
"""
namespace = namespace or '/'
def decorator(handler):
@wraps(handler)
def _handler(sid, *args):
return self._handle_event(handler, message, namespace, sid,
*args)
if self.server:
self.server.on(message, _handler, namespace=namespace)
else:
self.handlers.append((message, _handler, namespace))
return handler
return decorator
def on_error(self, namespace=None):
"""Decorator to define a custom error handler for SocketIO events.
This decorator can be applied to a function that acts as an error
handler for a namespace. This handler will be invoked when a SocketIO
event handler raises an exception. The handler function must accept one
argument, which is the exception raised. Example::
@socketio.on_error(namespace='/chat')
def chat_error_handler(e):
print('An error has occurred: ' + str(e))
:param namespace: The namespace for which to register the error
handler. Defaults to the global namespace.
"""
namespace = namespace or '/'
def decorator(exception_handler):
if not callable(exception_handler):
raise ValueError('exception_handler must be callable')
self.exception_handlers[namespace] = exception_handler
return exception_handler
return decorator
def on_error_default(self, exception_handler):
"""Decorator to define a default error handler for SocketIO events.
This decorator can be applied to a function that acts as a default
error handler for any namespaces that do not have a specific handler.
Example::
@socketio.on_error_default
def error_handler(e):
print('An error has occurred: ' + str(e))
"""
if not callable(exception_handler):
raise ValueError('exception_handler must be callable')
self.default_exception_handler = exception_handler
return exception_handler
def on_event(self, message, handler, namespace=None):
"""Register a SocketIO event handler.
``on_event`` is the non-decorator version of ``'on'``.
Example::
def on_foo_event(json):
print('received json: ' + str(json))
socketio.on_event('my event', on_foo_event, namespace='/chat')
:param message: The name of the event. This is normally a user defined
string, but a few event names are already defined. Use
``'message'`` to define a handler that takes a string
payload, ``'json'`` to define a handler that takes a
JSON blob payload, ``'connect'`` or ``'disconnect'``
to create handlers for connection and disconnection
events.
:param handler: The function that handles the event.
:param namespace: The namespace on which the handler is to be
registered. Defaults to the global namespace.
"""
self.on(message, namespace=namespace)(handler)
def event(self, *args, **kwargs):
"""Decorator to register an event handler.
This is a simplified version of the ``on()`` method that takes the
event name from the decorated function.
Example usage::
@socketio.event
def my_event(data):
print('Received data: ', data)
The above example is equivalent to::
@socketio.on('my_event')
def my_event(data):
print('Received data: ', data)
A custom namespace can be given as an argument to the decorator::
@socketio.event(namespace='/test')
def my_event(data):
print('Received data: ', data)
"""
if len(args) == 1 and len(kwargs) == 0 and callable(args[0]):
# the decorator was invoked without arguments
# args[0] is the decorated function
return self.on(args[0].__name__)(args[0])
else:
# the decorator was invoked with arguments
def set_handler(handler):
return self.on(handler.__name__, *args, **kwargs)(handler)
return set_handler
def on_namespace(self, namespace_handler):
    """Register a class-based namespace handler.

    :param namespace_handler: an instance of a ``Namespace`` subclass.
    :raises ValueError: if the argument is not a ``Namespace`` instance.
    """
    if not isinstance(namespace_handler, Namespace):
        raise ValueError('Not a namespace instance.')
    # give the namespace a back-reference to this extension instance
    namespace_handler._set_socketio(self)
    if self.server:
        self.server.register_namespace(namespace_handler)
    else:
        # server not created yet; init_app() will register it later
        self.namespace_handlers.append(namespace_handler)
def emit(self, event, *args, **kwargs):
    """Emit a server generated SocketIO event.

    This function emits a SocketIO event to one or more connected clients.
    A JSON blob can be attached to the event as payload. This function can
    be used outside of a SocketIO event context, so it is appropriate to
    use when the server is the originator of an event, outside of any
    client context, such as in a regular HTTP request handler or a
    background task. Example::

        @app.route('/ping')
        def ping():
            socketio.emit('ping event', {'data': 42}, namespace='/chat')

    :param event: The name of the user event to emit.
    :param args: A dictionary with the JSON data to send as payload.
    :param namespace: The namespace under which the message is to be sent.
                      Defaults to the global namespace.
    :param to: Send the message to all the users in the given room. If
               this parameter is not included, the event is sent to all
               connected users.
    :param include_self: ``True`` to include the sender when broadcasting
                         or addressing a room, or ``False`` to send to
                         everyone but the sender.
    :param skip_sid: The session id of a client to ignore when
                     broadcasting or addressing a room. This is typically
                     set to the originator of the message, so that
                     everyone except that client receive the message. To
                     skip multiple sids pass a list.
    :param callback: If given, this function will be called to acknowledge
                     that the client has received the message. The
                     arguments that will be passed to the function are
                     those provided by the client. Callback functions can
                     only be used when addressing an individual client.
    """
    namespace = kwargs.pop('namespace', '/')
    # 'room' is the legacy alias for 'to'
    to = kwargs.pop('to', kwargs.pop('room', None))
    include_self = kwargs.pop('include_self', True)
    skip_sid = kwargs.pop('skip_sid', None)
    if not include_self and not skip_sid:
        # exclude the sender: requires an active Socket.IO request context
        skip_sid = flask.request.sid
    callback = kwargs.pop('callback', None)
    if callback:
        # wrap the callback so that it sets the app and request contexts
        sid = None
        if has_request_context():
            sid = getattr(flask.request, 'sid', None)
        original_callback = callback

        def _callback_wrapper(*args):
            return self._handle_event(original_callback, None, namespace,
                                      sid, *args)

        if sid:
            # the callback wrapper above will install a request context
            # before invoking the original callback
            # we only use it if the emit was issued from a Socket.IO
            # populated request context (i.e. request.sid is defined)
            callback = _callback_wrapper
    self.server.emit(event, *args, namespace=namespace, to=to,
                     skip_sid=skip_sid, callback=callback, **kwargs)
def send(self, data, json=False, namespace=None, to=None,
callback=None, include_self=True, skip_sid=None, **kwargs):
"""Send a server-generated SocketIO message.
This function sends a simple SocketIO message to one or more connected
clients. The message can be a string or a JSON blob. This is a simpler
version of ``emit()``, which should be preferred. This function can be
used outside of a SocketIO event context, so it is appropriate to use
when the server is the originator of an event.
:param data: The message to send, either a string or a JSON blob.
:param json: ``True`` if ``message`` is a JSON blob, ``False``
otherwise.
:param namespace: The namespace under which the message is to be sent.
Defaults to the global namespace.
:param to: Send the message only to the users in the given room. If
this parameter is not included, the message is sent to all
connected users.
:param include_self: ``True`` to include the sender when broadcasting
or addressing a room, or ``False`` to send to
everyone but the sender.
:param skip_sid: The session id of a client to ignore when broadcasting
or addressing a room. This is typically set to the
originator of the message, so that everyone except
that client receive the message. To skip multiple sids
pass a list.
:param callback: If given, this function will be called to acknowledge
that the client has received the message. The
arguments that will be passed to the function are
those provided by the client. Callback functions can
only be used when addressing an individual client.
"""
skip_sid = flask.request.sid if not include_self else skip_sid
if json:
self.emit('json', data, namespace=namespace, to=to,
skip_sid=skip_sid, callback=callback, **kwargs)
else:
self.emit('message', data, namespace=namespace, to=to,
skip_sid=skip_sid, callback=callback, **kwargs)
def close_room(self, room, namespace=None):
    """Close a room.

    This function removes any users that are in the given room and then
    deletes the room from the server. This function can be used outside
    of a SocketIO event context.

    :param room: The name of the room to close.
    :param namespace: The namespace under which the room exists. Defaults
                      to the global namespace.
    """
    # delegate straight to the underlying python-socketio server
    self.server.close_room(room, namespace)
def run(self, app, host=None, port=None, **kwargs):  # pragma: no cover
    """Run the SocketIO web server.

    :param app: The Flask application instance.
    :param host: The hostname or IP address for the server to listen on.
                 Defaults to 127.0.0.1.
    :param port: The port number for the server to listen on. Defaults to
                 5000.
    :param debug: ``True`` to start the server in debug mode, ``False``
                  to start in normal mode.
    :param use_reloader: ``True`` to enable the Flask reloader, ``False``
                         to disable it.
    :param reloader_options: A dictionary with options that are passed to
                             the Flask reloader, such as ``extra_files``,
                             ``reloader_type``, etc.
    :param extra_files: A list of additional files that the Flask reloader
                        should watch. Defaults to ``None``. Deprecated,
                        use ``reloader_options`` instead.
    :param log_output: If ``True``, the server logs all incoming
                       connections. If ``False`` logging is disabled.
                       Defaults to ``True`` in debug mode, ``False`` in
                       normal mode. Unused when the threading async mode
                       is used.
    :param kwargs: Additional web server options, specific to the server
                   used in each of the supported async modes. Note that
                   options provided here are not seen when using an
                   external web server such as gunicorn, since this
                   method is not called in that case.
    """
    if host is None:
        host = '127.0.0.1'
    if port is None:
        # honor a port embedded in SERVER_NAME before falling back to 5000
        server_name = app.config['SERVER_NAME']
        if server_name and ':' in server_name:
            port = int(server_name.rsplit(':', 1)[1])
        else:
            port = 5000
    debug = kwargs.pop('debug', app.debug)
    log_output = kwargs.pop('log_output', debug)
    use_reloader = kwargs.pop('use_reloader', debug)
    extra_files = kwargs.pop('extra_files', None)
    reloader_options = kwargs.pop('reloader_options', {})
    if extra_files:
        # legacy option folded into the modern reloader_options dict
        reloader_options['extra_files'] = extra_files
    app.debug = debug
    if app.debug and self.server.eio.async_mode != 'threading':
        # put the debug middleware between the SocketIO middleware
        # and the Flask application instance
        #
        #    mw1    mw2    mw3   Flask app
        #     o ---- o ---- o ---- o
        #    /
        #   o Flask-SocketIO
        #    \  middleware
        #     o
        # Flask-SocketIO WebSocket handler
        #
        # BECOMES
        #
        #  dbg-mw  mw1    mw2    mw3   Flask app
        #     o ---- o ---- o ---- o ---- o
        #    /
        #   o Flask-SocketIO
        #    \  middleware
        #     o
        # Flask-SocketIO WebSocket handler
        #
        self.sockio_mw.wsgi_app = DebuggedApplication(
            self.sockio_mw.wsgi_app, evalex=True)
    if self.server.eio.async_mode == 'threading':
        try:
            import simple_websocket  # noqa: F401
        except ImportError:
            from werkzeug._internal import _log
            _log('warning', 'WebSocket transport not available. Install '
                            'simple-websocket for improved performance.')
        # the Flask dev server handles the request loop in this mode
        app.run(host=host, port=port, threaded=True,
                use_reloader=use_reloader, **reloader_options, **kwargs)
    elif self.server.eio.async_mode == 'eventlet':
        def run_server():
            import eventlet
            import eventlet.wsgi
            import eventlet.green
            addresses = eventlet.green.socket.getaddrinfo(host, port)
            if not addresses:
                raise RuntimeError(
                    'Could not resolve host to a valid address')
            eventlet_socket = eventlet.listen(addresses[0][4],
                                              addresses[0][0])

            # If provided an SSL argument, use an SSL socket
            ssl_args = ['keyfile', 'certfile', 'server_side', 'cert_reqs',
                        'ssl_version', 'ca_certs',
                        'do_handshake_on_connect', 'suppress_ragged_eofs',
                        'ciphers']
            ssl_params = {k: kwargs[k] for k in kwargs if k in ssl_args}
            if len(ssl_params) > 0:
                for k in ssl_params:
                    kwargs.pop(k)
                ssl_params['server_side'] = True  # Listening requires true
                eventlet_socket = eventlet.wrap_ssl(eventlet_socket,
                                                    **ssl_params)

            eventlet.wsgi.server(eventlet_socket, app,
                                 log_output=log_output, **kwargs)

        if use_reloader:
            run_with_reloader(run_server, **reloader_options)
        else:
            run_server()
    elif self.server.eio.async_mode == 'gevent':
        from gevent import pywsgi
        try:
            from geventwebsocket.handler import WebSocketHandler
            websocket = True
        except ImportError:
            # fall back to long-polling only
            app.logger.warning(
                'WebSocket transport not available. Install '
                'gevent-websocket for improved performance.')
            websocket = False

        log = 'default'
        if not log_output:
            log = None
        if websocket:
            self.wsgi_server = pywsgi.WSGIServer(
                (host, port), app, handler_class=WebSocketHandler,
                log=log, **kwargs)
        else:
            self.wsgi_server = pywsgi.WSGIServer((host, port), app,
                                                 log=log, **kwargs)

        if use_reloader:
            # monkey patching is required by the reloader
            from gevent import monkey
            monkey.patch_thread()
            monkey.patch_time()

            def run_server():
                self.wsgi_server.serve_forever()

            run_with_reloader(run_server, **reloader_options)
        else:
            self.wsgi_server.serve_forever()
def stop(self):
    """Stop a running SocketIO web server.

    This method must be called from a HTTP or SocketIO handler function.
    """
    if self.server.eio.async_mode == 'threading':
        # the werkzeug dev server exposes its shutdown hook in the environ
        func = flask.request.environ.get('werkzeug.server.shutdown')
        if func:
            func()
        else:
            raise RuntimeError('Cannot stop unknown web server')
    elif self.server.eio.async_mode == 'eventlet':
        # raising SystemExit unwinds the eventlet server loop
        raise SystemExit
    elif self.server.eio.async_mode == 'gevent':
        self.wsgi_server.stop()
def start_background_task(self, target, *args, **kwargs):
"""Start a background task using the appropriate async model.
This is a utility function that applications can use to start a
background task using the method that is compatible with the
selected async mode.
:param target: the target function to execute.
:param args: arguments to pass to the function.
:param kwargs: keyword arguments to pass to the function.
This function returns an object compatible with the `Thread` class in
the Python standard library. The `start()` method on this object is
already called by this function.
"""
return self.server.start_background_task(target, *args, **kwargs)
def sleep(self, seconds=0):
"""Sleep for the requested amount of time using the appropriate async
model.
This is a utility function that applications can use to put a task to
sleep without having to worry about using the correct call for the
selected async mode.
"""
return self.server.sleep(seconds)
def test_client(self, app, namespace=None, query_string=None,
                headers=None, auth=None, flask_test_client=None):
    """The Socket.IO test client is useful for testing a Flask-SocketIO
    server. It works in a similar way to the Flask Test Client, but
    adapted to the Socket.IO server.

    :param app: The Flask application instance.
    :param namespace: The namespace for the client. If not provided, the
                      client connects to the server on the global
                      namespace.
    :param query_string: A string with custom query string arguments.
    :param headers: A dictionary with custom HTTP headers.
    :param auth: Optional authentication data, given as a dictionary.
    :param flask_test_client: The instance of the Flask test client
                              currently in use. Passing the Flask test
                              client is optional, but is necessary if you
                              want the Flask user session and any other
                              cookies set in HTTP routes accessible from
                              Socket.IO events.
    :return: a new ``SocketIOTestClient`` bound to this server.
    """
    return SocketIOTestClient(app, self, namespace=namespace,
                              query_string=query_string, headers=headers,
                              auth=auth,
                              flask_test_client=flask_test_client)
def _handle_event(self, handler, message, namespace, sid, *args):
    """Invoke a user event handler inside a Flask request context.

    Recreates a request context from the client's original WSGI environ,
    wires the (managed or Flask-owned) user session and the Socket.IO
    metadata onto ``flask.request``, then dispatches to ``handler``.
    Exceptions are routed to a registered exception handler when one
    exists for the namespace.
    """
    environ = self.server.get_environ(sid, namespace=namespace)
    if not environ:
        # we don't have record of this client, ignore this event
        return '', 400
    app = environ['flask.app']
    with app.request_context(environ):
        if self.manage_session:
            # manage a separate session for this client's Socket.IO events
            # created as a copy of the regular user session
            if 'saved_session' not in environ:
                environ['saved_session'] = _ManagedSession(flask.session)
            session_obj = environ['saved_session']
        else:
            # let Flask handle the user session
            # for cookie based sessions, this effectively freezes the
            # session to its state at connection time
            # for server-side sessions, this allows HTTP and Socket.IO to
            # share the session, with both having read/write access to it
            session_obj = flask.session._get_current_object()
        _request_ctx_stack.top.session = session_obj
        # Expose Socket.IO specifics on the request object for handlers.
        flask.request.sid = sid
        flask.request.namespace = namespace
        flask.request.event = {'message': message, 'args': args}
        try:
            if message == 'connect':
                # Connect handlers may optionally accept an auth argument;
                # retry without it if the handler's signature rejects it.
                auth = args[1] if len(args) > 1 else None
                try:
                    ret = handler(auth)
                except TypeError:
                    ret = handler()
            else:
                ret = handler(*args)
        except:
            # Route the error to a registered exception handler, if any;
            # otherwise let it propagate.
            err_handler = self.exception_handlers.get(
                namespace, self.default_exception_handler)
            if err_handler is None:
                raise
            type, value, traceback = sys.exc_info()
            return err_handler(value)
        if not self.manage_session:
            # when Flask is managing the user session, it needs to save it
            if not hasattr(session_obj, 'modified') or \
                    session_obj.modified:
                resp = app.response_class()
                app.session_interface.save_session(app, session_obj, resp)
        return ret
def emit(event, *args, **kwargs):
    """Emit a SocketIO event to one or more connected clients.

    Can only be called from inside a SocketIO event handler, since it
    reads defaults (namespace, sid) from the current client context.

    :param event: The name of the user event to emit.
    :param args: A dictionary with the JSON data to send as payload.
    :param namespace: Namespace to emit under; defaults to the namespace of
                      the originating event (``'/'`` for the global one).
    :param callback: Callback invoked with the client's acknowledgement.
    :param broadcast: ``True`` to send to all clients instead of only the
                      originating one.
    :param to: Room (or sid) to address; when unset and ``broadcast`` is
               false, the originating client is used.
    :param include_self: Whether the sender also receives the message.
    :param skip_sid: sid (or list of sids) to exclude when broadcasting.
    :param ignore_queue: Bypass the message queue and emit directly; only
                         safe with a single server process or addressee.
    """
    ns = kwargs['namespace'] if 'namespace' in kwargs else \
        flask.request.namespace
    # 'room' is the legacy spelling of 'to'; both are consumed here.
    room = kwargs.pop('room', None)
    target = kwargs.pop('to', room)
    if target is None and not kwargs.get('broadcast'):
        # Default to replying to the originating client only.
        target = flask.request.sid
    socketio = flask.current_app.extensions['socketio']
    return socketio.emit(event, *args, namespace=ns, to=target,
                         include_self=kwargs.get('include_self', True),
                         skip_sid=kwargs.get('skip_sid'),
                         callback=kwargs.get('callback'),
                         ignore_queue=kwargs.get('ignore_queue', False))
def send(message, **kwargs):
    """Send a plain SocketIO message to one or more connected clients.

    A simpler variant of :func:`emit`; can only be called from inside a
    SocketIO event handler.

    :param message: The message to send, either a string or a JSON blob.
    :param json: ``True`` if ``message`` is a JSON blob.
    :param namespace: Namespace to send under; defaults to the namespace of
                      the originating event.
    :param callback: Callback invoked with the client's acknowledgement.
    :param broadcast: ``True`` to send to all connected clients.
    :param to: Room (or sid) to address; when unset and ``broadcast`` is
               false, the originating client is used.
    :param include_self: Whether the sender also receives the message.
    :param skip_sid: sid (or list of sids) to exclude when broadcasting.
    :param ignore_queue: Bypass the message queue and emit directly; only
                         safe with a single server process or addressee.
    """
    as_json = kwargs.get('json', False)
    ns = kwargs['namespace'] if 'namespace' in kwargs else \
        flask.request.namespace
    # 'room' is the legacy spelling of 'to'; both are consumed here.
    room = kwargs.pop('room', None)
    target = kwargs.pop('to', room)
    if target is None and not kwargs.get('broadcast'):
        # Default to replying to the originating client only.
        target = flask.request.sid
    socketio = flask.current_app.extensions['socketio']
    return socketio.send(message, json=as_json, namespace=ns, to=target,
                         include_self=kwargs.get('include_self', True),
                         skip_sid=kwargs.get('skip_sid'),
                         callback=kwargs.get('callback'),
                         ignore_queue=kwargs.get('ignore_queue', False))
def join_room(room, sid=None, namespace=None):
    """Put a client in a room.

    Client and namespace default to the originating event's context, so
    this is normally called from inside a SocketIO event handler::

        @socketio.on('join')
        def on_join(data):
            username = session['username']
            room = data['room']
            join_room(room)
            send(username + ' has entered the room.', room=room)

    :param room: The name of the room to join.
    :param sid: Session id of the client; defaults to the request context.
    :param namespace: Room namespace; defaults to the request context.
    """
    socketio = flask.current_app.extensions['socketio']
    if not sid:
        sid = flask.request.sid
    if not namespace:
        namespace = flask.request.namespace
    socketio.server.enter_room(sid, room, namespace=namespace)
def leave_room(room, sid=None, namespace=None):
    """Remove a client from a room.

    Client and namespace default to the originating event's context::

        @socketio.on('leave')
        def on_leave(data):
            username = session['username']
            room = data['room']
            leave_room(room)
            send(username + ' has left the room.', room=room)

    :param room: The name of the room to leave.
    :param sid: Session id of the client; defaults to the request context.
    :param namespace: Room namespace; defaults to the request context.
    """
    socketio = flask.current_app.extensions['socketio']
    if not sid:
        sid = flask.request.sid
    if not namespace:
        namespace = flask.request.namespace
    socketio.server.leave_room(sid, room, namespace=namespace)
def close_room(room, namespace=None):
    """Remove all users from a room and delete it from the server.

    :param room: The name of the room to close.
    :param namespace: Room namespace; defaults to the request context.
    """
    socketio = flask.current_app.extensions['socketio']
    if not namespace:
        namespace = flask.request.namespace
    socketio.server.close_room(room, namespace=namespace)
def rooms(sid=None, namespace=None):
    """Return the list of rooms the client has entered.

    Includes the client's own room, which the Socket.IO server assigns
    automatically.

    :param sid: Session id of the client; defaults to the request context.
    :param namespace: Room namespace; defaults to the request context.
    """
    socketio = flask.current_app.extensions['socketio']
    if not sid:
        sid = flask.request.sid
    if not namespace:
        namespace = flask.request.namespace
    return socketio.server.rooms(sid, namespace=namespace)
def disconnect(sid=None, namespace=None, silent=False):
    """Terminate the connection with a client.

    The client receives a disconnect event as a result::

        @socketio.on('message')
        def receive_message(msg):
            if is_banned(session['username']):
                disconnect()
            else:
                # ...

    :param sid: Session id of the client; defaults to the request context.
    :param namespace: Namespace; defaults to the request context.
    :param silent: deprecated and ignored.
    """
    socketio = flask.current_app.extensions['socketio']
    if not sid:
        sid = flask.request.sid
    if not namespace:
        namespace = flask.request.namespace
    return socketio.server.disconnect(sid, namespace=namespace)
| {
"repo_name": "miguelgrinberg/Flask-SocketIO",
"path": "src/flask_socketio/__init__.py",
"copies": "1",
"size": "48388",
"license": "mit",
"hash": 7480011070761966000,
"line_mean": 47.1472636816,
"line_max": 79,
"alpha_frac": 0.5740266182,
"autogenerated": false,
"ratio": 4.952712384851586,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.6026739003051587,
"avg_score": null,
"num_lines": null
} |
from functools import wraps
import os
import traceback
from main.models import ExperimentSampleToAlignment
def set_assembly_status(sample_alignment, status, force=False):
    """Update the assembly_status field shown in the UI.

    Re-fetches the model row by uid so the freshest copy is modified.
    Unless ``force`` is True, asserts the current status is not FAILED.
    """
    fresh = ExperimentSampleToAlignment.objects.get(
            uid=sample_alignment.uid)
    if not force:
        # Refuse to overwrite a FAILED status unless explicitly forced.
        assert fresh.data.get('assembly_status') != (
                ExperimentSampleToAlignment.ASSEMBLY_STATUS.FAILED)
    fresh.data['assembly_status'] = status
    fresh.save()
def get_failure_report_path(sample_alignment, report_filename):
    """Full path to *report_filename* inside the alignment's data directory."""
    data_dir = sample_alignment.get_model_data_dir()
    return os.path.join(data_dir, report_filename)
def report_failure_stats(file_name):
    """Decorator factory that records failures of assembly pipeline tasks.

    The decorated function must receive exactly one positional argument
    that is an ExperimentSampleToAlignment instance. If the function
    raises, the alignment's assembly status is forced to FAILED and the
    traceback plus exception are written to *file_name* inside the
    alignment's data directory. The exception is deliberately swallowed
    so the rest of the pipeline can proceed.
    """
    def decorator(func):
        @wraps(func)
        def wrapper(*args, **kwargs):
            # Asserts that should fail at beginning of run that should be
            # caught at development/test time.
            assert len(args) >= 1
            sample_alignment_args = [arg for arg in args if
                    isinstance(arg, ExperimentSampleToAlignment)]
            assert len(sample_alignment_args) == 1
            sample_alignment = sample_alignment_args[0]
            try:
                return func(*args, **kwargs)
            except Exception as exc:
                # Set assembly status to FAILED
                set_assembly_status(
                        sample_alignment,
                        ExperimentSampleToAlignment.ASSEMBLY_STATUS.FAILED,
                        force=True)
                # Write exception with traceback to file.
                # BUG FIX: the report label used to read 'tracback'.
                tb = traceback.format_exc()
                file_path = get_failure_report_path(sample_alignment, file_name)
                with open(file_path, 'w') as fh:
                    fh.write('traceback:%s\nexception:%r' % (tb, exc))
                # NOTE: Do not raise the exception so that the rest of the
                # pipeline can proceed.
        return wrapper
    return decorator
| {
"repo_name": "churchlab/millstone",
"path": "genome_designer/genome_finish/celery_task_decorator.py",
"copies": "1",
"size": "2433",
"license": "mit",
"hash": 3155846193480020000,
"line_mean": 35.3134328358,
"line_max": 80,
"alpha_frac": 0.6259761611,
"autogenerated": false,
"ratio": 4.652007648183557,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.00041040588304269894,
"num_lines": 67
} |
from functools import wraps
import os
import urlparse
import requests
from dropbox import Dropbox
from dropbox.oauth import DropboxOAuth2Flow
from flask import (
Flask,
make_response,
redirect,
render_template,
request,
session,
url_for
)
# Dropbox app credentials, read from the environment (KeyError if unset).
APP_KEY = os.environ['APP_KEY']
APP_SECRET = os.environ['APP_SECRET']
app = Flask(__name__)
# DEBUG is opt-in: only the literal string 'True' enables it.
app.config['DEBUG'] = os.environ.get('DEBUG') == 'True'
# Secret key for signing the Flask session cookie.
app.secret_key = os.environ['FLASK_SECRET_KEY']
def get_url(route):
    """Build an absolute URL for *route*, forcing HTTPS unless served locally."""
    hostname = urlparse.urlparse(request.url).hostname
    is_local = hostname in ('127.0.0.1', 'localhost')
    return url_for(route,
                   _external=True,
                   _scheme='http' if is_local else 'https')
def get_dropbox_auth_flow():
    """Build the Dropbox OAuth2 flow, storing CSRF state in the session."""
    return DropboxOAuth2Flow(APP_KEY, APP_SECRET, get_url('oauth_callback'),
                             session, 'dropbox-csrf-token')
def requires_auth(f):
    """Decorator: redirect to the Dropbox OAuth flow unless logged in.

    A user counts as logged in when 'access_token' is in the session.
    """
    @wraps(f)
    def decorated(*args, **kwargs):
        if 'access_token' not in session:
            # BUG FIX: this used to call get_flow(), which is undefined
            # (NameError for any unauthenticated request); the flow
            # factory in this module is get_dropbox_auth_flow().
            return redirect(get_dropbox_auth_flow().start())
        else:
            return f(*args, **kwargs)
    return decorated
@app.route("/login")
def login():
    """Start the Dropbox OAuth2 flow by redirecting to the provider."""
    return redirect(get_dropbox_auth_flow().start())
@app.route('/oauth_callback')
def oauth_callback():
    '''Callback function for when the user returns from OAuth.'''
    # Finish the flow and keep only the bearer token in the session.
    access_token, user_id, url_state = get_dropbox_auth_flow().finish(request.args)
    session['access_token'] = access_token
    return redirect(url_for('index'))
@app.route('/logout')
@requires_auth
def logout():
    """Drop all session state (including the access token) and go home."""
    session.clear()
    return redirect(url_for('index'))
@app.route("/")
def index():
    """Render the landing page (no authentication required)."""
    return render_template('index.html')
@app.route('/revisions')
@requires_auth
def revisions():
    """List revisions of a file picked with the Dropbox Chooser."""
    # Shared Link from Dropbox Chooser
    link = request.args['link']
    token = str(session['access_token'])
    # Resolve the shared link to a path via Dropbox API v1.
    metadata = requests.post('https://api.dropbox.com/1/metadata/link', params={'link': link},
                             headers={'Authorization': 'Bearer ' + token}).json()
    if not metadata.get('path'):
        return redirect(url_for('index'))
    # Fetch the revision list via Dropbox API v2, newest first.
    dbx = Dropbox(session['access_token'])
    revs = sorted(dbx.files_list_revisions(metadata['path']).entries,
                  key=lambda entry: entry.client_modified)
    revs.reverse()
    return render_template('revisions.html', path=metadata['path'],
                           filename=os.path.split(metadata['path'])[1],
                           revisions=revs)
@app.route('/revision')
@requires_auth
def revision():
    """Download one specific revision of a file as an attachment."""
    dbx = Dropbox(session['access_token'])
    meta, http_resp = dbx.files_download(request.args['path'],
                                         request.args['rev'])
    resp = make_response(http_resp.content)
    resp.headers["Content-Disposition"] = "attachment; filename=" + meta.name
    return resp
if __name__ == "__main__":
    # Run the Flask development server when executed directly.
    app.run()
| {
"repo_name": "rahulamlekar/revision-browser",
"path": "app.py",
"copies": "1",
"size": "2766",
"license": "apache-2.0",
"hash": -2670434578335101000,
"line_mean": 26.66,
"line_max": 111,
"alpha_frac": 0.6659436009,
"autogenerated": false,
"ratio": 3.3567961165048543,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.45227397174048545,
"avg_score": null,
"num_lines": null
} |
from functools import wraps
import os
from client import AuthDecorator, HttpClient
from flask import Flask, jsonify, request, g
from flask_pymongo import PyMongo
from auth import create_user, get_user_by_token, login_user
from errors import APIError
app = Flask(__name__)
# mLab-hosted MongoDB; credentials are injected from the environment.
mongo_uri = 'mongodb://{}:{}@ds259325.mlab.com:59325/zql'
app.config['MONGO_URI'] = mongo_uri.format(os.environ['ZQL_MONGO_USER'], os.environ['ZQL_MONGO_PASS'])
mongo = PyMongo(app)
# Initialize clients for all of our services.
grammar = AuthDecorator(HttpClient('http://127.0.0.1:2666/'))
interpreter = AuthDecorator(HttpClient('http://127.0.0.1:2020/'))
postprocessing = AuthDecorator(HttpClient('http://127.0.0.1:2015/'))
# Maps the first segment of an incoming request path to the backend
# service client that should receive the forwarded request.
mapping = {
    # Interpreter Service forwarders
    'interpret': interpreter,
    'keywords': interpreter,
    # Grammar Service forwarders
    'grammar': grammar,
    'grammars': grammar,
    # Postprocessing Service forwarders
    'event': postprocessing,
    'events': postprocessing,
}
def copy_headers():
    """Return the incoming request's HTTP headers as a plain dict."""
    return {name: request.headers.get(name)
            for name in request.headers.keys()}
# TBH idk why we need this, but doesn't work without it when testing on localhost
# For now, just add this to POST request handlers
def access_control(func):
    """Decorator adding permissive CORS headers and OPTIONS preflight handling."""
    @wraps(func)
    def wrapper(*args, **kwargs):
        if request.method != 'OPTIONS':
            resp = func(*args, **kwargs)
        else:
            # Preflight: answer with the default OPTIONS response plus the
            # headers the browser needs to allow the real request.
            resp = Flask.make_default_options_response(app)
            resp.headers['Access-Control-Allow-Headers'] = 'Content-Type,token'
            resp.headers['Content-Type'] = 'application/json'
        resp.headers['Access-Control-Allow-Origin'] = '*'
        return resp
    return wrapper
@app.route('/login', methods=['POST', 'OPTIONS'])
@access_control
def login():
    """Authenticate a user from JSON credentials; 409 APIError on failure."""
    username = request.json.get('username')
    password = request.json.get('password')
    if username is None or password is None:
        raise APIError('Missing username or password', status_code=409)
    user = login_user(mongo, username, password)
    if not user:
        raise APIError('Incorrect username/password', status_code=409)
    return jsonify(user)
@app.route('/create_user', methods=['POST', 'OPTIONS'])
@access_control
def new_user():
    """Create a user from JSON credentials; 409 APIError on bad input."""
    username = request.json.get('username')
    password = request.json.get('password')
    permission = request.json.get('permission')  # value of 0-2
    if username is None or password is None or permission is None:
        raise APIError('Missing username, password or permission', status_code=409)
    try:
        user = create_user(mongo, username, password, permission)
    except AssertionError as e:
        # create_user validates via assertions; surface the message as an API error.
        raise APIError(e.args[0])
    return jsonify(user)
@app.route('/', defaults={'path': ''})
@app.route('/<path:path>', methods=['GET', 'POST', 'PUT', 'DELETE', 'OPTIONS'])
@app.route('/')
@access_control
def forward(path):
    """Proxy the request to the backend service chosen by the first path segment."""
    app.logger.info('Forwarding route: %s method: %s' % (path, request.method))
    forwarder = path.split('/')[0]
    if forwarder not in mapping:
        raise APIError('Path not found in mapping', 401)
    # Forward body and headers verbatim; mirror the backend's status code.
    res, code = mapping[forwarder].make_request(
        path, request.method, request.data, copy_headers())
    resp = jsonify(res)
    resp.status_code = code
    return resp
@app.errorhandler(APIError)
def handle_api_error(e):
    """Serialize an APIError as JSON with its configured status code."""
    response = jsonify(e.to_dict())
    response.status_code = e.status_code
    return response
| {
"repo_name": "Zubdano/zql",
"path": "zql-backend/gateway/server.py",
"copies": "1",
"size": "3477",
"license": "mit",
"hash": -5760732088150885000,
"line_mean": 28.218487395,
"line_max": 102,
"alpha_frac": 0.6701179177,
"autogenerated": false,
"ratio": 3.6910828025477707,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.48612007202477703,
"avg_score": null,
"num_lines": null
} |
from functools import wraps
import os
import cv2
import numpy as np
def img_show(img):
    """Display *img* in a window and block until any key is pressed."""
    cv2.imshow('img', img)
    cv2.waitKey(0)
    cv2.destroyAllWindows()
def reshape_and_show(arr, w=20, h=20):
    """Reshape a flat array to (w, h), cast to uint8 and display it."""
    arr = arr.reshape(w, h).astype(np.uint8)
    img_show(arr)
def log_call(f):
    """Decorator that prints each call's arguments and result.

    The first positional argument (typically ``self``) is omitted from
    the printed argument list.
    """
    @wraps(f)
    def wrapper(*args, **kwargs):
        result = f(*args, **kwargs)
        rendered = ", ".join(str(a) for a in args[1:])
        print("%s(%s) => %s" % (f.__name__, rendered, result))
        return result
    return wrapper
def img_write(img, name, path=None):
    """Write *img* to *path*/*name* (default dir 'test_images/').

    Raises IOError when cv2 reports the write failed.
    """
    base_dir = path if path else 'test_images/'
    full_path = os.path.join(base_dir, name)
    if not cv2.imwrite(full_path, img):
        raise IOError("Could not write image {} to {}".format(name, full_path))
def uniformize_points(p1, p2, p3, p4):
    """Order four corner points as top-left, top-right, bottom-left,
    bottom-right.

    A point is a list/tuple of two values. The points are first sorted by
    coordinate sum (top-left has the smallest sum, bottom-right the
    largest); the two middle points are then swapped if needed so that
    the top-right corner (larger x) comes second.

    :param p1:
    :param p2:
    :param p3:
    :param p4:
    :return: list of the four points in canonical order
    """
    ordered = sorted([p1, p2, p3, p4], key=lambda pt: pt[0] + pt[1])
    if ordered[1][0] < ordered[2][0]:
        ordered[1], ordered[2] = ordered[2], ordered[1]
    return ordered
def _process_wrapper(queue, *args, **kwargs):
func = kwargs.pop('func')
r = func(*args, **kwargs)
queue.put(r)
queue.close()
| {
"repo_name": "bbuhai/sudoku-solver",
"path": "sudoku_solver/util.py",
"copies": "1",
"size": "1359",
"license": "mit",
"hash": 1402844434833954300,
"line_mean": 20.5714285714,
"line_max": 74,
"alpha_frac": 0.5643855776,
"autogenerated": false,
"ratio": 2.867088607594937,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.3931474185194937,
"avg_score": null,
"num_lines": null
} |
from functools import wraps
import os
import numpy
from fuel.datasets import H5PYDataset
class MissingInputFiles(Exception):
    """Raised by a converter when expected input files are absent.

    Parameters
    ----------
    filenames : list
        The filenames that could not be found.

    """
    def __init__(self, message, filenames):
        super(MissingInputFiles, self).__init__(message, filenames)
        self.filenames = filenames
def check_exists(required_files):
    """Decorator factory verifying input files before running.

    Parameters
    ----------
    required_files : list of str
        Filenames of regular files (not directories) that must exist in
        the input directory, which is the wrapped function's first
        argument (`directory`).

    Returns
    -------
    wrapper : function
        Takes a function and returns it wrapped with input-file
        existence verification; missing files raise
        :class:`MissingInputFiles`.

    """
    def function_wrapper(f):
        @wraps(f)
        def wrapped(directory, *args, **kwargs):
            missing = [fname for fname in required_files
                       if not os.path.isfile(os.path.join(directory, fname))]
            if missing:
                raise MissingInputFiles('Required files missing', missing)
            return f(directory, *args, **kwargs)
        return wrapped
    return function_wrapper
def fill_hdf5_file(h5file, data):
    """Fills an HDF5 file in a H5PYDataset-compatible manner.

    Parameters
    ----------
    h5file : :class:`h5py.File`
        File handle for an HDF5 file.
    data : tuple of tuple
        One element per split/source pair. Each element consists of a
        tuple of (split_name, source_name, data_array, comment), where

        * 'split_name' is a string identifier for the split name
        * 'source_name' is a string identifier for the source name
        * 'data_array' is a :class:`numpy.ndarray` containing the data
          for this split/source pair
        * 'comment' is a comment string for the split/source pair

        The 'comment' element can optionally be omitted.

    """
    # Check that all sources for a split have the same length
    split_names = set(split_tuple[0] for split_tuple in data)
    for name in split_names:
        lengths = [len(split_tuple[2]) for split_tuple in data
                   if split_tuple[0] == name]
        if not all(l == lengths[0] for l in lengths):
            raise ValueError("split '{}' has sources that ".format(name) +
                             "vary in length")
    # Initialize split dictionary
    split_dict = dict([(split_name, {}) for split_name in split_names])
    # Compute total source lengths and check that splits have the same dtype
    # across a source
    source_names = set(split_tuple[1] for split_tuple in data)
    for name in source_names:
        splits = [s for s in data if s[1] == name]
        # indices[i]:indices[i + 1] is the row range occupied by split i
        # within the concatenated dataset for this source.
        indices = numpy.cumsum([0] + [len(s[2]) for s in splits])
        if not all(s[2].dtype == splits[0][2].dtype for s in splits):
            raise ValueError("source '{}' has splits that ".format(name) +
                             "vary in dtype")
        if not all(s[2].shape[1:] == splits[0][2].shape[1:] for s in splits):
            raise ValueError("source '{}' has splits that ".format(name) +
                             "vary in shapes")
        dataset = h5file.create_dataset(
            name, (sum(len(s[2]) for s in splits),) + splits[0][2].shape[1:],
            dtype=splits[0][2].dtype)
        dataset[...] = numpy.concatenate([s[2] for s in splits], axis=0)
        for i, j, s in zip(indices[:-1], indices[1:], splits):
            # A 4-tuple carries an optional comment as its last element.
            if len(s) == 4:
                split_dict[s[0]][name] = (i, j, s[3])
            else:
                split_dict[s[0]][name] = (i, j)
    h5file.attrs['split'] = H5PYDataset.create_split_array(split_dict)
| {
"repo_name": "lamblin/fuel",
"path": "fuel/converters/base.py",
"copies": "2",
"size": "4206",
"license": "mit",
"hash": 6654838012265609000,
"line_mean": 36.2212389381,
"line_max": 77,
"alpha_frac": 0.6019971469,
"autogenerated": false,
"ratio": 4.168483647175421,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.00016388069485414618,
"num_lines": 113
} |
from functools import wraps
import os
class Lock(object):
    """
    Lock implementation. Can also be used as a context-manager or
    decorator.

    Unlike the redis-py lock implementation, this Lock does not
    use a spin-loop when blocking to acquire the lock. Instead,
    it performs a blocking pop on a list. When a lock is released,
    a value is pushed into this list, signalling that the lock is
    available.

    The lock uses Lua scripts to ensure the atomicity of its
    operations.

    You can set a TTL on a lock to reduce the potential for deadlocks
    in the event of a crash. If a lock is not released before it
    exceeds its TTL, threads that are blocked waiting for the
    lock could potentially re-acquire it.

    .. note:: TTL is specified in **milliseconds**.

    Locks can be used as context managers or as decorators:

    .. code-block:: python

        lock = db.lock('my-lock')

        with lock:
            perform_some_calculations()

        @lock
        def another_function():
            # The lock will be acquired when this function is
            # called, and released when the function returns.
            do_some_more_calculations()
    """
    def __init__(self, database, name, ttl=None, lock_id=None,
                 lock_test_delay=None):
        """
        :param database: A walrus ``Database`` instance.
        :param str name: The name for the lock.
        :param int ttl: The time-to-live for the lock in milliseconds.
        :param str lock_id: Unique identifier for the lock instance.
        :param int lock_test_delay: The time between polls when trying to
            acquire lock. Defaults to TTL if not defined.
        """
        self.database = database
        self.name = name
        # A ttl of 0 is passed through to the Lua script as "no expiry".
        self.ttl = ttl or 0
        # Random bytes identify this lock holder, so only the acquirer
        # can release the lock it holds.
        self._lock_id = lock_id or os.urandom(32)
        self.lock_test_delay = lock_test_delay or self.ttl

    @property
    def key(self):
        # Redis key holding the lock value.
        return 'lock:%s' % (self.name)

    @property
    def event(self):
        # Redis list used to signal waiting clients when a lock is released.
        return 'lock.event:%s' % (self.name)

    def acquire(self, block=True):
        """
        Acquire the lock. The lock will be held until it is released
        by calling :py:meth:`Lock.release`. If the lock was
        initialized with a ``ttl``, then the lock will be released
        automatically after the given number of milliseconds.

        By default this method will block until the lock becomes
        free (either by being released or expiring). The blocking is
        accomplished by performing a blocking left-pop on a list, as
        opposed to a spin-loop.

        If you specify ``block=False``, then the method will return
        ``False`` if the lock could not be acquired.

        :param bool block: Whether to block while waiting to acquire
            the lock.
        :returns: Returns ``True`` if the lock was acquired.
        """
        while True:
            acquired = self.database.run_script(
                'lock_acquire',
                keys=[self.key],
                args=[self._lock_id, self.ttl])
            if acquired == 1 or not block:
                return acquired == 1
            # Perform a blocking pop on the event key. When a lock
            # is released, a value is pushed into the list, which
            # signals listeners that the lock is available.
            # Convert the millisecond based TTL or delay value to seconds (rounding up)
            timeout_in_int = int(round(1.0 * self.lock_test_delay / 1000))
            self.database.blpop(self.event, timeout=timeout_in_int)

    def release(self):
        """
        Release the lock.

        :returns: Returns ``True`` if the lock was released.
        """
        # The Lua script only releases if our lock_id still owns the lock.
        unlocked = self.database.run_script(
            'lock_release',
            keys=[self.key, self.event],
            args=[self._lock_id])
        return unlocked != 0

    def clear(self):
        """
        Clear the lock, allowing it to be acquired. Do not use this
        method except to recover from a deadlock. Otherwise you should
        use :py:meth:`Lock.release`.
        """
        self.database.delete(self.key)
        self.database.delete(self.event)

    def __enter__(self):
        self.acquire()

    def __exit__(self, exc_type, exc_val, exc_tb):
        if not self.release():
            raise RuntimeError('Error releasing lock "%s".' % self.name)

    def __call__(self, fn):
        # Decorator usage: hold the lock for the duration of each call.
        @wraps(fn)
        def inner(*args, **kwargs):
            with self:
                return fn(*args, **kwargs)
        return inner
| {
"repo_name": "johndlong/walrus",
"path": "walrus/lock.py",
"copies": "1",
"size": "4566",
"license": "mit",
"hash": 6251427804450158000,
"line_mean": 33.3308270677,
"line_max": 87,
"alpha_frac": 0.6009636443,
"autogenerated": false,
"ratio": 4.3156899810964084,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.00008544087491455913,
"num_lines": 133
} |
from functools import wraps
import os
def restoring_chdir(fn):
    """Decorator that restores the original working directory on exit."""
    #XXX:dc: This would be better off in a neutral module
    @wraps(fn)
    def decorator(*args, **kw):
        # BUG FIX: capture the cwd *before* entering the try block. If
        # os.getcwd() were called inside the try and failed, `path` would
        # be unbound in the finally clause, raising UnboundLocalError and
        # masking the original error.
        path = os.getcwd()
        try:
            return fn(*args, **kw)
        finally:
            os.chdir(path)
    return decorator
class BaseBuilder(object):
    """
    The Base for all Builders. Defines the API for subclasses.

    All workflow steps need to return true, otherwise it is assumed something
    went wrong and the Builder will stop
    """
    # Step names executed in order by run(); each resolves to a method below.
    workflow = ['clean', 'build', 'move']

    def __init__(self, version):
        # version: the project version whose documentation is being built.
        self.version = version

    def run(self):
        # Execute each workflow step; a falsy return value aborts the build.
        for step in self.workflow:
            fn = getattr(self, step)
            result = fn()
            assert result

    @restoring_chdir
    def force(self):
        """
        An optional step to force a build even when nothing has changed.
        """
        print "Forcing a build by touching files"
        os.chdir(self.version.project.conf_dir(self.version.slug))
        os.system('touch * && touch */*')

    def clean(self):
        """
        Clean up the version so it's ready for usage.

        This is used to add RTD specific stuff to Sphinx, and to
        implement whitelists on projects as well.

        It is guaranteed to be called before your project is built.
        """
        raise NotImplementedError

    def build(self):
        """
        Do the actual building of the documentation.
        """
        raise NotImplementedError

    def move(self):
        """
        Move the documentation from its generated place to its final home.

        This needs to understand both a single server dev environment,
        as well as a multi-server environment.
        """
        raise NotImplementedError

    @property
    def changed(self):
        """
        Says whether the documentation has changed, and requires further action.

        This is mainly used to short-circuit more expensive builds of other
        output formats if the project docs didn't change on an update.

        Subclasses are recommended to override for more efficient builds.

        Defaults to `True`
        """
        return True
| {
"repo_name": "alex/readthedocs.org",
"path": "readthedocs/doc_builder/base.py",
"copies": "1",
"size": "2231",
"license": "mit",
"hash": -2747644196715697700,
"line_mean": 26.5432098765,
"line_max": 80,
"alpha_frac": 0.6069027342,
"autogenerated": false,
"ratio": 4.746808510638298,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5853711244838298,
"avg_score": null,
"num_lines": null
} |
from functools import wraps
import os, os.path
import shutil
import subprocess
import numpy
from scipy import special
import apogee.tools.read as apread
import apogee.tools.path as appath
from apogee.tools import toAspcapGrid,_aspcapPixelLimits
from apogee.spec.plot import apStarWavegrid
def specFitInput(func):
    """Decorator to parse the input for spectral fitting.

    Normalizes the first two positional arguments (spectrum, error) of the
    wrapped function: (locID, APOGEE-ID) string pairs are loaded from
    aspcapStar files, lists of such pairs are loaded into 2D arrays, and
    arrays on the 8575-pixel apStar wavelength grid are converted to the
    ASPCAP grid. Inputs already on the ASPCAP grid pass through unchanged.
    """
    @wraps(func)
    def input_wrapper(*args,**kwargs):
        spec= args[0]
        specerr= args[1]
        if isinstance(specerr,str): # locID+APOGEE-ID; array
            # Single star: load flux (ext=1) and error (ext=2) on the
            # ASPCAP wavelength grid.
            ispec= apread.aspcapStar(spec,specerr,ext=1,header=False,
                                     aspcapWavegrid=True)
            ispecerr= apread.aspcapStar(spec,specerr,ext=2,header=False,
                                        aspcapWavegrid=True)
            spec= ispec
            specerr= ispecerr
        elif (isinstance(specerr,(list,numpy.ndarray)) \
                and isinstance(specerr[0],str)): # locID+APOGEE-ID; array
            # Multiple stars: stack each star's flux/error into 2D arrays.
            aspcapBlu_start,aspcapGre_start,aspcapRed_start,aspcapTotal = _aspcapPixelLimits(dr=None)
            nspec= len(specerr)
            ispec= numpy.empty((nspec,aspcapTotal))
            ispecerr= numpy.empty((nspec,aspcapTotal))
            for ii in range(nspec):
                ispec[ii]= apread.aspcapStar(spec[ii],specerr[ii],ext=1,
                                             header=False,aspcapWavegrid=True)
                ispecerr[ii]= apread.aspcapStar(spec[ii],specerr[ii],ext=2,
                                                header=False,aspcapWavegrid=True)
            spec= ispec
            specerr= ispecerr
        elif isinstance(specerr,(list,numpy.ndarray)) \
                and isinstance(specerr[0],(float,numpy.float32,
                                           numpy.float64,numpy.ndarray)) \
                and ((len(specerr.shape) == 1 and len(specerr) == 8575)
                     or (len(specerr.shape) == 2 and specerr.shape[1] == 8575)): #array on apStar grid
            # 8575 pixels identifies the apStar grid; convert to ASPCAP.
            spec= toAspcapGrid(spec)
            specerr= toAspcapGrid(specerr)
        return func(spec,specerr,*args[2:],**kwargs)
    return input_wrapper
def convert_modelAtmosphere(**kwargs):
    """
    NAME:
       convert_modelAtmosphere
    PURPOSE:
       Convert a model atmosphere to MOOG format
    INPUT:
       Either:
          (a) modelatm= (None) can be set to the filename of a model atmosphere
          (b) specify the stellar parameters for a grid point in model atm by
              - lib= ('kurucz_filled') spectral library
              - teff= (4500) grid-point Teff
              - logg= (2.5) grid-point logg
              - metals= (0.) grid-point metallicity
              - cfe= (0.) grid-point carbon-enhancement
              - afe= (0.) grid-point alpha-enhancement
              - dr= return the path corresponding to this data release
       vmicro= (2.) microturbulence (km/s) (only used if the MOOG-formatted atmosphere file doesn't already exist)
    OUTPUT:
       (none; just converts and caches the model atmosphere)
    RAISES:
       ValueError: if modelatm= is a non-existing filename or not a filename
       subprocess.CalledProcessError: if the awk conversion fails
    HISTORY:
       2015-02-13 - Written - Bovy (IAS)
       2015-03-21 - Adjusted to also work for off-grid atmosphers - Bovy (IAS)
    """
    # Resolve the model-atmosphere filename: explicit file, or grid-point path
    modelatm = kwargs.pop('modelatm', None)
    if modelatm is not None:
        if isinstance(modelatm, str) and os.path.exists(modelatm):
            modelfilename = modelatm
        elif isinstance(modelatm, str):
            raise ValueError('modelatm= input is a non-existing filename')
        else:  # model atmosphere instance not supported
            raise ValueError('modelatm= in moogsynth should be set to the name of a file')
    else:
        modelfilename = appath.modelAtmospherePath(**kwargs)
    modeldirname = os.path.dirname(modelfilename)
    modelbasename = os.path.basename(modelfilename)
    outname = modelbasename.replace('.mod', '.org')
    # Already converted and cached?  Nothing to do
    if os.path.exists(os.path.join(modeldirname, outname)): return None
    # The awk script must run from the model's directory, so copy it there
    shutil.copy(os.path.join(os.path.dirname(os.path.realpath(__file__)),
                             'scripts/makemoogmodel.awk'), modeldirname)
    try:
        # Context managers close both files even if check_call raises
        # (previously they leaked on error); os.devnull is portable,
        # unlike a hard-coded '/dev/null'
        with open(os.path.join(modeldirname, outname), 'w') as stdout, \
                open(os.devnull, 'w') as stderr:
            subprocess.check_call(['awk', '-f', 'makemoogmodel.awk',
                                   'vmicro=%.1f' % kwargs.get('vmicro', 2.),
                                   modelfilename],
                                  cwd=modeldirname,
                                  stdout=stdout, stderr=stderr)
    finally:
        # Always remove the copied helper script, even on failure
        os.remove(os.path.join(modeldirname, 'makemoogmodel.awk'))
    return None
def vmacro(x, vmacro=6., sparse=False, norm=True):
    """
    NAME:
       vmacro
    PURPOSE:
       compute the proper macroturbulence kernel
    INPUT:
       x - Array of X values for which to compute the macroturbulence kernel, in pixel offset relative to pixel centers; the kernel is calculated at the x offsets for each pixel center; x need to be 1/integer equally-spaced pixel offsets
       vmacro= (6.) macroturbulence in km/s (FWHM)
       sparse= (False) if True, return a sparse representation that can be passed to apogee.spec.lsf.convolve for easy convolution
       norm= (True) if False, don't normalize to sum to 1 (useful to check whether the kernel actually integrates to 1)
    OUTPUT:
       LSF-like array of the macroturbulence
    HISTORY:
       2015-03-23 - Written - Bovy (IAS)
    """
    from apogee.spec.lsf import sparsify
    sqrtpi = numpy.sqrt(numpy.pi)
    # Convert the FWHM in km/s to a Gaussian sigma in units of c
    sig_c = vmacro/3./10.**5./2./numpy.sqrt(2.*numpy.log(2.))
    # Sub-pixel sampling implied by the offset spacing (x are 1/integer spaced)
    subpix = int(1./(x[1]-x[0]))
    # Output wavelength grid: log10-spaced apStar grid, oversampled by subpix
    logwav = numpy.log10(apStarWavegrid())
    dlogwav = logwav[1]-logwav[0]
    finewav = 10.**numpy.arange(logwav[0], logwav[-1]+dlogwav/subpix,
                                dlogwav/subpix)
    npix = len(finewav)
    nx = len(x)
    # Fractional wavelength offset of each kernel sample from its pixel center
    lam = numpy.tile(finewav, (nx, 1)).T
    dlam = 10.**(numpy.tile(numpy.log10(finewav), (nx, 1)).T
                 + numpy.tile(x, (npix, 1))*dlogwav)/lam-1.
    u = numpy.fabs(dlam/sig_c)
    # Radial-tangential macroturbulence profile (Gray)
    out = 2./sqrtpi*u \
        *(numpy.exp(-u**2.)/u-sqrtpi*special.erfc(u))
    # Analytic u -> 0 limit of the profile
    out[dlam == 0.] = 2./sqrtpi
    out *= (1.+dlam)*numpy.log(10.)/sig_c
    if norm:
        out /= numpy.tile(numpy.sum(out, axis=1), (nx, 1)).T
    if sparse:
        out = sparsify(out)
    return out
def _chi2(mspec,spec,specerr,weights=None):
"""Internal function that calculates the chi^2 for a given model,
assumes that the wavelength axis==-1"""
if not weights is None:
return numpy.sum(weights*(mspec-spec)**2./specerr**2,axis=-1)
else:
return numpy.sum((mspec-spec)**2./specerr**2,axis=-1)
| {
"repo_name": "jobovy/apogee",
"path": "apogee/modelspec/__init__.py",
"copies": "1",
"size": "6784",
"license": "bsd-3-clause",
"hash": -1198916432249181000,
"line_mean": 44.2266666667,
"line_max": 237,
"alpha_frac": 0.6133549528,
"autogenerated": false,
"ratio": 3.4279939363314806,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9419253032700705,
"avg_score": 0.02441917128615485,
"num_lines": 150
} |
from functools import wraps
import os.path
import theano
from theano import config
from theano import tensor as T
from theano.sandbox.cuda import GpuOp, as_cuda_ndarray_variable, device_properties
from theano.sandbox.cuda.basic_ops import (gpu_contiguous, GpuFromHost, HostFromGpu,
gpu_from_host, host_from_gpu, GpuJoin)
from theano.sandbox.cuda.opt import register_opt, local_optimizer, register_specialize_device
from theano.sandbox.cuda.type import CudaNdarrayType
from theano.tensor.basic import _scal_elemwise
def strip_transfer(variable):
    """
    Forcefully strip off a GPU<->host transfer from the given variable.

    If `variable` is not directly the result of a GPU<->host transfer, this
    function returns `variable` unchanged.  `None` is passed through.
    """
    if variable is None:
        return None
    # Graph inputs and shared variables have no owner; the original code
    # crashed here with AttributeError instead of returning them unchanged
    # as the docstring promises.
    if variable.owner is not None and \
            isinstance(variable.owner.op, (GpuFromHost, HostFromGpu)):
        return variable.owner.inputs[0]
    return variable
def elemwise_add_force_inplace_tag(fn):
    """
    Decorator for elemwise builders: after calling `fn`, tag the scalar op
    of the resulting variable's owner with ``is_mask = True`` and return the
    variable unchanged.
    """
    # functools.wraps (already imported at module level) preserves the
    # wrapped builder's name/docstring; the original decorator lost them.
    @wraps(fn)
    def inner(*args, **kwargs):
        var = fn(*args, **kwargs)
        # Mark the underlying scalar op so downstream passes can treat the
        # result as a mask / force in-place handling.
        var.owner.op.scalar_op.is_mask = True
        return var
    return inner
@elemwise_add_force_inplace_tag
@_scal_elemwise
def add_inplace(a, *others):
    """Elemwise add whose result is tagged with is_mask=True.

    The body is intentionally empty: `_scal_elemwise` generates the
    implementation, and `elemwise_add_force_inplace_tag` tags the result.
    """
    pass
@elemwise_add_force_inplace_tag
@_scal_elemwise
def mul_inplace(a, *others):
    """Elemwise multiply whose result is tagged with is_mask=True.

    The body is intentionally empty: `_scal_elemwise` generates the
    implementation, and `elemwise_add_force_inplace_tag` tags the result.
    """
    pass
class AdvancedSubtensor1Floats(T.subtensor.AdvancedSubtensor1):
    """
    Dummy class which supports subtensor indexing with float indices.

    This allows us to do GPU subtensor indexing using indices drawn from a
    float32 GPU shared variable.
    """
    def __init__(self, tag=None):
        # `tag` is an opaque label carried through to the GPU op by the
        # optimizer below; it does not affect computation.
        super(AdvancedSubtensor1Floats, self).__init__()
        self._tag = tag
    def make_node(self, x, ilist):
        # copy-paste of super.make_node, but without the int type constraint
        x_ = T.as_tensor_variable(x)
        ilist_ = T.as_tensor_variable(ilist)
        #if ilist_.type.dtype[:3] not in ('int', 'uin'):
        # raise TypeError('index must be integers')
        if ilist_.type.ndim != 1:
            raise TypeError('index must be vector')
        if x_.type.ndim == 0:
            raise TypeError('cannot index into a scalar')
        # Output keeps x's trailing dims; leading dim broadcasts like ilist
        bcast = (ilist_.broadcastable[0],) + x_.broadcastable[1:]
        return theano.gof.Apply(self, [x_, ilist_],
                                [T.TensorType(dtype=x.dtype, broadcastable=bcast)()])
    def grad(self, inputs, grads):
        x, ilist = inputs
        gz, = grads
        assert len(inputs) == 2
        if self.sparse_grad:
            raise RuntimeError("sparse grad not supported for AdvancedSubtensor1Floats")
        # Gradient w.r.t. x scatters gz back at the indexed rows; the index
        # input itself is disconnected from the gradient.
        setinc, inpl = self.set_instead_of_inc, self.inplace
        inc_op = AdvancedIncSubtensor1Floats(set_instead_of_inc=setinc, inplace=inpl)
        rval1 = [inc_op(x.zeros_like(), gz, ilist)]
        return rval1 + [T.DisconnectedType()()] * (len(inputs) - 1)
class GpuAdvancedSubtensor1Floats(AdvancedSubtensor1Floats, GpuOp):
    """
    GPU version of `AdvancedSubtensor1Floats`: row indexing of a
    CudaNdarray where the indices are float32 values cast to int inside
    the kernel.  Only 1-, 2-, and 3-dimensional inputs are supported.
    """
    def __init__(self, tag=None):
        # Delegate to the parent so that state initialized by
        # AdvancedSubtensor1.__init__ (e.g. sparse_grad, used by the
        # inherited grad()) is set up; the original skipped super().__init__
        # and only assigned self._tag, leaving those attributes missing.
        super(GpuAdvancedSubtensor1Floats, self).__init__(tag)
    def __str__(self):
        return "GpuAdvancedSubtensor1Floats(%s)" % self._tag
    def make_node(self, x, ilist):
        """Build the Apply node; indices are cast to floatX and made
        contiguous so the kernel can read them as a float32 view."""
        x_ = as_cuda_ndarray_variable(x)
        ilist_ = gpu_contiguous(T.cast(ilist, dtype=config.floatX)) # T.as_tensor_variable(ilist)
        #if ilist_.type.dtype[:3] not in ('int', 'uin'):
        # raise TypeError('index must be integers')
        if ilist_.type.ndim != 1:
            raise TypeError('index must be vector')
        if x_.type.ndim == 0:
            raise TypeError('cannot index into a scalar')
        # # c code suppose it is int64
        # if x.ndim in [1, 2, 3] and ilist_.dtype in [
        # 'int8', 'int16', 'int32', 'uint8', 'uint16', 'uint32']:
        # ilist_ = tensor.cast(ilist_, 'int64')
        bcast = (ilist_.broadcastable[0],) + x_.broadcastable[1:]
        return theano.gof.Apply(self, [x_, ilist_],
                                [CudaNdarrayType(dtype=x.dtype,
                                                 broadcastable=bcast)()])
    def perform(self, node, inp, out):
        # No Python fallback: this op only runs through its C/CUDA code.
        raise NotImplementedError("AdvancedSubtensor1FloatsGPU is GPU only")
    def c_code_cache_version(self):
        return 19
    def c_support_code(self):
        # CUDA kernel + host-side helper, adapted from Theano's
        # CudaNdarray_TakeFrom to accept float32-viewed indices.
        return """
        /*
         * Defines `k_take_3` for the case where we have `indices` only as a float32
         * view.
         *
         * d0,... are the output dims
         * indices are a list of index to operate on
         * They are int32 viewed as float32.
         * a is the output
         * b is the input
         * dB0, the source leading dimensions size
         */
        template <int operator_num>
        __global__ void k_take_3_float(const int d0, const int d1, const int d2,
                                       const float* indices,
                                       float* a,
                                       const int sA0, const int sA1, const int sA2,
                                       const float* b, const int dB0,
                                       const int sB0, const int sB1, const int sB2,
                                       int* err){
            for (int i0 = blockIdx.x; i0 < d0; i0 += gridDim.x){
                // Only difference from `k_take_3` -- cast from the float32 view
                int idx = (int) indices[i0];
                if (idx<0)
                    idx += dB0; // To allow negative indexing.
                if ((idx < 0) || (idx >= dB0)){
                    // Any value other the 0 probably work. But to be more safe, I want
                    // to change all bits to prevent problem with concurrent write that
                    // could cross cache line. But this should not happen with the
                    // current code and driver.
                    *err = 0xFFFF;
                    continue;
                }
                for (int i1 = threadIdx.x; i1 < d1; i1 += blockDim.x){
                    for (int i2 = threadIdx.y; i2 < d2; i2 += blockDim.y){
                        int a_idx = i0*sA0 + i1*sA1 + i2*sA2;
                        int b_idx = idx*sB0 + i1*sB1 + i2*sB2;
                        a[a_idx] = b[b_idx];
                    }
                }
            }
        }
        /**
         * Defines `CudaNdarray_TakeFrom` in the case where we have `indices` as a
         * float32 view.
         *
         * This is just a copy-paste of `CudaNdarray_TakeFrom` from Theano commit
         * 894d66655^ , modified to be accessible from a Theano op's C code (rather
         * than as a C->Python binding). See original at
         *
         * https://github.com/Theano/Theano/blob/894d66655c5b54432bb1d26da910c3cce3f4b830%5E/theano/sandbox/cuda/cuda_ndarray.cu#L742
         */
        CudaNdarray *
        TakeFrom_Float(CudaNdarray *self, CudaNdarray *indices, long axis,
                       CudaNdarray *out, const char *clipmode) {
            int verbose = 0;
            if (verbose) {
                printf("indices used on the gpu\\n");
                PyObject * used_indices = CudaNdarray_CreateArrayObj(indices);
                PyObject_Print(used_indices, stdout, 0);
                Py_DECREF(used_indices);
            }
            if (verbose) printf("after print of object\\n");
            if(!CudaNdarray_is_c_contiguous(indices) != 0) {
                PyErr_SetString(PyExc_NotImplementedError, "CudaNdarray_TakeFrom: The indices must be contiguous in memory.");
                Py_DECREF(indices);
                return NULL;
            }
            int nb_indices = CudaNdarray_SIZE((CudaNdarray *)indices);
            if (axis != 0) {
                PyErr_SetString(PyExc_NotImplementedError,"CudaNdarray_TakeFrom: only axis=0 is currently supported");
                Py_DECREF(indices);
                return NULL;
            }
            //Check argument out
            if (out && (out->nd != self->nd ||
                        CudaNdarray_HOST_DIMS(out)[0] != nb_indices))
                out = NULL;
            int dims[self->nd];
            dims[0] = nb_indices;
            for (int i=1 ; i<self->nd ; i++) {
                dims[i] = CudaNdarray_HOST_DIMS(self)[i];
                if (out && CudaNdarray_HOST_DIMS(out)[i] != dims[i]) {
                    out = NULL;
                }
            }
            if (!out) {
                out = (CudaNdarray*)CudaNdarray_New();
                if (!out){
                    Py_DECREF(indices);
                    return NULL;
                }
                if (CudaNdarray_alloc_contiguous(out, self->nd, dims)) {
                    Py_DECREF(out);
                    Py_DECREF(indices);
                    return NULL;
                }
            }else {
                Py_INCREF(out);
            }
            //Check argument clipmode
            if (strcmp(clipmode, "raise") != 0) {
                PyErr_SetString(PyExc_NotImplementedError,"CudaNdarray_TakeFrom: only the raise mode is currently supported");
                Py_DECREF(indices);
                Py_DECREF(out);
                return NULL;
            }
            void (*k3)(const int, const int, const int,
                       const float*,
                       float*, const int, const int, const int,
                       const float*, const int,
                       const int, const int, const int,
                       int*);
            k3 = k_take_3_float<CPY>;
            // Create the memory place that will store the error information.
            if (init_err_var() != 0) {
                Py_DECREF(indices);
                Py_DECREF(out);
                return NULL;
            }
            dim3 n_blocks(std::min(CudaNdarray_HOST_DIMS(out)[0],65535),1,1);
            switch (self->nd) {
                case 1:
                    {
                        dim3 n_threads(1, 1, 1);
                        if (verbose)
                            printf("kernel config: (n_blocks.x=%d, n_blocks.y=%d,"
                                   " n_threads.x=%i, n_threads.y=%i)\\n",
                                   n_blocks.x, n_blocks.y, n_threads.x, n_threads.y);
                        k3<<<n_blocks, n_threads>>>(
                                dims[0],
                                1,
                                1,
                                CudaNdarray_DEV_DATA(indices),
                                CudaNdarray_DEV_DATA(out),
                                CudaNdarray_HOST_STRIDES(out)[0], //strides
                                1,
                                1,
                                CudaNdarray_DEV_DATA(self),
                                CudaNdarray_HOST_DIMS(self)[0], //For indices check
                                CudaNdarray_HOST_STRIDES(self)[0], //strides
                                1,
                                1,
                                err_var);
                    }
                    break;
                case 2:
                    {
                        dim3 n_threads(std::min(CudaNdarray_HOST_DIMS(out)[1], 512), 1, 1);
                        if (verbose)
                            printf("kernel config: (n_blocks.x=%d, n_blocks.y=%d,"
                                   " n_threads.x=%i, n_threads.y=%i)\\n",
                                   n_blocks.x, n_blocks.y, n_threads.x, n_threads.y);
                        k3<<<n_blocks, n_threads>>>(
                                dims[0], //dimensions
                                dims[1],
                                1,
                                CudaNdarray_DEV_DATA(indices),
                                CudaNdarray_DEV_DATA(out),
                                CudaNdarray_HOST_STRIDES(out)[0], //strides
                                CudaNdarray_HOST_STRIDES(out)[1],
                                1,
                                CudaNdarray_DEV_DATA(self),
                                CudaNdarray_HOST_DIMS(self)[0], //For indices check
                                CudaNdarray_HOST_STRIDES(self)[0], //strides
                                CudaNdarray_HOST_STRIDES(self)[1],
                                1,
                                err_var);
                    }
                    break;
                case 3:
                    {
                        int ty = std::min(CudaNdarray_HOST_DIMS(out)[2], 512);
                        int tx = std::min(CudaNdarray_HOST_DIMS(out)[1], 512 / ty);
                        dim3 n_threads(tx, ty, 1);
                        if (verbose)
                            printf("kernel config: (n_blocks.x=%d, n_blocks.y=%d,"
                                   " n_threads.x=%i, n_threads.y=%i)\\n",
                                   n_blocks.x, n_blocks.y, n_threads.x, n_threads.y);
                        k3<<<n_blocks, n_threads>>>(
                                dims[0], //dimensions
                                dims[1],
                                dims[2],
                                CudaNdarray_DEV_DATA(indices),
                                CudaNdarray_DEV_DATA(out),
                                CudaNdarray_HOST_STRIDES(out)[0], //strides
                                CudaNdarray_HOST_STRIDES(out)[1],
                                CudaNdarray_HOST_STRIDES(out)[2],
                                CudaNdarray_DEV_DATA(self),
                                CudaNdarray_HOST_DIMS(self)[0], //For indices check
                                CudaNdarray_HOST_STRIDES(self)[0], //strides
                                CudaNdarray_HOST_STRIDES(self)[1],
                                CudaNdarray_HOST_STRIDES(self)[2],
                                err_var);
                    }
                    break;
                default:
                    PyErr_SetString(PyExc_NotImplementedError,
                                    "CudaNdarray_TakeFrom: only input with 1, 2 or 3"
                                    " dimensions are currently supported");
            }
            CNDA_THREAD_SYNC;
            cudaError_t err = cudaGetLastError();
            if (cudaSuccess != err) {
                PyErr_Format(PyExc_RuntimeError,
                             "Cuda error: %s: %s.\\n",
                             "CudaNdarray_TakeFrom",
                             cudaGetErrorString(err));
                Py_DECREF(indices);
                Py_DECREF(out);
                return NULL;
            }
            // Unsafe: don't copy back to CPU for error checking
            /*if (check_err_var() != 0) {
                Py_DECREF(indices);
                Py_DECREF(out);
                return NULL;
            }*/
            if (verbose) printf("TAKE SUCCEDED\\n");
            return out;
        }
        """
    def c_code(self, node, name, inp, out_, sub):
        """Generate the per-apply C code: delegate to TakeFrom_Float."""
        x, idx = inp
        out, = out_
        fail = sub["fail"]
        # NOTE(review): the previous contents of %(out)s are overwritten
        # without a Py_XDECREF -- confirm whether Theano guarantees it is
        # NULL on entry here, otherwise this leaks a reference.
        return """
        CudaNdarray *out = TakeFrom_Float((CudaNdarray *)%(x)s, (CudaNdarray *)%(idx)s, 0,
                                          (CudaNdarray *)%(out)s, "raise");
        if (out == NULL) {
            %(fail)s;
        }
        %(out)s = out;
        if (cudaGetLastError() != cudaSuccess) {
            PyErr_Format(PyExc_RuntimeError, "Cuda error: k_take_3_float: %%s",
                         cudaGetErrorString(cudaGetLastError()));
            %(fail)s;
        }
        """ % locals()
@register_opt("fast_compile")
@local_optimizer([gpu_from_host, AdvancedSubtensor1Floats])
def local_gpu_advanced_subtensor1_floats(node):
    """Graph optimizer: move `AdvancedSubtensor1Floats` onto the GPU.

    Handles two patterns:
      * gpu_from_host(AdvancedSubtensor1Floats(...)) -> GPU op directly
      * AdvancedSubtensor1Floats(host_from_gpu(x), ...) with float32 x
        -> host_from_gpu(GPU op)
    Returns a replacement list, or False when the pattern doesn't match.
    """
    if isinstance(node.op, GpuFromHost):
        host_input = node.inputs[0]
        if host_input.owner and \
                host_input.owner.op.__class__ is AdvancedSubtensor1Floats:
            x = host_input.owner.inputs[0]
            coords = host_input.owner.inputs[1:]
            # Carry the op's tag over to the GPU op for traceability
            return [GpuAdvancedSubtensor1Floats(host_input.owner.op._tag)(as_cuda_ndarray_variable(x),
                                                                          *coords)]
    if node.op.__class__ is AdvancedSubtensor1Floats:
        x = node.inputs[0]
        coords = node.inputs[1:]
        # print x.owner.op, x.type, node.op._tag # DEV
        if (x.owner and isinstance(x.owner.op, HostFromGpu) and
                x.dtype == "float32"):
            gpu_x, = x.owner.inputs
            return [host_from_gpu(GpuAdvancedSubtensor1Floats(node.op._tag)(gpu_x, *coords))]
    return False
class AdvancedIncSubtensor1Floats(T.subtensor.AdvancedIncSubtensor1):
    """
    Variant of `AdvancedIncSubtensor1` (set/increment rows of x at the given
    indices with y) that accepts float-typed indices, mirroring
    `AdvancedSubtensor1Floats`.
    """
    def make_node(self, x, y, ilist):
        # Same as the parent's make_node, minus the integer-index constraint.
        x_ = T.as_tensor_variable(x)
        y_ = T.as_tensor_variable(y)
        ilist_ = T.as_tensor_variable(ilist)
        #if ilist_.type.dtype[:3] not in ('int', 'uin'):
        # raise TypeError('index must be integers')
        if ilist_.type.ndim != 1:
            raise TypeError('index must be vector')
        if x_.type.ndim == 0:
            raise TypeError('cannot index into a scalar')
        if y_.type.ndim > x_.type.ndim:
            if self.set_instead_of_inc:
                opname = 'set'
            else:
                opname = 'increment'
            raise TypeError(
                'cannot %s x subtensor with ndim=%s'
                ' by y with ndim=%s' % (
                    opname, x_.type.ndim, y_.type.ndim))
        return theano.gof.Apply(self, [x_, y_, ilist_], [x_.type()])
    def grad(self, inputs, grads):
        g_output, = grads
        x, y, idx_list = inputs
        if x.dtype in theano.tensor.discrete_dtypes:
            # The output dtype is the same as x
            gx = x.zeros_like(dtype=theano.config.floatX)
            if y.dtype in theano.tensor.discrete_dtypes:
                gy = y.zeros_like(dtype=theano.config.floatX)
            else:
                gy = y.zeros_like()
        elif x.dtype in theano.tensor.complex_dtypes:
            raise NotImplementedError("No support for complex grad yet")
        else:
            if self.set_instead_of_inc:
                # Rows that were overwritten contribute no gradient to x:
                # zero them out of g_output.
                gx_op = AdvancedIncSubtensor1Floats(set_instead_of_inc=True,
                                                    inplace=self.inplace)
                gx = gx_op(g_output, y.zeros_like(), idx_list)
            else:
                gx = g_output
            # Gradient w.r.t. y gathers g_output at the indexed rows.
            gy = AdvancedSubtensor1Floats()(g_output, idx_list)
            gy = T.subtensor._sum_grad_over_bcasted_dims(y, gy)
        # The index input is disconnected from the gradient.
        return [gx, gy] + [T.DisconnectedType()()]
class GpuAdvancedIncSubtensor1Floats_dev20(AdvancedIncSubtensor1Floats, GpuOp):
    """
    Modified form of `GpuAdvancedIncSubtensor1_dev20` which supports indices in
    float32 view.

    Set/increment rows of a 2D CudaNdarray x with rows of y at float-viewed
    indices; increments use atomicAdd, sets are a plain (non-atomic) write.
    """
    def make_node(self, x, y, ilist):
        # Indices are cast to floatX and made contiguous for the kernel.
        x_ = as_cuda_ndarray_variable(x)
        y_ = as_cuda_ndarray_variable(y)
        ilist_ = gpu_contiguous(T.cast(ilist, config.floatX))
        assert x_.type.dtype == y_.type.dtype
        assert x_.type.ndim >= y_.type.ndim
        #if ilist_.type.dtype[:3] not in ('int', 'uin'):
        # raise TypeError('index must be integers')
        if ilist_.type.ndim != 1:
            raise TypeError('index must be vector')
        if x_.type.ndim == 0:
            raise TypeError('cannot index into a scalar')
        if y_.type.ndim > x_.type.ndim:
            if self.set_instead_of_inc:
                opname = 'set'
            else:
                opname = 'increment'
            raise TypeError(
                'cannot %s x subtensor with ndim=%s'
                ' by y with ndim=%s' % (
                    opname, x_.type.ndim, y_.type.ndim))
        return theano.gof.Apply(self, [x_, y_, ilist_], [x_.type()])
    def perform(self, node, inp, out):
        # No Python fallback: this op only runs through its C/CUDA code.
        raise NotImplementedError("GpuAdvancedIncSubtensor1Floats_dev20 supports GPU only")
    def c_code_cache_version(self):
        return 7
    def c_code(self, node, name, inputs, outputs, sub):
        """Copy x (or alias it when inplace) and dispatch to the CUDA helper."""
        x, y, ind = inputs
        out, = outputs
        fail = sub['fail']
        inplace = int(self.inplace)
        set_instead_of_inc = int(self.set_instead_of_inc)
        return """
        Py_XDECREF(%(out)s);
        if (!%(inplace)s) {
            %(out)s = (CudaNdarray*)CudaNdarray_Copy(%(x)s);
        } else {
            %(out)s = %(x)s;
            Py_XINCREF(%(out)s);
        }
        if (CudaNdarray_Fvector_add_or_replace_fast(%(out)s, %(y)s, %(ind)s, %(set_instead_of_inc)s) != 0){
            %(fail)s
        }
        if (!%(out)s) {
            %(fail)s
        }
        """ % locals()
    def c_support_code_apply(self, node, nodename):
        # Kernel + host wrapper; error checking deliberately skipped
        # ("unsafe") to avoid a device-to-host transfer per call.
        return """
        __global__ void k_Fvector_add_or_replace_fast(int numRowsX,
                                                      int numColsX,
                                                      int stridesX0,
                                                      int stridesX1,
                                                      float *X,
                                                      int numRowsY,
                                                      int numColsY,
                                                      int stridesY0,
                                                      int stridesY1,
                                                      float *Y ,
                                                      float *d_indices_arr,
                                                      int num,
                                                      const int set_instead_of_inc,
                                                      int* err)
        {
            for (int i = (blockIdx.x); i < num; i += gridDim.x)
            {
                for(int j = (threadIdx.x); j < numColsX;j += blockDim.x)
                {
                    int x_row = (int) d_indices_arr[i];
                    if(x_row < 0)
                        x_row += numRowsX;
                    int y_row = i;
                    if(x_row < numRowsX && x_row >= 0){
                        if(set_instead_of_inc){
                            // HACK: Unsafe (non-atomic) update.
                            X[(x_row * stridesX0) + (j * stridesX1)] = Y[(y_row * stridesY0) + (j * stridesY1)];
                        } else{
                            atomicAdd(&X[(x_row * stridesX0) + (j * stridesX1)],
                                      Y[(y_row * stridesY0) + (j * stridesY1)]);
                        }
                    } else {
                        *err = 1;
                    }
                }
            }
            return;
        }
        int CudaNdarray_Fvector_add_or_replace_fast(CudaNdarray* py_self,
            CudaNdarray* py_other, CudaNdarray *py_indices,
            const int set_instead_of_inc)
        {
            if(init_err_var()!= 0) return -1;
            const int *shapeX = CudaNdarray_HOST_DIMS(py_self);
            const int *shapeY = CudaNdarray_HOST_DIMS(py_other);
            const int *strX = CudaNdarray_HOST_STRIDES(py_self);
            const int *strY = CudaNdarray_HOST_STRIDES(py_other);
            unsigned int size = (unsigned int)CudaNdarray_SIZE(py_indices);
            if(size == 0){
                return 0;
            }
            unsigned int numcolsX = shapeX[1];
            unsigned int num_threads_per_block = std::min(
                numcolsX, (unsigned int)NUM_VECTOR_OP_THREADS_PER_BLOCK);
            unsigned int num_blocks = std::min(
                size, (unsigned int)NUM_VECTOR_OP_BLOCKS);
            dim3 n_blocks(num_blocks);
            dim3 n_threads(num_threads_per_block);
            cudaError_t err;
            k_Fvector_add_or_replace_fast<<<n_blocks, n_threads>>>(
                shapeX[0],
                shapeX[1],
                strX[0],
                strX[1],
                CudaNdarray_DEV_DATA(py_self),
                shapeY[0],
                shapeY[1],
                strY[0],
                strY[1],
                CudaNdarray_DEV_DATA(py_other),
                CudaNdarray_DEV_DATA(py_indices),
                size,
                set_instead_of_inc,
                err_var
            );
            // Unsafe: This induces a DtoH transfer. Only enable for dev and the like.
            //int index_err = check_err_var();
            //if(index_err != 0) return -1;
            err = cudaGetLastError();
            if(err != cudaSuccess){
                PyErr_Format(
                    PyExc_RuntimeError,
                    "GpuAdvancedIncSubtensor1Floats_dev20: cuda error: %%s",
                    cudaGetErrorString(err));
                return -1;
            }
            return 0;
        }
        """ % locals()
class GpuAdvancedIncSubtensor1Floats_scal_dev20(AdvancedIncSubtensor1Floats, GpuOp):
    """
    Modified form of `GpuAdvancedIncSubtensor1_dev20` which supports indices in
    float32 view and scalar set values.

    Sets/increments elements of a 1D CudaNdarray x at float-viewed indices
    with a single broadcast scalar y.
    """
    def make_node(self, x, y, ilist):
        # Indices are cast to floatX and made contiguous for the kernel.
        x_ = as_cuda_ndarray_variable(x)
        y_ = as_cuda_ndarray_variable(y)
        ilist_ = gpu_contiguous(T.cast(ilist, config.floatX))
        assert x_.type.dtype == y_.type.dtype
        assert x_.type.ndim >= y_.type.ndim
        #if ilist_.type.dtype[:3] not in ('int', 'uin'):
        # raise TypeError('index must be integers')
        if ilist_.type.ndim != 1:
            raise TypeError('index must be vector')
        if x_.type.ndim == 0:
            raise TypeError('cannot index into a scalar')
        if y_.type.ndim > x_.type.ndim:
            if self.set_instead_of_inc:
                opname = 'set'
            else:
                opname = 'increment'
            raise TypeError(
                'cannot %s x subtensor with ndim=%s'
                ' by y with ndim=%s' % (
                    opname, x_.type.ndim, y_.type.ndim))
        return theano.gof.Apply(self, [x_, y_, ilist_], [x_.type()])
    def perform(self, node, inp, out):
        # No Python fallback: this op only runs through its C/CUDA code.
        # Fixed the error message: it was copy-pasted from the _dev20 class
        # and named the wrong op.
        raise NotImplementedError("GpuAdvancedIncSubtensor1Floats_scal_dev20 supports GPU only")
    def c_code_cache_version(self):
        return 6
    def c_code(self, node, name, inp, out, sub):
        """Copy x (or alias it when inplace) and dispatch to the CUDA helper."""
        x, y, ind = inp
        out, = out
        fail = sub["fail"]
        inplace = int(self.inplace)
        set_instead_of_inc = int(self.set_instead_of_inc)
        return """
        Py_XDECREF(%(out)s);
        if (!%(inplace)s) {
            %(out)s = (CudaNdarray*)CudaNdarray_Copy(%(x)s);
        } else {
            %(out)s = %(x)s;
            Py_XINCREF(%(out)s);
        }
        if (CudaNdarray_broadcast_inc_scalar(%(out)s, %(y)s, %(ind)s, %(set_instead_of_inc)s) != 0){
            %(fail)s
        }
        if (!%(out)s) {
            %(fail)s
        }
        """ % locals()
    def c_support_code_apply(self, node, nodename):
        # NOTE(review): unlike k_Fvector_add_or_replace_fast, this kernel
        # neither wraps negative indices nor uses atomicAdd for increments --
        # confirm callers never pass negative or duplicate indices.
        return """
        __global__ void k_broadcast_inc_scalar(int nX, int strX, float *d_X,
                                               const float *d_Y,
                                               const float *d_indices, int n,
                                               const int set_instead_of_inc, int *err)
        {
            int idx = blockIdx.x * blockDim.x + threadIdx.x;
            if (idx >= n)
                return;
            idx = (int) d_indices[idx];
            if (idx >= nX)
                return;
            if (set_instead_of_inc) {
                d_X[idx * strX] = d_Y[0];
            } else {
                d_X[idx * strX] += d_Y[0];
            }
        }
        int CudaNdarray_broadcast_inc_scalar(CudaNdarray* py_self,
            CudaNdarray* py_other, CudaNdarray* py_indices,
            const int set_instead_of_inc)
        {
            if(init_err_var()!= 0) return -1;
            int size = CudaNdarray_SIZE(py_indices);
            if(size == 0){
                return 0;
            }
            unsigned int n_threads = std::min(size, NUM_VECTOR_OP_THREADS_PER_BLOCK);
            unsigned int n_blocks = std::min(NUM_VECTOR_OP_BLOCKS,
                (size + NUM_VECTOR_OP_THREADS_PER_BLOCK - 1) / NUM_VECTOR_OP_THREADS_PER_BLOCK);
            cudaError_t err;
            k_broadcast_inc_scalar<<<n_blocks, n_threads>>>(
                CudaNdarray_SIZE(py_self),
                CudaNdarray_HOST_STRIDES(py_self)[0],
                CudaNdarray_DEV_DATA(py_self),
                CudaNdarray_DEV_DATA(py_other),
                CudaNdarray_DEV_DATA(py_indices),
                size, set_instead_of_inc, err_var);
            // Unsafe: This induces a DtoH transfer. Only enable for dev and the like.
            /*int index_err = check_err_var();
            if(index_err != 0) return -1;*/
            err = cudaGetLastError();
            if(err != cudaSuccess){
                PyErr_Format(
                    PyExc_RuntimeError,
                    "GpuAdvancedIncSubtensor1Floats_scal_dev20: cuda error: %%s",
                    cudaGetErrorString(err));
                return -1;
            }
            return 0;
        }
        """ % locals()
@register_opt("fast_compile")
@local_optimizer([gpu_from_host, AdvancedIncSubtensor1Floats])
def local_gpu_advanced_incsubtensor1_scal_floats(node):
    """Graph optimizer: move `AdvancedIncSubtensor1Floats` onto the GPU.

    Dispatches on (x.ndim, y.ndim): (1, 0) -> scalar-set GPU op,
    (2, 2) -> row-wise GPU op.  Handles both the
    gpu_from_host(op(...)) pattern and op(host_from_gpu(...), ...) with
    float32 inputs.  Returns a replacement list, or False when the
    pattern doesn't match.
    """
    supported_dims = {
        # x.ndim, y.ndim
        (1, 0): GpuAdvancedIncSubtensor1Floats_scal_dev20,
        (2, 2): GpuAdvancedIncSubtensor1Floats_dev20,
    }
    if isinstance(node.op, GpuFromHost):
        host_input = node.inputs[0]
        # Should not execute for GpuAdvancedIncSubtensor1
        if host_input.owner and \
                host_input.owner.op.__class__ is AdvancedIncSubtensor1Floats:
            x, y = host_input.owner.inputs[0:2]
            dims = (x.ndim, y.ndim)
            if dims not in supported_dims.keys():
                return False
            coords = host_input.owner.inputs[2:]
            # Preserve the CPU op's flags on the GPU replacement
            set_instead_of_inc = host_input.owner.op.set_instead_of_inc
            inplace = host_input.owner.op.inplace
            gpu_op = supported_dims[dims](inplace=inplace,
                                          set_instead_of_inc=set_instead_of_inc)
            return [gpu_op(as_cuda_ndarray_variable(x),
                           as_cuda_ndarray_variable(y), *coords)]
    # Should not execute for GpuAdvancedIncSubtensor1
    if (node.op.__class__ is AdvancedIncSubtensor1Floats and
            node.inputs[0].dtype == "float32" and
            node.inputs[1].dtype == "float32" and
            node.inputs[2].dtype == "float32"):
        x, y = node.inputs[0:2]
        dims = (x.ndim, y.ndim)
        if dims not in supported_dims:
            return False
        coords = node.inputs[2:]
        # Only move to GPU when at least one input already lives there
        go_gpu = False
        if x.owner and isinstance(x.owner.op, HostFromGpu):
            go_gpu = True
            gpu_x, = x.owner.inputs
        else:
            gpu_x = as_cuda_ndarray_variable(x)
        if y.owner and isinstance(y.owner.op, HostFromGpu):
            go_gpu = True
            gpu_y, = y.owner.inputs
        else:
            gpu_y = as_cuda_ndarray_variable(y)
        if go_gpu:
            set_instead_of_inc = node.op.set_instead_of_inc
            inplace = node.op.inplace
            gpu_op = supported_dims[dims](inplace=inplace,
                                          set_instead_of_inc=set_instead_of_inc)
            return [host_from_gpu(gpu_op(gpu_x, gpu_y, *coords))]
    return False
class JoinUnsafe(T.Join):
    """Marker subclass of `T.Join`: behaves identically on the CPU, but is
    recognized by `local_gpu_join_unsafe` below and replaced with
    `GpuJoinUnsafe` (the error-check-free GPU join)."""
    pass
class GpuJoinUnsafe(GpuJoin):
    """Implements GPU-based join without error checking."""
    def c_code(self, node, name, inputs, out_, sub):
        """Generate C code that allocates the joined output and copies each
        input into the corresponding slice along `axis`.

        NOTE(review): the variable `str` shadows the builtin; kept as-is to
        stay byte-identical with the generated code path.
        """
        nd = node.inputs[1].ndim
        if not all(i.ndim == nd for i in node.inputs[2:]):
            # all inputs ndarray need to have the same number of dimensions
            raise NotImplementedError()
        axis = inputs[0]
        # NOTE(review): n_cndas is computed but never used below.
        n_cndas = len(inputs[1:])
        input_1 = inputs[1]
        fail = sub['fail']
        out = out_[0]
        # getting the shapes of all the involved tensors (input[0]+out)
        str = """
        int axis = PyInt_AsLong((PyObject*)%(axis)s);
        const int nd = %(nd)s;
        int shape_out[nd];
        int width_sum = 0;
        int errorcode;
        int sum = 0;
        PyObject *slice_tuple = NULL;
        PyObject *section_slice = NULL;
        PyObject *full_slice = NULL;
        PyObject *out_sub = NULL;
        PyObject *start, *stop;
        start = NULL;
        stop = NULL;
        """ % locals()
        # Test negative axis
        str += """
        if( axis < -nd ){
            PyErr_Format(PyExc_IndexError,
                         "Join axis %%d out of bounds [0, %%d)", axis, nd);
            %(fail)s
        }
        if( axis < 0 ){
            axis = axis + nd;
        }
        """ % locals()
        # getting the shapes of all the involved tensors (input[1:])
        # + check: all input tensors have same shape as final out
        # except for "axis" dimension
        # shape_%(cdna)s[nd] is initialized before, to prevent following
        # error: jump to label __label_9 crosses initialization of
        # shape_%(cdna)s[nd]
        for i, cdna in enumerate(theano.gof.utils.uniq(inputs[1:])):
            str += """
            int shape_%(cdna)s[nd];
            """ % locals()
        str += """
        if(-1 == axis && PyErr_Occurred()){
            %(fail)s;
        }
        full_slice = PySlice_New(NULL, NULL, NULL);
        if(full_slice == NULL){
            %(fail)s;
        }
        for(int i = 0; i<nd; i+=1)
        {
            shape_%(input_1)s[i] = CudaNdarray_HOST_DIMS(%(input_1)s)[i];
            shape_out[i] = shape_%(input_1)s[i];
        }
        """ % locals()
        for i, cdna in enumerate(theano.gof.utils.uniq(inputs[2:])):
            str += """
            for(int i = 0; i<nd; i+=1)
            {
                shape_%(cdna)s[i] = CudaNdarray_HOST_DIMS(%(cdna)s)[i];
                if((i!=axis) && (shape_%(cdna)s[i]!=shape_out[i]))
                {
                    PyErr_Format(
                        PyExc_ValueError,
                        "GpuJoin: Wrong inputs for input %%d related"
                        " to inputs 0.!",
                        i);
                    %(fail)s;
                }
            }
            """ % locals()
        # computing the new shape for the out tensors
        for i, cdna in enumerate(inputs[1:]):
            str += "\t\twidth_sum += CudaNdarray_HOST_DIMS(%(cdna)s)[axis];\n" % locals()
        str += "\t\tshape_out[axis] = width_sum;\n"
        # preparing the output array + init of the necessary variables
        # for the data transfer
        str += """
        if (CudaNdarray_prep_output(&%(out)s, nd, shape_out))
        {
            %(fail)s;
        }
        """ % locals()
        # start copying the data into the new out tensors
        for i, cdna in enumerate(inputs[1:]):
            str += """
            sum += shape_%(cdna)s[axis];
            stop = PyInt_FromLong(sum);
            slice_tuple = PyTuple_New(nd);
            if(slice_tuple == NULL){
                %(fail)s;
            }
            section_slice = PySlice_New(start, stop, NULL);
            if(section_slice == NULL){
                %(fail)s;
            }
            for(int i=0; i<nd; i++)
            {
                if(i!=axis)
                {
                    Py_INCREF(full_slice);
                    PyTuple_SetItem(slice_tuple, i, full_slice);
                }
                else
                {
                    Py_INCREF(section_slice);
                    PyTuple_SetItem(slice_tuple, i, section_slice);
                }
            }
            out_sub = CudaNdarray_Subscript((PyObject*)%(out)s, slice_tuple);
            if(out_sub == NULL){
                Py_XDECREF(start);
                Py_XDECREF(stop);
                Py_XDECREF(slice_tuple);
                Py_XDECREF(out_sub);
                Py_XDECREF(%(out)s);
                %(fail)s;
            }
            Py_CLEAR(slice_tuple);
            Py_CLEAR(section_slice);
            // Unsafe: skip error checking.
            /*errorcode = CudaNdarray_CopyFromCudaNdarray(
                (CudaNdarray*)out_sub, %(cdna)s);
            if(errorcode != 0)
            {
                Py_XDECREF(start);
                Py_XDECREF(stop);
                Py_XDECREF(out_sub);
                Py_XDECREF(%(out)s);
                %(fail)s;
            }*/
            Py_XDECREF(out_sub);
            Py_XDECREF(start);
            start = stop;
            stop = NULL;
            """ % locals()
        # NOTE(review): the commented-out block above disables not only the
        # error check but the CudaNdarray_CopyFromCudaNdarray call itself,
        # so no data appears to be copied into the output slices -- confirm
        # this is intentional.
        str += """
        Py_XDECREF(start);
        Py_XDECREF(stop);
        """
        return str
    def c_code_cache_version(self):
        return (1,)
@register_opt("fast_compile")
@local_optimizer([JoinUnsafe])
def local_gpu_join_unsafe(node):
    """
    Inspired by the opt for convop.

    Very loose notation follows.
    Subgraphs concerned first look like
        [array of HostTensor] -> HostToGpu -> GpuToHost
        -> Join -> HostToGpu -> GpuToHost
    First we apply this Opt:
        join(host_from_gpu) -> host_from_gpu(gpu_join)
    then, as an intermediate result, there should be
        host_from_gpu(gpu_join) -> HostToGpu -> GpuToHost
    this unnecessary GpuToHost -> HostToGpu should be removed
    by other opts, leaving us with
        host_from_gpu(gpu_join)
    For intermediate places in the graph not covered by the first opt, the
    following could be useful:
        gpu_from_host(join) -> gpu_join(gpu_from_host)
    not implemented yet.
    """
    if isinstance(node.op, JoinUnsafe):
        # optimizing this case:
        # join(host_from_gpu) -> host_from_gpu(gpu_join)
        axis_and_tensors = node.inputs
        # Every joined tensor must be float32 and either already on the GPU
        # (HostFromGpu output) or a constant that can be moved there.
        matches = [t.dtype == 'float32' and
                   ((t.owner is not None and
                     isinstance(t.owner.op, HostFromGpu)) or
                    isinstance(t, theano.gof.Constant)) for t in axis_and_tensors[1:]]
        if all(matches):
            new_tensors = [as_cuda_ndarray_variable(t)
                           for t in axis_and_tensors[1:]]
            new_a_and_t = [axis_and_tensors[0]] + new_tensors
            replacement_node = host_from_gpu(GpuJoinUnsafe()(*new_a_and_t))
            return [replacement_node]
class GpuRowSwitch(GpuOp):
    """
    Row-wise switch between rank-2 matrices on the GPU.
    DOES NOT support broadcasting arguments (e.g. T.switch(mask, A, 0)).
    >>> A
    [[ 0.01902644 0.70658928 0.10509603]
    [ 0.2654964 0.08410256 0.96556276]
    [ 0.06885902 0.49623388 0.18812495]
    [ 0.56566966 0.52721274 0.48890418]]
    >>> B
    [[ 0.44089654 0.46353787 0.59428871]
    [ 0.88936949 0.74785614 0.80535758]
    [ 0.88973558 0.21844074 0.12561291]
    [ 0.01211281 0.86583334 0.9793455 ]]
    >>> mask
    [1 0 0 1]
    >>> GpuRowSwitch()(mask, A, B).eval()
    [[ 0.01902644 0.70658928 0.10509603]
    [ 0.88936949 0.74785614 0.80535758]
    [ 0.88973558 0.21844074 0.12561291]
    [ 0.56566966 0.52721274 0.48890418]]
    """

    # Theano Op metadata: three inputs (cond, ift, iff), one output.
    nin = 3
    nout = 1

    def make_node(self, cond, ift, iff):
        """Build the Apply node, moving every input onto the GPU.

        :param cond: per-row selector; flattened and cast to float32.
        :param ift: rows taken where ``cond`` is nonzero.
        :param iff: rows taken where ``cond`` is zero; same dtype/rank as ift.
        :raises ValueError: if either branch has a broadcastable dimension.
        """
        if any(ift.broadcastable) or any(iff.broadcastable):
            # BUG FIX: the format arguments must be a single tuple.  The
            # original `... % ift.broadcastable, iff.broadcastable` unpacked
            # the first tuple into the two %s slots and passed the second
            # tuple as an extra ValueError argument.
            raise ValueError("GPURowSwitch cannot operate on broadcastable "
                             "output arguments (ift %s, iff %s)."
                             % (ift.broadcastable, iff.broadcastable))
        out_type = ift.dtype

        cond = as_cuda_ndarray_variable(
            T.cast(cond.flatten(), "float32"))
        ift = as_cuda_ndarray_variable(ift)
        iff = as_cuda_ndarray_variable(iff)

        assert ift.type.dtype == iff.type.dtype
        assert cond.ndim == 1, cond.ndim
        assert ift.ndim == iff.ndim

        return theano.gof.Apply(
            self, [cond, ift, iff],
            [CudaNdarrayType(broadcastable=ift.broadcastable,
                             dtype=out_type)()])

    def perform(self, node, inp, out):
        # No CPU implementation; this op only runs through its C/CUDA code.
        raise NotImplementedError("GPUSwitch is GPU only")

    def c_support_code(self):
        """Defines the abstract row-switching kernel used in this op."""
        return """
__global__ void k_row_switch(int ndim,
                             int shape1, int shape2, int shape3,
                             int stride1, int stride2, int stride3,
                             const float* cond, const float* ift,
                             const float* iff, float* out) {
  // batch index
  int idx = blockIdx.x * blockDim.x + threadIdx.x;
  if (idx < 0 || idx >= shape1) {
    return;
  }

  const float *src = ((int) cond[idx]) ? ift : iff;
  int offset = idx * stride1;

  int lastDim = ndim == 2 ? shape2 : shape3;
  int lastStride = ndim == 2 ? stride2 : stride3;

  if (ndim == 3) {
    // index within the example
    int axis1_idx = blockIdx.y * blockDim.y + threadIdx.y;
    offset += axis1_idx * stride2;
  }

  for (int j = 0; j < lastDim; j++) {
    out[offset + j * lastStride] = src[offset + j * lastStride];
  }

  return;
}
""" % locals()

    def c_code(self, node, name, inp, out, sub):
        """Generates code to instantiate this op for these particular inputs."""
        cond, ift, iff = inp
        out, = out
        fail = sub["fail"]
        # BUG FIX: removed a leftover debug printf that wrote the condition
        # size to stdout on every invocation of the compiled op.
        return """
int err, N, N2, ndim;
cudaError_t sts;
int threads_per_block1, n_blocks1;
int threads_per_block2 = 1, n_blocks2 = 1;
const int *dims, *strides;

N = CudaNdarray_SIZE(%(cond)s);
ndim = CudaNdarray_NDIM(%(ift)s);

switch (ndim) {
case 3:
    N2 = CudaNdarray_HOST_DIMS(%(ift)s)[1];
    threads_per_block2 = std::min(N2, NUM_VECTOR_OP_THREADS_PER_BLOCK);
    n_blocks2 = std::min(NUM_VECTOR_OP_BLOCKS,
                         (N2 + NUM_VECTOR_OP_THREADS_PER_BLOCK - 1) / NUM_VECTOR_OP_THREADS_PER_BLOCK);
    // NB: no break!
case 2:
    threads_per_block1 = std::min(N, NUM_VECTOR_OP_THREADS_PER_BLOCK);
    n_blocks1 = std::min(NUM_VECTOR_OP_BLOCKS,
                         (N + NUM_VECTOR_OP_THREADS_PER_BLOCK - 1) / NUM_VECTOR_OP_THREADS_PER_BLOCK);
    break;
default:
    return 1;
}

dim3 n_blocks(n_blocks1, n_blocks2);
dim3 threads_per_block(threads_per_block1, threads_per_block2);

// Allocate the output array
Py_XDECREF(%(out)s);
%(out)s = (CudaNdarray *) CudaNdarray_NewDims(ndim, CudaNdarray_HOST_DIMS(%(ift)s));
if (!%(out)s) {
    %(fail)s;
}

dims = CudaNdarray_DIMS(%(ift)s);
strides = CudaNdarray_STRIDES(%(ift)s);

// Instantiate the kernel.
//
// TODO: Assumes stride of ift, iff are the same
k_row_switch<<<n_blocks, threads_per_block>>>(
    ndim,
    dims[0], dims[1], dims[2],
    strides[0], strides[1], strides[2],
    CudaNdarray_DEV_DATA(%(cond)s),
    CudaNdarray_DEV_DATA(%(ift)s),
    CudaNdarray_DEV_DATA(%(iff)s),
    CudaNdarray_DEV_DATA(%(out)s));

// Force async kernel instances to sync at this thread barrier
CNDA_THREAD_SYNC;

sts = cudaGetLastError();
if (cudaSuccess != sts) {
    PyErr_Format(PyExc_RuntimeError, "Cuda error: k_row_switch: %%s.",
                 cudaGetErrorString(sts));
    %(fail)s;
}
""" % locals()
@register_specialize_device("fast_compile")
@local_optimizer([theano.tensor.Elemwise, theano.scalar.Switch])
def local_gpua_row_switch(node):
    """
    Detects eligible Switch instances and replaces them with a GPU
    row switch.

    Eligible means: an Elemwise{Switch} whose condition is a vector mask
    broadcasting over rank-2/3 float32 branches.
    """
    # Reject Elemwise nodes whose scalar op is not Switch.
    # NOTE(review): when node.op is NOT an Elemwise this guard does not fire
    # and the code below still assumes a 3-input switch-like node -- confirm
    # the optimizer only dispatches Elemwise nodes here.
    if (node.op.__class__ == T.Elemwise
        and node.op.scalar_op.__class__ != theano.scalar.Switch):
        return False
    cond, ift, iff = node.inputs
    out, = node.outputs
    # Only applies to Switch instances where a vector mask broadcasts over
    # matrices: first dim non-broadcastable, all remaining dims broadcastable.
    bcast = cond.broadcastable
    if not bcast or not (not bcast[0] and all(bcast[1:])
                         and ift.ndim in [2, 3]):
        return False
    if not (ift.dtype == iff.dtype == "float32"):
        return False
    # Reuse existing GPU variables where the inputs are HostFromGpu outputs,
    # otherwise transfer them.
    if cond.owner and isinstance(cond.owner.op, HostFromGpu):
        gpu_cond, = cond.owner.inputs
    else:
        gpu_cond = as_cuda_ndarray_variable(
            T.cast(cond.flatten(), "float32"))
    if ift.owner and isinstance(ift.owner.op, HostFromGpu):
        gpu_ift, = ift.owner.inputs
    else:
        gpu_ift = as_cuda_ndarray_variable(ift)
    if iff.owner and isinstance(iff.owner.op, HostFromGpu):
        gpu_iff, = iff.owner.inputs
    else:
        gpu_iff = as_cuda_ndarray_variable(iff)
    gpu_op = GpuRowSwitch()
    # NOTE(review): gpu_cond computed above is never used; the host-side
    # `cond` is passed instead and re-converted inside make_node.  Looks like
    # dead code or a latent bug -- confirm intent.
    return [HostFromGpu()(gpu_op(cond, gpu_ift, gpu_iff))]
class GpuMaskedCAReduce(GpuOp):
    # DEV: Only supporting reduce_100 with switch over mask vector.
    # No promise re: what will happen elsewhere... !
    """
    Reduce two rank-N tensors with some elemwise op, masking over the first
    dimension to produce an N-1 dimensional result.
    >>> ift
    array([[[ 3., 1.],
    [ 4., 8.]],
    [[ 9., 4.],
    [ 3., 6.]],
    [[ 5., 2.],
    [ 6., 2.]]])
    >>> iff
    array([[[ 10., 3.],
    [ 3., 5.]],
    [[ 2., 1.],
    [ 5., 9.]],
    [[ 0., 6.],
    [ 3., 4.]]])
    >>> mask
    [0 1 0]
    >>> GpuMaskedCAReduce(theano.scalar.add)(mask, ift, iff).eval()
    array([[ 19., 13.],
    [ 9., 15.]])
    >>> iff[0] + ift[1] + iff[2]
    array([[ 19., 13.],
    [ 9., 15.]])
    """

    nin = 3
    nout = 1

    # Equality/hash by type so Theano can merge/cache identical ops.
    def __hash__(self):
        return hash(type(self))

    def __eq__(self, other):
        return type(self) == type(other)

    def make_node(self, cond, ift, iff):
        """Build the Apply node; output drops the first (reduced) dimension.

        :raises ValueError: if either branch has a broadcastable dimension.
        """
        if any(ift.broadcastable) or any(iff.broadcastable):
            # BUG FIX: format args must be a single tuple; the original
            # passed iff.broadcastable as a second ValueError argument.
            raise ValueError("GpuMaskedCAReduce cannot operate on "
                             "broadcastable output arguments (ift %s, iff %s)."
                             % (ift.broadcastable, iff.broadcastable))
        out_type = ift.dtype

        cond = as_cuda_ndarray_variable(
            T.cast(cond.flatten(), "float32"))
        ift = as_cuda_ndarray_variable(ift)
        iff = as_cuda_ndarray_variable(iff)

        # TODO check contiguous?

        assert ift.type.dtype == iff.type.dtype
        assert cond.ndim == 1, cond.ndim
        assert ift.ndim == iff.ndim

        # Reduction over axis 0: the output keeps the remaining dims.
        out_bcast = ift.broadcastable[1:]
        return theano.gof.Apply(
            self, [cond, ift, iff],
            [CudaNdarrayType(broadcastable=out_bcast,
                             dtype=out_type)()])

    def perform(self, node, inp, out):
        # No CPU implementation; this op only runs through its C/CUDA code.
        raise NotImplementedError("GpuMaskedCAReduce is GPU only")

    def c_code_cache_version(self):
        return 18

    def c_support_code(self):
        """Defines the masked add-reduce kernel used in this op.

        The kernel hard-codes a sum reduction of
        ``mask * X + (1 - mask) * Y`` over axis 0.
        """
        return """
// A, B, C = x.shape[1, 0, 2]
// D = C / 32
// n_blocks(A, D)
static __global__ void k_masked_careduce(const int A, const int B,
                                         const int C, const int D,
                                         const float *X,
                                         const int sX0, const int sX1,
                                         const int sX2,
                                         const float *Y, // Strides must be the same as X
                                         const float *mask,
                                         const int sMask,
                                         float *Z,
                                         const int sZ0, const int sZ1) {
    const int threadCount = blockDim.x;
    const int threadNum = threadIdx.x;
    float myresult = 0.0f;

    if (warpSize != 32)
        return; //TODO: set error code

    for (int a = blockIdx.x; a < A; a += gridDim.x) {
        for (int i2_D = blockIdx.y; i2_D < D; i2_D += gridDim.y) {
            int c = i2_D * 32 + threadIdx.x;
            if (c < C) {
                myresult = 0.0f;
                const float *X_base = &(X[a * sX0 + 0 * sX1 + c * sX2]);
                const float *Y_base = &(Y[a * sX0 + 0 * sX1 + c * sX2]);

                for (int b = 0; b < B; b++) {
                    float X_b = X_base[b * sX1];
                    float Y_b = Y_base[b * sX1];
                    float mask_b = mask[b * sMask];

                    // TODO: Faster to do a comparison + ternary op here?
                    myresult += mask_b * X_b + (1.0 - mask_b) * Y_b;
                }

                Z[a * sZ0 + c * sZ1] = myresult;
            }
        }
    }
}
""" % locals()

    def c_code(self, node, name, inp, out, sub):
        """Generates code to instantiate this op for these particular inputs."""
        mask, x, y = inp
        out, = out
        fail = sub["fail"]

        # TODO: Assumes out is alloced. OK?
        return """
dim3 n_threads(32, 1, 1);

int A = CudaNdarray_HOST_DIMS(%(x)s)[1];
int B = CudaNdarray_HOST_DIMS(%(x)s)[0];
int C = CudaNdarray_HOST_DIMS(%(x)s)[2];
int D = C/32;
if (32*D < C) D+= 1;
assert ((C <= 32*D) && (32*D < C+32));

dim3 n_blocks(A,D);
if (n_blocks.x > NUM_VECTOR_OP_BLOCKS)
    n_blocks.x = NUM_VECTOR_OP_BLOCKS;
if (n_blocks.x*n_blocks.y > NUM_VECTOR_OP_BLOCKS)
    n_blocks.y = NUM_VECTOR_OP_BLOCKS/n_blocks.x;
int n_shared = 0;

cudaError_t sts;

int out_ndim = 2;
int out_shape[2] = {CudaNdarray_HOST_DIMS(%(x)s)[1], CudaNdarray_HOST_DIMS(%(x)s)[2]};
if (!%(out)s) {
    %(out)s = (CudaNdarray*) CudaNdarray_ZEROS(out_ndim, out_shape);
}

k_masked_careduce<<<n_blocks, n_threads, n_shared>>>(
    A,B,C,D,
    CudaNdarray_DEV_DATA(%(x)s),
    CudaNdarray_HOST_STRIDES(%(x)s)[1],
    CudaNdarray_HOST_STRIDES(%(x)s)[0],
    CudaNdarray_HOST_STRIDES(%(x)s)[2],
    CudaNdarray_DEV_DATA(%(y)s),
    CudaNdarray_DEV_DATA(%(mask)s),
    CudaNdarray_HOST_STRIDES(%(mask)s)[0],
    CudaNdarray_DEV_DATA(%(out)s),
    CudaNdarray_HOST_STRIDES(%(out)s)[0],
    CudaNdarray_HOST_STRIDES(%(out)s)[1]
);

CNDA_THREAD_SYNC;
sts = cudaGetLastError();
if (cudaSuccess != sts)
{
    PyErr_Format(PyExc_RuntimeError,
                 "Cuda error: %%s: %%s."
                 " (grid: %%i x %%i; block: %%i x %%i x %%i)\\n",
                 "k_masked_careduce",
                 cudaGetErrorString(sts),
                 n_blocks.x,
                 n_blocks.y,
                 n_threads.x,
                 n_threads.y,
                 n_threads.z);
    %(fail)s;
}
""" % locals()
from theano.sandbox.cuda.basic_ops import GpuCAReduce, GpuElemwise
from theano.sandbox.cuda.opt import local_gpu_careduce
@register_opt("fast_compile")
@local_optimizer([GpuCAReduce, T.elemwise.CAReduce, T.elemwise.Sum])
def local_gpu_masked_careduce(node):
    """
    Detects eligible CAReduce{add}(GpuElemwise{Switch}) instances and replaces
    them with a masked CAReduce.

    Matches a sum over axis 0 (reduce_mask (1, 0, 0)) of a 3-input GpuElemwise
    whose first input is a vector mask broadcasting over the other two.
    """
    # TODO: Probably don't need this hack checking for both GpuCAReduce and its
    # non-gpu counterpart anymore. Just the GPU should be fine.
    if not isinstance(node.op, GpuCAReduce):
        # Send this off to local_gpu_careduce first.
        # HACK: This happens outside of the standard optimization sequence.
        ret = local_gpu_careduce.transform(node)
        if not ret:
            return False
        # BUG FIX: removed a leftover py2 debug `print` statement that wrote
        # to stdout on every successful transform.
        if isinstance(ret[0].owner.op, HostFromGpu):
            ret = ret[0].owner.inputs[0].owner
        else:
            ret = ret[0].owner
        node = ret

    if node.op.scalar_op.__class__ != theano.scalar.Add:
        return False
    above = node.inputs[0].owner
    if above is None or not isinstance(above.op, GpuElemwise):
        return False

    # The graph looks okay. Check the dims: only axis-0 reductions with no
    # pre-scalar op are supported.
    if node.op.reduce_mask != (1, 0, 0):
        return False
    if node.op.pre_scalar_op:
        return False

    # Check switch op.
    # TODO: Check that it's actually a switch .. !
    if len(above.inputs) != 3:
        return False
    mask, ift, iff = above.inputs
    # The mask must be a vector broadcasting over the remaining dimensions.
    if not mask.broadcastable:
        return False
    if not (not mask.broadcastable[0] and all(mask.broadcastable[1:])):
        return False
    if any(ift.broadcastable) or any(iff.broadcastable):
        return False

    return [GpuMaskedCAReduce()(mask, ift, iff)]
| {
"repo_name": "stanfordnlp/spinn",
"path": "python/spinn/util/cuda.py",
"copies": "1",
"size": "50063",
"license": "mit",
"hash": -5141168164329313000,
"line_mean": 33.7901320361,
"line_max": 129,
"alpha_frac": 0.5163294249,
"autogenerated": false,
"ratio": 3.571591638724406,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4587921063624406,
"avg_score": null,
"num_lines": null
} |
from functools import wraps
import pandas as pd
from arctic.chunkstore.chunkstore import ChunkStore
import pytech.utils as utils
from pytech.mongo import ARCTIC_STORE, BarStore
from pytech.utils.exceptions import InvalidStoreError, PyInvestmentKeyError
from pandas.tseries.offsets import BDay
from pytech.data._holders import DfLibName
def memoize(obj):
    """Memoize a callable: results are cached keyed on the stringified args."""
    obj.cache = {}
    cache = obj.cache

    @wraps(obj)
    def memoizer(*args, **kwargs):
        key = str(args) + str(kwargs)
        try:
            return cache[key]
        except KeyError:
            result = obj(*args, **kwargs)
            cache[key] = result
            return result
    return memoizer
def optional_arg_decorator(fn):
    """Allow a decorator to be applied both bare and with optional arguments.

    Use **only** to wrap decorators whose extra arguments are optional.
    """
    def dispatch(*args):
        # Bare usage (@decorator): called directly with the decoratee.
        if len(args) == 1 and callable(args[0]):
            return fn(args[0])
        # Parameterized usage (@decorator(...)): return the real decorator.
        def apply_with_args(decoratee):
            return fn(decoratee, *args)
        return apply_with_args
    return dispatch
def write_chunks(chunk_size='D', remove_ticker=True):
    """
    Wrap a function that returns a :class:`DfLibName` (a DataFrame plus an
    Arctic library name) and persist the DataFrame to a :class:`ChunkStore`.

    The wrapped function's DataFrame must contain a column named
    ``utils.TICKER_COL``; its first value is used as the key in the store.

    :param chunk_size: The chunk size to use. Options are:

        * D = Days
        * M = Months
        * Y = Years

    :param remove_ticker: If true the ticker column will be deleted before the
        :class:`pd.DataFrame` is returned, otherwise it will remain, which
        uses more memory than required.
    :return: A :class:`DfLibName` holding the (possibly modified) DataFrame.
    :raises PyInvestmentKeyError: if the ticker column is missing.
    :raises ValueError: if the DataFrame has neither a 'date' column nor a
        datetime index.
    :raises InvalidStoreError: if the target library is not a ChunkStore.
    """
    def wrapper(f):
        @wraps(f)
        def eval_and_write(*args, **kwargs):
            # The decorated function returns a DfLibName holder.
            df_lib_name = f(*args, **kwargs)
            df = df_lib_name.df
            lib_name = df_lib_name.lib_name
            try:
                # TODO: make this use the fast scalar getter
                ticker = df[utils.TICKER_COL][0]
                # ticker = df.at[0, pd_utils.TICKER_COL]
            except KeyError:
                raise PyInvestmentKeyError(
                    'Decorated functions are required to add a column '
                    f'{utils.TICKER_COL} that contains the ticker.')
            if remove_ticker:
                # should this be saved?
                df.drop(utils.TICKER_COL, axis=1, inplace=True)
            # Work around for a flaw in the arctic DateChunker.
            # NOTE(review): because of the `or`, this branch runs unless BOTH
            # a 'date' column and a 'date' index name exist -- likely intended
            # `and`; confirm against DateChunker's requirements.
            if 'date' not in df.columns or 'date' not in df.index.names:
                if df.index.dtype == pd.to_datetime(['2017']).dtype:
                    df.index.name = 'date'
                else:
                    raise ValueError('df must be datetime indexed or have a'
                                     'column named "date".')
            if lib_name not in ARCTIC_STORE.list_libraries():
                # create the lib if it does not already exist
                ARCTIC_STORE.initialize_library(lib_name,
                                                BarStore.LIBRARY_TYPE)
            lib = ARCTIC_STORE[lib_name]
            if not isinstance(lib, ChunkStore):
                raise InvalidStoreError(required=ChunkStore,
                                        provided=type(lib))
            else:
                lib.update(ticker, df, chunk_size=chunk_size, upsert=True)
            # Returned frame is stamped with business-day frequency.
            df.index.freq = BDay()
            return DfLibName(df, lib_name)
        return eval_and_write
    return wrapper
class lazy_property(object):
    """
    Non-data descriptor that computes an attribute once and caches it.

    On first access the wrapped function runs and its result is stored on the
    instance under the same name, shadowing this descriptor for every later
    lookup.  Only suitable for values that never change.
    """

    def __init__(self, f):
        self.f = f
        self.func_name = f.__name__

    def __get__(self, obj, cls):
        # Class-level access yields None (no instance to compute against).
        if obj is None:
            return None
        value = self.f(obj)
        # Cache on the instance; subsequent reads bypass the descriptor.
        setattr(obj, self.func_name, value)
        return value
| {
"repo_name": "kprestel/PyInvestment",
"path": "pytech/decorators/decorators.py",
"copies": "2",
"size": "4087",
"license": "mit",
"hash": 7719846952224752000,
"line_mean": 31.1811023622,
"line_max": 78,
"alpha_frac": 0.576951309,
"autogenerated": false,
"ratio": 4.087,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0,
"num_lines": 127
} |
from functools import wraps
import pickle
from .backends import LocalBackend, MemcacheBackend
from .decorators import CachedFunctionDecorator, CachedListFunctionDecorator
from .utils import default_cache_key_func
from .batcher import Batcher
from .exceptions import InvalidHookEventException, OutOfBatcherContextRegistrationException
class Cacher(object):
    """
    Cacher is the main object of the whole pycache package; it encapsulates
    the connection with the Memcached client.
    Example instantiation::
        import pycacher
        cacher = pycacher.Cacher('localhost', 11211)
    By default, Cacher would be instantiated with `MemcacheBackend`. It's possible
    to instantiate Cacher manually with a different backend, such as LocalBackend.
    Here's how you do it::
        import pycacher
        from pycacher.backends import LocalBackend
        cacher = pycacher.Cacher(backend=LocalBackend())
    """

    # Events accepted by add_hook()/trigger_hooks().
    _HOOK_EVENTS = ('invalidate', 'call', 'register')

    def __init__(self, host='localhost', port=11211, client=None,
                 backend=None, default_expires=None,
                 cache_key_func=default_cache_key_func):
        # NOTE(review): `client` and `default_expires` are accepted but unused
        # here -- kept for interface compatibility.
        self.cache_key_func = cache_key_func
        if backend:
            self.backend = backend
        else:
            self.backend = MemcacheBackend(host=host, port=port)
        self._batcher_ctx_stack = []
        self._hooks = {'call': [], 'invalidate': [], 'register': []}

    def cache(self, expires=None):
        """Decorates a function to be cacheable.
        Example usage::
            @cacher.cache(expires=None)
            def expensive_function(a, b):
                pass
        """
        def decorator(f):
            # Wraps the function within a function decorator
            return CachedFunctionDecorator(f, cacher=self, expires=expires,
                                           cache_key_func=self.cache_key_func)
        return decorator

    def cache_list(self, range=10, skip_key="skip", limit_key="limit", expires=None):
        """Decorates a function that returns a list as a return value to be cacheable.
        Example usage::
            @cacher.cache_list(range=10)
            def expensive_function(a, skip=None, limit=None):
                pass
        """
        def decorator(f):
            return CachedListFunctionDecorator(f, cacher=self, expires=expires,
                                               cache_key_func=self.cache_key_func,
                                               range=range, skip_key=skip_key,
                                               limit_key=limit_key)
        return decorator

    def create_batcher(self):
        """Simply creates a Batcher instance."""
        return Batcher(self)

    def push_batcher(self, batcher):
        """Push a batcher onto the context stack."""
        self._batcher_ctx_stack.append(batcher)

    def get_current_batcher(self):
        """Return the innermost batcher, or None when the stack is empty."""
        if len(self._batcher_ctx_stack) > 0:
            return self._batcher_ctx_stack[-1]
        else:
            return None

    def pop_batcher(self):
        """Pop and return the innermost batcher (IndexError when empty)."""
        return self._batcher_ctx_stack.pop()

    def get_batcher_stack_depth(self):
        return len(self._batcher_ctx_stack)

    def _check_hook_event(self, event):
        # Shared validation for add_hook/trigger_hooks (previously duplicated).
        if event not in self._HOOK_EVENTS:
            raise InvalidHookEventException(
                "Hook event must be 'invalidate', 'call', or 'register'")

    def add_hook(self, event, fn):
        """ Add hook function to be executed on event.
        Example usage::
            def on_cacher_invalidate(key):
                pass
            cacher.add_hook('invalidate', on_cacher_invalidate)
        """
        self._check_hook_event(event)
        self._hooks[event].append(fn)

    def trigger_hooks(self, event, *args, **kwargs):
        """Invoke every hook registered for ``event`` with the given args."""
        self._check_hook_event(event)
        for fn in self._hooks[event]:
            fn(*args, **kwargs)

    def get(self, key):
        """Return the unpickled value stored under ``key``, or None on a miss.

        BUG FIX: the backend returns None for missing keys; the original
        passed that straight to ``pickle.loads`` and raised TypeError.
        """
        raw = self.backend.get(key)
        if raw is None:
            return None
        return pickle.loads(raw)

    def set(self, key, value):
        return self.backend.set(key, pickle.dumps(value))

    def delete(self, key):
        return self.backend.delete(key)
| {
"repo_name": "garindra/pycacher",
"path": "pycacher/cacher.py",
"copies": "1",
"size": "4282",
"license": "mit",
"hash": -5680126116662343000,
"line_mean": 29.5857142857,
"line_max": 91,
"alpha_frac": 0.5765997198,
"autogenerated": false,
"ratio": 4.391794871794872,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.017129950710583028,
"num_lines": 140
} |
from functools import wraps
import pyavali
from .argumentpickers import create_argument_picker
class validate(object):
    """Validates a function argument by calling validator with the argument.
    @validate(0, int)
    def foo(param):
        ...
    or
    @validate("param", int, "must be int")
    def foo(param)
        ...
    argument_index_or_name - Index of the arguments in args array or name of the argument
    validator - Callable, which takes the param
    message - Message set to the raised exception
    Raises ValidatorException if validator returned false or raised error
    """
    def __init__(self, argument_index_or_name, validator, message=None):
        self._argument_name = str(argument_index_or_name)
        self._argument_picker = create_argument_picker(argument_index_or_name)
        self._validator = validator
        self._message = message

    def __call__(self, func):
        # BUG FIX: build the Validator in a local variable instead of
        # overwriting self._validator, so a single `validate` instance can
        # safely decorate several functions (the original double-wrapped its
        # validator on reuse, making every later validation fail).
        validator = Validator(self._validator, func.__name__,
                              self._argument_name, self._message)

        @wraps(func)
        def decorator_callable(*args, **kwargs):
            argument = self._argument_picker.argument(func, *args, **kwargs)
            validator.validate(argument)
            return func(*args, **kwargs)
        return decorator_callable
class validate_param(validate):
    """Alias of :class:`validate` (identical behavior)."""
    pass
class Validator(object):
    """Runs a validator callable against one argument and raises on failure."""

    def __init__(self, validator, function_name, argument_name, message=None):
        self._validator = validator
        self._message = message
        self._argument_name = argument_name
        self._argument_index = 0
        self._function_name = function_name

    def validate(self, argument):
        """Raise pyavali.ValidationFailed when the validator rejects argument."""
        # `== False` (not `is False`) is deliberate: falsy non-bool results
        # such as 0 also count as failures, matching the original behavior.
        if self._validate(argument) == False:
            self._raise_validation_failed(argument)

    def _validate(self, argument):
        # A validator that raises is treated as a failed validation.
        try:
            return self._validator(argument)
        except Exception:
            # BUG FIX: was a bare `except:`, which also swallowed
            # KeyboardInterrupt/SystemExit.
            return False

    def _raise_validation_failed(self, argument):
        message = None
        if self._message:
            message = self._message.format(argument)
        elif hasattr(self._validator, "message"):
            message = self._validator.message(argument)
        # BUG FIX: avoid appending the literal string "None" when no custom
        # message is available.
        message = "validation failed for '%s': '%s' %s" % (
            self._function_name, self._argument_name, message or '')
        raise pyavali.ValidationFailed(argument, self._argument_index, message)
| {
"repo_name": "mlackman/pyavali",
"path": "pyavali/decorators.py",
"copies": "1",
"size": "2175",
"license": "apache-2.0",
"hash": -3368731539542886000,
"line_mean": 28.3918918919,
"line_max": 107,
"alpha_frac": 0.6937931034,
"autogenerated": false,
"ratio": 3.9617486338797816,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5155541737279781,
"avg_score": null,
"num_lines": null
} |
from functools import wraps
import pytz
import heapq
from annoying.functions import get_object_or_None
from django.core.exceptions import PermissionDenied
from django.utils import timezone
from schedule.conf.settings import CHECK_EVENT_PERM_FUNC, CHECK_CALENDAR_PERM_FUNC
class EventListManager(object):
    """
    Operates on a list of events as a group; most notably merges the
    occurrence streams of every event into one chronological stream.
    """
    def __init__(self, events):
        self.events = events

    def occurrences_after(self, after=None, tzinfo=pytz.utc):
        """
        Yield occurrences from any event in ``self.events`` starting after
        ``after`` (default: now), in chronological order.  Persisted
        occurrences replace their generated equivalents.

        NOTE: ``tzinfo`` is accepted for API compatibility but unused here.
        """
        from schedule.models import Occurrence
        if after is None:
            after = timezone.now()
        occ_replacer = OccurrenceReplacer(
            Occurrence.objects.filter(event__in=self.events))
        generators = [event._occurrences_after_generator(after)
                      for event in self.events]
        occurrences = []
        # Seed a min-heap with the first occurrence of each event's stream.
        for generator in generators:
            try:
                # BUG FIX: builtin next() instead of the Python-2-only
                # generator.next(), keeping the code 2/3 compatible.
                heapq.heappush(occurrences, (next(generator), generator))
            except StopIteration:
                pass
        while True:
            if len(occurrences) == 0:
                # BUG FIX: `raise StopIteration` inside a generator is a
                # RuntimeError under PEP 479 (Python 3.7+); a plain return
                # finishes the generator identically.
                return
            generator = occurrences[0][1]
            try:
                # Pop the earliest occurrence, refilling the heap from the
                # stream it came from.  (Also un-shadows the builtin `next`.)
                next_occurrence = heapq.heapreplace(
                    occurrences, (next(generator), generator))[0]
            except StopIteration:
                next_occurrence = heapq.heappop(occurrences)[0]
            yield occ_replacer.get_occurrence(next_occurrence)
class OccurrenceReplacer(object):
    """
    Substitutes database-persisted occurrences for their generated
    equivalents — the last step before a list of occurrences is handed
    onward.  The lookup is keyed on (event, original_start, original_end).
    """
    def __init__(self, persisted_occurrences):
        self.lookup = {
            (occ.event, occ.original_start, occ.original_end): occ
            for occ in persisted_occurrences}

    def get_occurrence(self, occ):
        """
        Return the persisted occurrence matching ``occ`` (removing it from
        the lookup since it is now matched), or ``occ`` itself when none.
        """
        key = (occ.event, occ.original_start, occ.original_end)
        return self.lookup.pop(key, occ)

    def has_occurrence(self, occ):
        key = (occ.event, occ.original_start, occ.original_end)
        return key in self.lookup

    def get_additional_occurrences(self, start, end):
        """
        Return the remaining persisted, non-cancelled occurrences that fall
        within the [start, end) period.
        """
        extras = []
        for occ in self.lookup.values():
            if occ.start < end and occ.end >= start and not occ.cancelled:
                extras.append(occ)
        return extras
def check_event_permissions(function):
    """View decorator enforcing event- and calendar-level permissions.

    Raises PermissionDenied when CHECK_EVENT_PERM_FUNC or
    CHECK_CALENDAR_PERM_FUNC rejects the requesting user.
    """
    @wraps(function)
    def decorator(request, *args, **kwargs):
        from schedule.models import Event, Calendar
        user = request.user
        # Event-level permission first.
        event = get_object_or_None(Event, pk=kwargs.get('event_id', None))
        if not CHECK_EVENT_PERM_FUNC(event, user):
            raise PermissionDenied
        # Then the calendar: from the event, or the slug named in the URL.
        if event:
            calendar = event.calendar
        elif 'calendar_slug' in kwargs:
            calendar = Calendar.objects.get(slug=kwargs['calendar_slug'])
        else:
            calendar = None
        if not CHECK_CALENDAR_PERM_FUNC(calendar, user):
            raise PermissionDenied
        # All checks passed.
        return function(request, *args, **kwargs)
    return decorator
def coerce_date_dict(date_dict):
    """
    Build a full date/time dict from a (request.GET-style) dictionary.

    Reads 'year', 'month', 'day', 'hour', 'minute', 'second' in order and
    stops at the first missing key; remaining parts keep their minimum
    values.  Returns {} when not even 'year' is present.
    """
    defaults = (('year', 1), ('month', 1), ('day', 1),
                ('hour', 0), ('minute', 0), ('second', 0))
    ret_val = dict(defaults)
    modified = False
    for key, _ in defaults:
        try:
            ret_val[key] = int(date_dict[key])
        except KeyError:
            break
        modified = True
    return ret_val if modified else {}
| {
"repo_name": "erezlife/django-scheduler",
"path": "schedule/utils.py",
"copies": "1",
"size": "4739",
"license": "bsd-3-clause",
"hash": -3897363481879476700,
"line_mean": 33.3405797101,
"line_max": 120,
"alpha_frac": 0.6254484068,
"autogenerated": false,
"ratio": 4.27321911632101,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.007636060971338841,
"num_lines": 138
} |
from functools import wraps
import pytz
import heapq
from annoying.functions import get_object_or_None
from django.http import HttpResponseRedirect
from django.conf import settings
from django.utils import timezone
from schedule.conf.settings import CHECK_EVENT_PERM_FUNC, CHECK_CALENDAR_PERM_FUNC
class EventListManager(object):
    """
    Operates on a list of events as a group; most notably merges the
    occurrence streams of every event into one chronological stream.
    """
    def __init__(self, events):
        self.events = events

    def occurrences_after(self, after=None, tzinfo=pytz.utc):
        """
        Yield occurrences from any event in ``self.events`` starting after
        ``after`` (default: now), in chronological order.  Persisted
        occurrences replace their generated equivalents.

        NOTE: ``tzinfo`` is accepted for API compatibility but unused here.
        """
        from schedule.models import Occurrence
        if after is None:
            after = timezone.now()
        occ_replacer = OccurrenceReplacer(
            Occurrence.objects.filter(event__in=self.events))
        generators = [event._occurrences_after_generator(after)
                      for event in self.events]
        occurrences = []
        # Seed a min-heap with the first occurrence of each event's stream.
        for generator in generators:
            try:
                # BUG FIX: builtin next() instead of the Python-2-only
                # generator.next(), keeping the code 2/3 compatible.
                heapq.heappush(occurrences, (next(generator), generator))
            except StopIteration:
                pass
        while True:
            if len(occurrences) == 0:
                # BUG FIX: `raise StopIteration` inside a generator is a
                # RuntimeError under PEP 479 (Python 3.7+); a plain return
                # finishes the generator identically.
                return
            generator = occurrences[0][1]
            try:
                # Pop the earliest occurrence, refilling the heap from the
                # stream it came from.  (Also un-shadows the builtin `next`.)
                next_occurrence = heapq.heapreplace(
                    occurrences, (next(generator), generator))[0]
            except StopIteration:
                next_occurrence = heapq.heappop(occurrences)[0]
            yield occ_replacer.get_occurrence(next_occurrence)
class OccurrenceReplacer(object):
    """
    Swaps generated occurrences for the equivalent ones already persisted in
    the database, keyed on (event, original_start, original_end).
    """
    @staticmethod
    def _key(occ):
        # Identity of an occurrence for lookup purposes.
        return (occ.event, occ.original_start, occ.original_end)

    def __init__(self, persisted_occurrences):
        self.lookup = dict(
            (self._key(occ), occ) for occ in persisted_occurrences)

    def get_occurrence(self, occ):
        """
        Return the persisted occurrence matching ``occ``, consuming it from
        the lookup; fall back to ``occ`` itself when there is no match.
        """
        return self.lookup.pop(self._key(occ), occ)

    def has_occurrence(self, occ):
        return self._key(occ) in self.lookup

    def get_additional_occurrences(self, start, end):
        """
        Return the remaining persisted, non-cancelled occurrences that fall
        inside the [start, end) period.
        """
        return [occ for occ in self.lookup.values()
                if occ.start < end and occ.end >= start and not occ.cancelled]
def check_event_permissions(function):
    """View decorator enforcing event- and calendar-level permissions.

    Redirects to settings.LOGIN_URL when CHECK_EVENT_PERM_FUNC or
    CHECK_CALENDAR_PERM_FUNC rejects the requesting user.
    """
    @wraps(function)
    def decorator(request, *args, **kwargs):
        from schedule.models import Event, Calendar
        user = request.user
        # Event-level permission first.
        event = get_object_or_None(Event, pk=kwargs.get('event_id', None))
        if not CHECK_EVENT_PERM_FUNC(event, user):
            return HttpResponseRedirect(settings.LOGIN_URL)
        # Then the calendar: from the event, or the slug named in the URL.
        if event:
            calendar = event.calendar
        elif 'calendar_slug' in kwargs:
            calendar = Calendar.objects.get(slug=kwargs['calendar_slug'])
        else:
            calendar = None
        if not CHECK_CALENDAR_PERM_FUNC(calendar, user):
            return HttpResponseRedirect(settings.LOGIN_URL)
        # All checks passed.
        return function(request, *args, **kwargs)
    return decorator
def coerce_date_dict(date_dict):
    """
    Build a complete date/time dict from a (request.GET-style) dictionary.

    Parts are read in order ('year' down to 'second'); reading stops at the
    first missing part, and the rest keep their minimum values.  An empty
    dict is returned when not even 'year' is present.
    """
    parts = ['year', 'month', 'day', 'hour', 'minute', 'second']
    result = {'year': 1, 'month': 1, 'day': 1,
              'hour': 0, 'minute': 0, 'second': 0}
    found_any = False
    for part in parts:
        if part not in date_dict:
            break
        result[part] = int(date_dict[part])
        found_any = True
    return result if found_any else {}
| {
"repo_name": "jrutila/django-schedule",
"path": "schedule/utils.py",
"copies": "2",
"size": "4815",
"license": "bsd-3-clause",
"hash": 2540940516127492000,
"line_mean": 33.6402877698,
"line_max": 120,
"alpha_frac": 0.6288681205,
"autogenerated": false,
"ratio": 4.2686170212765955,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.0075811252808975545,
"num_lines": 139
} |
from functools import wraps
import pytz
import heapq
import datetime
from annoying.functions import get_object_or_None
from django.http import HttpResponseRedirect, HttpResponseNotFound
from django.conf import settings
from django.utils import timezone
from django.utils.module_loading import import_string
from schedule.conf.settings import (
CHECK_EVENT_PERM_FUNC,
CHECK_CALENDAR_PERM_FUNC,
CHECK_OCCURRENCE_PERM_FUNC,
CALENDAR_VIEW_PERM)
from django.db.models import F
class EventListManager(object):
    """
    This class is responsible for doing functions on a list of events. It is
    used when one has a list of events and wants to access the occurrences
    from these events as a group.
    """
    def __init__(self, events):
        self.events = events

    def occurrences_after(self, after=None, tzinfo=pytz.utc):
        """
        Yield occurrences from any event in ``self.events`` starting after
        ``after`` (default: now), in chronological order.  Persisted
        occurrences replace their generated equivalents.

        NOTE: ``tzinfo`` is accepted for API compatibility but unused here.
        """
        from schedule.models import Occurrence
        if after is None:
            after = timezone.now()
        occ_replacer = OccurrenceReplacer(
            Occurrence.objects.filter(event__in=self.events))
        generators = [event._occurrences_after_generator(after) for event in self.events]
        occurrences = []
        # Seed a min-heap with the first occurrence of each event's stream.
        for generator in generators:
            try:
                heapq.heappush(occurrences, (next(generator), generator))
            except StopIteration:
                pass
        while True:
            if len(occurrences) == 0:
                # BUG FIX: `raise StopIteration` inside a generator is a
                # RuntimeError under PEP 479 (Python 3.7+); a plain return
                # finishes the generator identically.
                return
            generator = occurrences[0][1]
            try:
                # Pop the earliest occurrence, refilling the heap from the
                # generator it came from.
                next_occurence = heapq.heapreplace(occurrences, (next(generator), generator))[0]
            except StopIteration:
                next_occurence = heapq.heappop(occurrences)[0]
            yield occ_replacer.get_occurrence(next_occurence)
class OccurrenceReplacer(object):
    """
    Replaces generated occurrences with the equivalent ones persisted in the
    database, keyed on (event, original_start, original_end).
    """
    def __init__(self, persisted_occurrences):
        self.lookup = {}
        for occ in persisted_occurrences:
            self.lookup[(occ.event, occ.original_start, occ.original_end)] = occ

    def get_occurrence(self, occ):
        """
        Return the persisted occurrence matching ``occ``, removing it from
        the lookup (it is now matched); fall back to ``occ`` when no match.
        """
        key = (occ.event, occ.original_start, occ.original_end)
        return self.lookup.pop(key, occ)

    def has_occurrence(self, occ):
        key = (occ.event, occ.original_start, occ.original_end)
        try:
            return key in self.lookup
        except TypeError:
            # An unhashable key can only be answered for an empty lookup.
            if not self.lookup:
                return False
            raise TypeError('A problem with checking if a persisted occurence exists has occured!')

    def get_additional_occurrences(self, start, end):
        """
        Return the remaining persisted, non-cancelled occurrences that fall
        inside the [start, end) period.
        """
        return [occ for occ in list(self.lookup.values())
                if occ.start < end and occ.end >= start and not occ.cancelled]
def get_occurrence(request, *args, **kwargs):
    """
    Resolve an Occurrence from a view's kwargs or request parameters.

    Checks an ``occurrence_id`` URL kwarg first, then GET, then POST;
    returns None when no source supplies an id.
    """
    from schedule.models import Occurrence
    have_source, lookup_id = False, None
    if 'occurrence_id' in kwargs:
        have_source, lookup_id = True, kwargs['occurrence_id']
    elif request.GET:
        have_source, lookup_id = True, request.GET.get('occurrence_id', None)
    elif request.POST:
        have_source, lookup_id = True, request.POST.get('occurrence_id', None)
    if not have_source:
        return None
    return get_object_or_None(Occurrence, id=lookup_id)
def get_event(occurrence, request, *args, **kwargs):
    """
    Resolve an Event: from ``occurrence`` if given, else from an
    ``event_id`` kwarg, GET or POST parameter.  Returns None otherwise.
    """
    from schedule.models import Event
    if occurrence:
        return occurrence.event
    have_source, lookup_id = False, None
    if 'event_id' in kwargs:
        have_source, lookup_id = True, kwargs['event_id']
    elif request.GET:
        have_source, lookup_id = True, request.GET.get('event_id', None)
    elif request.POST:
        have_source, lookup_id = True, request.POST.get('event_id', None)
    if not have_source:
        return None
    return get_object_or_None(Event, id=lookup_id)
def get_calendar(event, request, *args, **kwargs):
    """
    Resolve a Calendar: from ``event`` if given, else from a
    ``calendar_slug`` kwarg, GET or POST parameter.  Returns None otherwise.
    """
    from schedule.models import Calendar
    if event:
        return event.calendar
    have_source, lookup_slug = False, None
    if 'calendar_slug' in kwargs:
        have_source, lookup_slug = True, kwargs['calendar_slug']
    elif request.GET:
        have_source, lookup_slug = True, request.GET.get('calendar_slug', None)
    elif request.POST:
        have_source, lookup_slug = True, request.POST.get('calendar_slug', None)
    if not have_source:
        return None
    return get_object_or_None(Calendar, slug=lookup_slug)
def get_objects(request, *args, **kwargs):
    """
    Resolve the (occurrence, event, calendar) triple for a request,
    letting each lookup fall back on the previous one.
    """
    occurrence = get_occurrence(request, *args, **kwargs)
    event = get_event(occurrence, request, *args, **kwargs)
    calendar = get_calendar(event, request, *args, **kwargs)
    return occurrence, event, calendar
def check_occurrence_permissions(function):
    """
    View decorator enforcing event, calendar and occurrence permissions.

    Redirects to LOGIN_URL when the user is falsy or lacks permission;
    returns 404 when no calendar/event can be resolved from the request.
    (The previously-present function-local model import was unused and has
    been removed.)
    """
    @wraps(function)
    def decorator(request, *args, **kwargs):
        user = request.user
        # NOTE(review): unauthenticated Django requests usually carry a
        # truthy AnonymousUser; confirm whether this should instead check
        # user.is_authenticated.
        if not user:
            return HttpResponseRedirect(settings.LOGIN_URL)
        occurrence, event, calendar = get_objects(request, *args, **kwargs)
        if calendar and event:
            allowed = (CHECK_EVENT_PERM_FUNC(event, user) and
                       CHECK_CALENDAR_PERM_FUNC(calendar, user) and
                       CHECK_OCCURRENCE_PERM_FUNC(occurrence, user))
            if not allowed:
                return HttpResponseRedirect(settings.LOGIN_URL)
            # all checks passed
            return function(request, *args, **kwargs)
        return HttpResponseNotFound('<h1>Page not found</h1>')
    return decorator
def check_event_permissions(function):
    """
    View decorator enforcing event and calendar permissions.

    Redirects to LOGIN_URL when the user is falsy or lacks permission;
    returns 404 when no calendar can be resolved from the request.
    (The previously-present function-local model import was unused and has
    been removed.)
    """
    @wraps(function)
    def decorator(request, *args, **kwargs):
        user = request.user
        if not user:
            return HttpResponseRedirect(settings.LOGIN_URL)
        occurrence, event, calendar = get_objects(request, *args, **kwargs)
        if calendar:
            # NOTE(review): event may be None here; confirm that
            # CHECK_EVENT_PERM_FUNC tolerates a missing event.
            allowed = (CHECK_EVENT_PERM_FUNC(event, user) and
                       CHECK_CALENDAR_PERM_FUNC(calendar, user))
            if not allowed:
                return HttpResponseRedirect(settings.LOGIN_URL)
            # all checks passed
            return function(request, *args, **kwargs)
        return HttpResponseNotFound('<h1>Page not found</h1>')
    return decorator
def check_event_deletable(user, calendar, event):
    """
    Return a truthy value when ``user`` may delete ``event``: the event and
    calendar checks must pass, and every persisted occurrence must be
    individually deletable.
    """
    if not (CHECK_EVENT_PERM_FUNC(event, user) and
            CHECK_CALENDAR_PERM_FUNC(calendar, user)):
        return False
    return all(CHECK_OCCURRENCE_PERM_FUNC(occurrence, user)
               for occurrence in event.occurrence_set.all())
def check_event_delete_permissions(function):
    """
    View decorator allowing a delete view only when the user may delete
    the event and all of its occurrences (see check_event_deletable).
    (The previously-present function-local model import was unused and has
    been removed.)
    """
    @wraps(function)
    def decorator(request, *args, **kwargs):
        user = request.user
        if not user:
            return HttpResponseRedirect(settings.LOGIN_URL)
        occurrence, event, calendar = get_objects(request, *args, **kwargs)
        if calendar:
            if not check_event_deletable(user, calendar, event):
                return HttpResponseRedirect(settings.LOGIN_URL)
            # all checks passed
            return function(request, *args, **kwargs)
        return HttpResponseNotFound('<h1>Page not found</h1>')
    return decorator
def check_calendar_permissions(function):
    """
    View decorator enforcing calendar-level view permissions.

    When CALENDAR_VIEW_PERM is disabled, the view runs unchecked.
    (Flattened with guard clauses; the unused function-local model import
    has been removed.)
    """
    @wraps(function)
    def decorator(request, *args, **kwargs):
        if not CALENDAR_VIEW_PERM:
            # Permission checking disabled site-wide.
            return function(request, *args, **kwargs)
        user = request.user
        if not user:
            return HttpResponseRedirect(settings.LOGIN_URL)
        occurrence, event, calendar = get_objects(request, *args, **kwargs)
        if calendar:
            if not CHECK_CALENDAR_PERM_FUNC(calendar, user):
                return HttpResponseRedirect(settings.LOGIN_URL)
            # all checks passed
            return function(request, *args, **kwargs)
        return HttpResponseNotFound('<h1>Page not found</h1>')
    return decorator
def coerce_date_dict(date_dict):
    """
    Build a date-part dict from request parameters.

    Reads 'year', 'month', 'day', 'hour', 'minute', 'second' from
    ``date_dict`` in that order, stopping at the first missing key; the
    remaining parts keep their minimum values.  Returns an empty dict when
    no part at all was found.  (The old docstring wrongly said a tuple was
    returned; this function has always returned a dict.)
    """
    keys = ['year', 'month', 'day', 'hour', 'minute', 'second']
    ret_val = {
        'year': 1,
        'month': 1,
        'day': 1,
        'hour': 0,
        'minute': 0,
        'second': 0,
    }
    modified = False
    for key in keys:
        try:
            ret_val[key] = int(date_dict[key])
            modified = True
        except KeyError:
            # Stop at the first missing component: finer-grained parts
            # without their coarser prefix are deliberately ignored.
            break
    # Explicit conditional replaces the fragile `x and a or b` idiom.
    return ret_val if modified else {}
def get_model_bases():
    """
    Return the base classes for schedule models: the dotted paths in the
    SCHEDULER_BASE_CLASSES setting, or plain django Model by default.
    """
    from django.db.models import Model
    base_strings = getattr(settings, 'SCHEDULER_BASE_CLASSES', None)
    if base_strings is None:
        return [Model]
    return [import_string(path) for path in base_strings]
#def update_all_occurrences(new_event, old_event, occs):
def split_event_after(event, after):
    """
    Split a (possibly recurring) event at ``after``.

    The stored row (refetched by pk) is truncated so its recurrence ends at
    ``after``; ``event`` — carrying the edited data — is saved as a new row
    positioned at its first occurrence after the split, and persisted
    occurrences past ``after`` are moved onto it.
    """
    from schedule.models import Event
    old_event = Event.objects.get(pk=event.pk)
    if old_event.rule or event.rule:
        # Clearing the pk forces an INSERT: the edited copy becomes new.
        event.pk = None
        event.save()
    if old_event.rule:
        try:
            new_start = next(old_event.occurrences_after(after)).start
        except StopIteration:
            new_start = None
        # Truncate the stored event's recurrence at the split point.
        old_event.end_recurring_period = after
        old_event.save()
    if event.rule:
        try:
            new_start = next(event.occurrences_after(after)).start
        except StopIteration:
            new_start = None
        if new_start:
            length = datetime.timedelta(minutes=
                int((event.end-event.start).total_seconds() / 60)
            )
            time_diff = datetime.timedelta(minutes=
                int((event.start-old_event.start).total_seconds() / 60)
            )
            event.start = new_start
            event.end = new_start + length
            # Move persisted occurrences after the split onto the new
            # event, shifting their times by the edit's offset.
            # NOTE(review): original_start/original_end are set to the new
            # event's start/end for EVERY moved occurrence — confirm this
            # is intended rather than per-occurrence values.
            old_event.occurrence_set.filter(
                start__gte = after
            ).update(
                event=event,
                original_start = event.start,
                original_end = event.end,
                start=F('start') + time_diff,
                end=F('start') + length + time_diff
            )
event.save() | {
"repo_name": "nwaxiomatic/django-scheduler",
"path": "schedule/utils.py",
"copies": "1",
"size": "11438",
"license": "bsd-3-clause",
"hash": -6739619643359573000,
"line_mean": 35.9,
"line_max": 124,
"alpha_frac": 0.6129568106,
"autogenerated": false,
"ratio": 4.183613752743233,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.003654044475093783,
"num_lines": 310
} |
from functools import wraps
import random
import copy
from axelrod import Actions, flip_action
from .game import DefaultGame
# Shorthand action constants: C(ooperate) and D(efect).
C, D = Actions.C, Actions.D
# Strategy classifiers
def is_basic(s):
    """
    Return True when strategy ``s`` counts as 'basic': deterministic,
    memory depth 0 or 1, and no inspection/manipulation of source or state.
    """
    cls = s.classifier
    depth = cls['memory_depth']
    flags = [cls[key] for key in ('stochastic', 'inspects_source',
                                  'manipulates_source', 'manipulates_state')]
    return depth in (0, 1) and not any(flags)
def obey_axelrod(s):
    """
    Return True when strategy ``s`` obeys Axelrod's original tournament
    rules, i.e. it neither inspects nor manipulates source or state.
    """
    cheating_flags = ('inspects_source', 'manipulates_source',
                      'manipulates_state')
    return not any(s.classifier[flag] for flag in cheating_flags)
def update_history(player, move):
    """
    Record ``move`` in the player's history and bump the matching
    cooperation/defection counter (unknown moves leave counters alone).
    """
    player.history.append(move)
    counter = {C: 'cooperations', D: 'defections'}.get(move)
    if counter is not None:
        setattr(player, counter, getattr(player, counter) + 1)
def init_args(func):
    """
    Decorator recording the positional arguments given to Player.__init__
    subclasses on ``self.init_args`` (consumed by Player.clone()).

    NOTE(review): keyword arguments are not captured, so they are lost on
    clone — confirm callers rely on positional arguments only.
    """
    @wraps(func)
    def recording_init(self, *args, **kwargs):
        outcome = func(self, *args, **kwargs)
        self.init_args = args
        return outcome
    return recording_init
class Player(object):
    """A class for a player in the tournament.
    This is an abstract base class, not intended to be used directly.
    """
    # Human-readable strategy name; subclasses override this.
    name = "Player"
    # Per-strategy behaviour classifier; merged with default_classifier
    # (below) in __init__ so missing dimensions get defaults.
    classifier = {}
    default_classifier = {
        'stochastic': False,
        'memory_depth': float('inf'),
        'makes_use_of': None,
        'inspects_source': None,
        'manipulates_source': None,
        'manipulates_state': None
    }
    def __init__(self):
        """Initiates an empty history and 0 score for a player."""
        self.history = []
        # Deep-copy so per-instance classifier edits never mutate the
        # class-level dict shared by all instances.
        self.classifier = copy.deepcopy(self.classifier)
        if self.name == "Player":
            self.classifier['stochastic'] = False
        # Fill in any classifier dimensions the subclass left unspecified.
        for dimension in self.default_classifier:
            if dimension not in self.classifier:
                self.classifier[dimension] = self.default_classifier[dimension]
        self.cooperations = 0
        self.defections = 0
        self.init_args = ()
        self.set_match_attributes()
    def receive_match_attributes(self):
        # Overwrite this function if your strategy needs
        # to make use of match_attributes such as
        # the game matrix, the number of rounds or the noise
        pass
    def set_match_attributes(self, length=-1, game=None, noise=0):
        """Store the match parameters and notify the strategy of them."""
        if not game:
            game = DefaultGame
        self.match_attributes = {
            "length": length,
            "game": game,
            "noise": noise
        }
        self.receive_match_attributes()
    def __repr__(self):
        """The string method for the strategy."""
        return self.name
    @staticmethod
    def _add_noise(noise, s1, s2):
        """Independently flip each action with probability ``noise``."""
        r = random.random()
        if r < noise:
            s1 = flip_action(s1)
        r = random.random()
        if r < noise:
            s2 = flip_action(s2)
        return s1, s2
    def strategy(self, opponent):
        """This is a placeholder strategy."""
        raise NotImplementedError()
    def play(self, opponent, noise=0):
        """This pits two players against each other."""
        # Both strategies are computed before either history is updated, so
        # neither player can see the opponent's current move in advance.
        s1, s2 = self.strategy(opponent), opponent.strategy(self)
        if noise:
            s1, s2 = self._add_noise(noise, s1, s2)
        update_history(self, s1)
        update_history(opponent, s2)
    def clone(self):
        """Clones the player without history, reapplying configuration
        parameters as necessary."""
        # You may be tempted to reimplement using the `copy` module
        # Note that this would require a deepcopy in some cases and there may
        # be significant changes required throughout the library.
        # Consider overriding in special cases only if necessary
        cls = self.__class__
        new_player = cls(*self.init_args)
        new_player.match_attributes = copy.copy(self.match_attributes)
        return new_player
    def reset(self):
        """Resets history.
        When creating strategies that create new attributes then this method
        should be re-written (in the inherited class) and should not only
        reset history but also reset all other attributes.
        """
        self.history = []
        self.cooperations = 0
        self.defections = 0
| {
"repo_name": "ranjinidas/Axelrod",
"path": "axelrod/player.py",
"copies": "1",
"size": "4878",
"license": "mit",
"hash": 2827035234594652700,
"line_mean": 29.679245283,
"line_max": 132,
"alpha_frac": 0.6193111931,
"autogenerated": false,
"ratio": 4.130397967823878,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5249709160923879,
"avg_score": null,
"num_lines": null
} |
from functools import wraps
import random
from django.test import TestCase
from tt_streams import factories as streams
from . import factories
from . import models
def with_stream_stories_and_videos(func):
    """
    Test decorator that builds a stream containing two stories and two
    videos, then calls the wrapped test as
    ``func(self, stream, stories, videos, ...)``.
    """
    @wraps(func)
    def inner(self, *args, **kwargs):
        stream = streams.StreamFactory.create()
        stories = [factories.StoryFactory.create() for _ in range(2)]
        for story in stories:
            factories.StoryItemFactory.create(stream=stream, story=story)
        videos = [factories.VideoFactory.create() for _ in range(2)]
        for video in videos:
            factories.VideoItemFactory.create(stream=stream, video=video)
        return func(self, stream, stories, videos, *args, **kwargs)
    return inner
class BasicUsageTestCase(TestCase):
    """Smoke tests for mixed story/video stream membership."""
    def test_can_find_stories_in_stream(self):
        stream = streams.StreamFactory.create()
        stories = [factories.StoryFactory.create() for a in range(2)]
        for s in stories:
            factories.StoryItemFactory.create(stream=stream, story=s)
        self.assertEqual(stream.items.count(), 2)
    def test_can_find_videos_in_stream(self):
        stream = streams.StreamFactory.create()
        videos = [factories.VideoFactory.create() for a in range(2)]
        for v in videos:
            factories.VideoItemFactory.create(stream=stream, video=v)
        self.assertEqual(stream.items.count(), 2)
    @with_stream_stories_and_videos
    def test_can_find_both_videos_and_stories(self, stream, stories, videos):
        self.assertEqual(stream.items.count(), 4)
    @with_stream_stories_and_videos
    def test_returns_full_classes(self, stream, stories, videos):
        # assertIn replaces the deprecated TestCase.assert_ alias
        # (removed in Python 3.12); the queryset is built once instead of
        # per loop iteration.
        subclass_items = stream.items.select_subclasses().all()
        for s in models.StoryItem.objects.all():
            self.assertIn(s, subclass_items)
        for v in models.VideoItem.objects.all():
            self.assertIn(v, subclass_items)
class StoryItemTestCase(TestCase):
    """Checks that StoryItem.title mirrors its Story's title."""
    def test_syncs_title_on_save(self):
        suffix = random.randint(1000, 2000)
        stream = streams.StreamFactory.create()
        story = factories.StoryFactory.create(
            title="Some Great Story %s" % suffix)
        item = factories.StoryItemFactory.create(story=story, stream=stream)
        self.assertEqual(item.title, story.title)
    def test_signal_works_as_well(self):
        suffix = random.randint(1000, 2000)
        stream = streams.StreamFactory.create()
        story = factories.StoryFactory.create(
            title="Some Great Story %s" % suffix)
        item = factories.StoryItemFactory.create(story=story, stream=stream)
        # Renaming the story and saving should propagate via the signal.
        story.title = 'Some Other Great Story %d' % suffix
        story.save()
        refreshed = models.StoryItem.objects.get(pk=item.pk)
        self.assertEqual(refreshed.title, story.title)
| {
"repo_name": "texastribune/tt_streams",
"path": "example/app/tests.py",
"copies": "1",
"size": "2735",
"license": "apache-2.0",
"hash": 428332087722356160,
"line_mean": 35.4666666667,
"line_max": 78,
"alpha_frac": 0.6702010969,
"autogenerated": false,
"ratio": 3.7363387978142075,
"config_test": true,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.49065398947142075,
"avg_score": null,
"num_lines": null
} |
from functools import wraps
import json
import random
import string

from flask import (Blueprint, Response, abort, current_app, flash, redirect,
                   render_template, request, session, url_for)

from forms import AddForm, LoginForm
import database_helper
# Blueprint holding all admin routes; registered by the main application.
admin_page = Blueprint("admin", __name__)
# General helpers
def get_random_shortcode(length):
    """
    Return a cryptographically random shortcode of ``length`` characters,
    drawn from lowercase letters and digits.
    """
    rng = random.SystemRandom()
    alphabet = string.ascii_lowercase + string.digits
    return ''.join(rng.choice(alphabet) for _ in range(length))
def url_for_code(shortcode, target_url):
    """
    Return an HTML anchor pointing at the redirect route for ``shortcode``,
    with ``target_url`` as the visible link text.

    NOTE(review): ``target_url`` is interpolated without HTML escaping;
    confirm it is trusted input or escape it before rendering.
    """
    href = url_for("redirect.target", shortcode=shortcode)
    return "<a href=\"%s\">%s</a>" % (href, target_url)
# Blueprint stuff
def check_auth(username, password):
    """
    Compare submitted credentials against the configured admin account.

    Uses hmac.compare_digest so the comparison runs in constant time,
    mitigating timing attacks against the admin credentials.
    """
    import hmac  # stdlib; local import keeps this fix self-contained

    def _to_bytes(value):
        # compare_digest needs same-typed operands; normalise to bytes.
        return value if isinstance(value, bytes) else value.encode('utf-8')

    user_ok = hmac.compare_digest(
        _to_bytes(username), _to_bytes(current_app.config["ADMIN_USER"]))
    pass_ok = hmac.compare_digest(
        _to_bytes(password), _to_bytes(current_app.config["ADMIN_PASS"]))
    return user_ok and pass_ok
def check_session_auth():
    """Return True when the current session has been marked authorized."""
    return session.get("authorization") == "Yes"
def requires_auth(f):
    """
    View decorator: bounce unauthenticated sessions to the login page
    before running the wrapped view.
    """
    @wraps(f)
    def guarded(*args, **kwargs):
        if check_session_auth():
            return f(*args, **kwargs)
        return redirect(url_for(".login"))
    return guarded
@admin_page.before_request
def require_https():
    """Redirect plain-HTTP requests to HTTPS (skipped in dev mode)."""
    if "dev_mode" in current_app.config:
        return None
    if request.headers.get("X-Forwarded-Proto", "http") == "https":
        return None
    return redirect(request.url.replace("http://", "https://"))
# Routes
@admin_page.route("/")
@requires_auth
def index():
    """Render the admin landing page (requires an authorized session)."""
    return render_template("admin_index.html")
@admin_page.route("/login", methods=["GET", "POST"])
def login():
    """Show the login form; on a valid POST mark the session authorized."""
    if check_session_auth():
        return redirect(url_for(".index"))
    form = LoginForm(request.form)
    form.login_validator = check_auth
    if request.method == "POST" and form.validate():
        # Credentials accepted: flag the session and go to the index.
        session["authorization"] = "Yes"
        return redirect(url_for(".index"))
    return render_template("admin_login.html", login_form=form)
@admin_page.route("/list")
@requires_auth
def list():
    """
    Render all shortcodes, split into random and hand-picked groups.

    Locals renamed from ``random``/``nonrandom``: the old local name
    shadowed the module-level ``random`` import.
    """
    random_codes = []
    manual_codes = []
    for record in database_helper.get_all():
        bucket = random_codes if record.is_random else manual_codes
        bucket.append({
            "shortcode": record.shortcode,
            "target_url": record.target_url
        })
    dump = {"random": random_codes, "nonrandom": manual_codes}
    return render_template("admin_list.html", results=dump)
@admin_page.route("/export")
@requires_auth
def export():
    """Return every shortcode mapping as a JSON document."""
    results = database_helper.get_all()
    # List comprehension replaces map(): on Python 3, map() returns an
    # iterator, which json.dumps cannot serialize.
    arr = [{"is_random": a.is_random,
            "shortcode": a.shortcode,
            "target_url": a.target_url} for a in results]
    js = json.dumps(arr)
    return Response(js, 200, {"Content-Type": "application/json"})
@admin_page.route("/add", methods=["GET", "POST"])
@requires_auth
def add():
    """Create a shortcode, either user-chosen or randomly generated."""
    form = AddForm(request.form)
    if request.method != "POST" or not form.validate():
        return render_template("admin_add.html", add_form=form)
    shortcode = form.shortcode.data
    target_url = form.target_url.data
    # Renamed from `random`, which shadowed the module-level import.
    is_random = form.is_random.data
    if (not shortcode or len(shortcode) == 0) and not is_random:
        # Bug fix: `abort` was never imported, and flask.abort takes the
        # status code as its first argument.
        abort(400, "No shortcode specified.")
    if is_random:
        # Make sure the target doesn't already have a random shortcode
        target = database_helper.find_by_target(target_url)
        if target and target.is_random:
            flash("Shortcode '%s' for this URL already exists. %s" % (target.shortcode, url_for_code(target.shortcode, target.target_url)), category="info")
            return render_template("admin_add.html", add_form=form)
        # Find an unused random shortcode
        count = 0
        while True:
            shortcode = get_random_shortcode(current_app.config["SHORTCODE_LENGTH"])
            if database_helper.get_shortcode_target(shortcode) is None:
                break
            # Make sure we don't loop endlessly
            count = count + 1
            if count > 4:
                flash("Could not find usable shortcode after 5 tries.", category="danger")
                return render_template("admin_add.html", add_form=form)
    else:
        # Make sure this shortcode doesn't already exist
        target = database_helper.get_shortcode_target(shortcode)
        if target:
            flash("Shortcode '%s' already exists to %s." % (shortcode, target.target_url), category="warning")
            return render_template("admin_add.html", add_form=form)
    if database_helper.insert_shortcode(shortcode, target_url, is_random):
        msg = "Shortcode '%s' added successfully. %s" % (shortcode, url_for_code(shortcode, target_url))
        category = "success"
    else:
        msg = "Failed to create shortcode."
        category = "danger"
    flash(msg, category)
    return redirect(url_for(".add"))
@admin_page.route("/edit/<shortcode>", methods=["GET", "POST"])
@requires_auth
def edit(shortcode):
    """Edit the target URL of an existing shortcode."""
    result = database_helper.get_shortcode_target(shortcode)
    if result is None:
        return abort(404)
    form = AddForm(request.form, obj=result)
    if request.method != "POST" or not form.validate():
        # GET (or invalid POST): show the edit form pre-filled.
        return render_template("admin_edit.html", add_form=form, data=result)
    target_url = form.target_url.data
    updated = database_helper.update_shortcode(shortcode, target_url)
    if updated:
        msg = "Shortcode '%s' updated successfully. %s" % (shortcode, url_for_code(shortcode, target_url))
        category = "success"
    else:
        msg, category = "Failed to update shortcode.", "danger"
    flash(msg, category)
    return redirect(url_for(".list"))
@admin_page.route("/delete/<shortcode>", methods=["GET", "POST"])
@requires_auth
def delete(shortcode):
    """Confirm (GET) and perform (POST) deletion of a shortcode."""
    record = database_helper.get_shortcode_target(shortcode)
    if record is None:
        return abort(404)
    if request.method != "POST":
        # GET: show the confirmation page.
        return render_template("admin_delete.html", data=record)
    if database_helper.delete_shortcode(shortcode):
        msg, category = "Shortcode '%s' was deleted." % (shortcode), "success"
    else:
        msg, category = "Failed to remove shortcode.", "warn"
    flash(msg, category=category)
    return redirect(url_for(".list"))
| {
"repo_name": "AndrewNeo/hybridius",
"path": "admin.py",
"copies": "1",
"size": "6102",
"license": "mit",
"hash": 1139945413780670000,
"line_mean": 29.8181818182,
"line_max": 156,
"alpha_frac": 0.6401179941,
"autogenerated": false,
"ratio": 3.700424499696786,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4840542493796786,
"avg_score": null,
"num_lines": null
} |
from functools import wraps
import re
from typing import Optional, Callable, Union, Pattern, Any
from .threads import JobThread
# utility functions for doing dynamic replacements in matches
_replace_format = re.compile(r':(\w*):')

def _replace_match(self, match) -> str:
    """
    Resolve one ':name:' placeholder via self.config['replace']: values
    may be plain objects (stringified) or callables invoked with the bot
    instance; unknown names collapse to ''.
    """
    replacements = self.config['replace']
    key = match[1]
    if key not in replacements:
        return ''
    val = replacements[key]
    return str(val(self)) if callable(val) else str(val)

def _replace(self, original: Union[Pattern, str]) -> str:
    """
    Expand ':name:' placeholders in a pattern or plain string.

    NOTE(review): despite the annotation, a compiled pattern argument
    yields a compiled pattern, not a str.  Placeholder resolution is
    dispatched through self._replace_match so instances can override it.
    """
    expand = self._replace_match
    if isinstance(original, Pattern):
        return re.compile(re.sub(_replace_format, expand, original.pattern))
    return re.sub(_replace_format, expand, original)
# Special hook that allows the related function to be called from any thread
# and then execute in the bot's actual thread.
# Basiclally it queues the function to be ran on the bot's own terms
def queue():
    """
    Decorator: defer execution of the hooked function onto the bot's own
    thread via ``bot.queue`` (safe to call from any thread).
    """
    def wrapped(func: Callable):
        @wraps(func)
        def hook(*args, **kwargs):
            bot = args[0]
            bot.queue(func, *args, **kwargs)
        return hook
    return wrapped
# Hook that is triggered upon loading/reloading of
def load():
    """Decorator: mark a handler to run when the bot loads/reloads."""
    def wrapped(func: Callable):
        @wraps(func)
        def hook(*args, **kwargs):
            return func(*args, **kwargs)
        hook._type = 'LOAD'
        return hook
    return wrapped
def close():
    """Decorator: mark a handler to run when the bot shuts down."""
    def wrapped(func: Callable):
        @wraps(func)
        def hook(*args, **kwargs):
            return func(*args, **kwargs)
        hook._type = 'CLOSE'
        return hook
    return wrapped
def connect():
    """Decorator: mark a handler to run when the bot connects."""
    def wrapped(func: Callable):
        @wraps(func)
        def hook(*args, **kwargs):
            return func(*args, **kwargs)
        hook._type = 'CONNECT'
        return hook
    return wrapped
def disconnect():
    """Decorator: mark a handler to run when the bot disconnects."""
    def wrapped(func: Callable):
        @wraps(func)
        def hook(*args, **kwargs):
            return func(*args, **kwargs)
        hook._type = 'DISCONNECT'
        return hook
    return wrapped
### Hooks that trigger on common verbs
def ping():
    """Decorator: mark a handler for the IRC PING verb."""
    def wrapped(func: Callable):
        @wraps(func)
        def hook(_self, info):
            return func(_self, info)
        hook._type = 'PING'
        return hook
    return wrapped
def pong():
    """Decorator: mark a handler for the IRC PONG verb."""
    def wrapped(func: Callable):
        @wraps(func)
        def hook(_self, info):
            return func(_self, info)
        hook._type = 'PONG'
        return hook
    return wrapped
def join():
    """Decorator: mark a handler for the IRC JOIN verb."""
    def wrapped(func: Callable):
        @wraps(func)
        def hook(_self, info):
            return func(_self, info)
        hook._type = 'JOIN'
        return hook
    return wrapped
def nick():
    """Decorator: mark a handler for the IRC NICK verb."""
    def wrapped(func: Callable):
        @wraps(func)
        def hook(_self, info):
            return func(_self, info)
        hook._type = 'NICK'
        return hook
    return wrapped
def part():
    """Decorator: mark a handler for the IRC PART verb."""
    def wrapped(func: Callable):
        @wraps(func)
        def hook(_self, info):
            return func(_self, info)
        hook._type = 'PART'
        return hook
    return wrapped
def quit():
    """Decorator: mark a handler for the IRC QUIT verb."""
    def wrapped(func: Callable):
        @wraps(func)
        def hook(_self, info):
            return func(_self, info)
        hook._type = 'QUIT'
        return hook
    return wrapped
### Hooks that trigger on the PRVIMSG verb, custom match against the message
class command(object):
    """
    Decorator: hook a command (channel or private) whose message matches
    ``match`` (string or compiled pattern).  With no match, the pattern
    defaults to ^<function name, underscores as spaces>$.
    """
    def __init__(self, match: Optional[Union[str, Pattern]]=None):
        self._match = match

    def __call__(self, func: Callable):
        message = self._match
        if message is None:
            # Bug fix: `func.func_name` is Python 2 only; Python 3 uses
            # `__name__` (func_name raised AttributeError here).
            message = re.compile('^{}$'.format(func.__name__.replace('_', ' ')))
        @wraps(func)
        def wrapped_command(_self, info):
            if isinstance(message, Pattern):
                # Expand :name: placeholders at call time, then match.
                info['match'] = _replace(_self, message).match(info['message'])
            return func(_self, info)
        wrapped_command._type = 'COMMAND'
        wrapped_command._match = {'message': message}
        return wrapped_command
class chancommand(object):
    """
    Decorator: hook a channel-only command whose message matches ``match``
    (string or compiled pattern); defaults to ^<func name, _ as spaces>$.
    """
    def __init__(self, match: Optional[Union[str, Pattern]]=None):
        self._match = match

    def __call__(self, func: Callable):
        message = self._match
        if message is None:
            # Bug fix: Python 2's func.func_name -> Python 3's __name__.
            message = re.compile('^{}$'.format(func.__name__.replace('_', ' ')))
        @wraps(func)
        def wrapped_command(_self, info):
            if isinstance(message, Pattern):
                info['match'] = _replace(_self, message).match(info['message'])
            return func(_self, info)
        wrapped_command._type = 'CHANCOMMAND'
        wrapped_command._match = {'message': message}
        return wrapped_command
class privcommand(object):
    """
    Decorator: hook a private-message-only command whose message matches
    ``match`` (string or pattern); defaults to ^<func name, _ as spaces>$.
    """
    def __init__(self, match: Optional[Union[str, Pattern]]=None):
        self._match = match

    def __call__(self, func: Callable):
        message = self._match
        if message is None:
            # Bug fix: Python 2's func.func_name -> Python 3's __name__.
            message = re.compile('^{}$'.format(func.__name__.replace('_', ' ')))
        @wraps(func)
        def wrapped_command(_self, info):
            if isinstance(message, Pattern):
                info['match'] = _replace(_self, message).match(info['message'])
            return func(_self, info)
        wrapped_command._type = 'PRIVCOMMAND'
        wrapped_command._match = {'message': message}
        return wrapped_command
class privmsg(object):
    """
    Decorator: hook the PRIVMSG verb (both direct and channel messages)
    whose message matches ``match`` (string or compiled pattern);
    defaults to ^<function name, underscores as spaces>$.
    """
    # verb - PRIVMSG
    # Matches both direct and channel messages
    def __init__(self, match: Optional[Union[str, Pattern]]=None):
        self._match = match

    def __call__(self, func):
        message = self._match
        if message is None:
            # Bug fix: Python 2's func.func_name -> Python 3's __name__.
            message = re.compile('^{}$'.format(func.__name__.replace('_', ' ')))
        @wraps(func)
        def wrapped_command(_self, info):
            if isinstance(message, Pattern):
                info['match'] = _replace(_self, message).match(info['message'])
            return func(_self, info)
        wrapped_command._type = 'PRIVMSG'
        wrapped_command._match = {'message': message}
        return wrapped_command
class channel(object):
    """
    Decorator: hook channel messages (PRIVMSG whose target starts with
    '#' or '&') matching ``match`` (string or compiled pattern);
    defaults to ^<function name, underscores as spaces>$.
    """
    # verb - PRIVMSG
    # args[0] - starts with # or &
    def __init__(self, match: Optional[Union[str, Pattern]]=None):
        self._match = match

    def __call__(self, func):
        message = self._match
        if message is None:
            # Bug fix: Python 2's func.func_name -> Python 3's __name__.
            message = re.compile('^{}$'.format(func.__name__.replace('_', ' ')))
        @wraps(func)
        def wrapped_command(_self, info):
            if isinstance(message, Pattern):
                info['match'] = _replace(_self, message).match(info['message'])
            return func(_self, info)
        wrapped_command._type = 'CHANNEL'
        wrapped_command._match = {'message': message}
        return wrapped_command
class private(object):
    """
    Decorator: hook direct messages (PRIVMSG whose target does NOT start
    with '#' or '&') matching ``match`` (string or compiled pattern);
    defaults to ^<function name, underscores as spaces>$.
    """
    # verb - PRIVMSG
    # args[0] - does /not/ start with a # or &
    def __init__(self, match: Optional[Union[str, Pattern]]=None):
        self._match = match

    def __call__(self, func):
        message = self._match
        if message is None:
            # Bug fix: Python 2's func.func_name -> Python 3's __name__.
            message = re.compile('^{}$'.format(func.__name__.replace('_', ' ')))
        @wraps(func)
        def wrapped_command(_self, info):
            if isinstance(message, Pattern):
                info['match'] = _replace(_self, message).match(info['message'])
            return func(_self, info)
        wrapped_command._type = 'PRIVATE'
        wrapped_command._match = {'message': message}
        return wrapped_command
class action(object):
    """
    Decorator: hook CTCP ACTION messages (/me) matching ``match`` (string
    or compiled pattern); defaults to ^<func name, _ as spaces>$.
    """
    # verb - PRIVMSG
    # args[1] - ACTION
    def __init__(self, match: Optional[Union[str, Pattern]]=None):
        self._match = match

    def __call__(self, func):
        message = self._match
        if message is None:
            # Bug fix: Python 2's func.func_name -> Python 3's __name__.
            message = re.compile('^{}$'.format(func.__name__.replace('_', ' ')))
        @wraps(func)
        def wrapped_command(_self, info):
            if isinstance(message, Pattern):
                info['match'] = _replace(_self, message).match(info['message'])
            return func(_self, info)
        wrapped_command._type = 'ACTION'
        wrapped_command._match = {'message': message}
        return wrapped_command
### Hooks that trigger on the NOTICE verb, custom match against the message
class notice(object):
    """
    Decorator: hook the NOTICE verb with a message matching ``match``
    (string or compiled pattern); defaults to ^<func name, _ as spaces>$.
    """
    # verb - NOTICE
    def __init__(self, match: Optional[Union[str, Pattern]]=None):
        self._match = match

    def __call__(self, func):
        message = self._match
        if message is None:
            # Bug fix: Python 2's func.func_name -> Python 3's __name__.
            message = re.compile('^{}$'.format(func.__name__.replace('_', ' ')))
        @wraps(func)
        def wrapped_command(_self, info):
            if isinstance(message, Pattern):
                info['match'] = _replace(_self, message).match(info['message'])
            return func(_self, info)
        wrapped_command._type = 'NOTICE'
        wrapped_command._match = {'message': message}
        return wrapped_command
### Hooks that trigger on a numeric verb
class code(object):
    """Decorator: hook a numeric IRC reply, e.g. ``@code(353)``."""
    # verb - 3 digit number
    def __init__(self, code: int):
        if code > 999:
            raise Exception(
                "Numeric code must be an integer less than 999 for a code hook."
            )
        self._numeric = code

    def __call__(self, func: Callable):
        @wraps(func)
        def wrapped_command(_self, info):
            return func(_self, info)
        wrapped_command._type = 'CODE'
        # IRC numerics are zero-padded to three digits on the wire.
        wrapped_command._match = {'verb': '{:03d}'.format(self._numeric)}
        return wrapped_command
### Hooks that trigger for each incoming line, custom match against the whole line
class raw(object):
    """
    Decorator: hook every raw (unparsed) incoming line, optionally
    filtered by ``match``; with no match, every line matches.
    """
    # Runs against unparsed line
    def __init__(self, match: Optional[Union[str, Pattern]]=None):
        self._match = match

    def __call__(self, func: Callable):
        # A missing match means "match every line".
        criterion = True if self._match is None else self._match
        @wraps(func)
        def wrapped_command(_self, info):
            if isinstance(criterion, Pattern):
                info['match'] = _replace(_self, criterion).match(info['raw'])
            return func(_self, info)
        wrapped_command._type = 'RAW'
        wrapped_command._match = {'raw': criterion}
        return wrapped_command
### Hooks that trigger on a specific interval or interval range in seconds, specify the min and max wait time
def interval(min: int, max: Optional[int]=None):
    """
    Decorator: run the handler repeatedly on a JobThread, waiting between
    ``min`` and ``max`` seconds (a fixed ``min`` delay when max is None).
    """
    def wrapped(func):
        @wraps(func)
        def hook(*args, **kwargs):
            return func(*args, **kwargs)
        hook._type = 'INTERVAL'
        hook._thread = JobThread
        hook._min = min
        hook._max = max
        return hook
    return wrapped
### Hook that is only called once, removes self after execution
# must be declared only on an already hooked function
# eg
#
# @hooks.once()
# @hooks.code(420)
# def custom_function(val):
# print("This will wait for a line with verb '420', run, then be removed from further execution")
def once():
    """
    Decorator: mark an already-hooked, non-threaded handler for removal
    after its first execution (no effect on unhooked or thread hooks).
    """
    def wrapped(func: Callable):
        is_hooked = hasattr(func, '_type')
        is_threaded = hasattr(func, '_thread')
        if is_hooked and not is_threaded:
            func._once = True
        return func
    return wrapped
| {
"repo_name": "techborn/pIRC",
"path": "hooks.py",
"copies": "1",
"size": "13402",
"license": "mit",
"hash": 4390673162589603000,
"line_mean": 28.3260393873,
"line_max": 109,
"alpha_frac": 0.5766303537,
"autogenerated": false,
"ratio": 4.214465408805031,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5291095762505031,
"avg_score": null,
"num_lines": null
} |
from functools import wraps
import re
import socket
import struct
import time
import gevent
import requests
def tz_hours():
delta = time.localtime().tm_hour - time.gmtime().tm_hour
sign = '-' if delta < 0 else ''
return "%s%02d.00" % (sign, abs(delta))
def is_dst():
    """Return 1 when daylight-saving time is currently in effect, else 0."""
    return int(bool(time.localtime().tm_isdst))
def get_timesync():
    """Build the SOAP envelope for a Belkin WeMo TimeSync request using
    the current UTC timestamp and local timezone/DST settings.

    NOTE(review): DstSupported is filled with the *current* DST flag
    rather than whether DST is supported at all — confirm intended.
    """
    timesync = """
<?xml version="1.0" encoding="utf-8"?>
<s:Envelope xmlns:s="http://schemas.xmlsoap.org/soap/envelope/" s:encodingStyle="http://schemas.xmlsoap.org/soap/encoding/">
<s:Body>
<u:TimeSync xmlns:u="urn:Belkin:service:timesync:1">
<UTC>{utc}</UTC>
<TimeZone>{tz}</TimeZone>
<dst>{dst}</dst>
<DstSupported>{dstsupported}</DstSupported>
</u:TimeSync>
</s:Body>
</s:Envelope>""".format(
        utc=int(time.time()),
        tz=tz_hours(),
        dst=is_dst(),
        dstsupported=is_dst()).strip()
    return timesync
def get_ip_address():
    """Return the local IP used for outbound traffic, or None on failure.

    Connecting a UDP socket sends no packets; it only asks the OS to pick
    the outgoing interface, whose address getsockname() then reports.
    """
    s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    try:
        s.connect(('1.2.3.4', 9))
        return s.getsockname()[0]
    except socket.error:
        return None
    finally:
        # Close the descriptor explicitly -- the old `del s` only dropped
        # the reference and leaked the fd until garbage collection.
        s.close()
def matcher(match_string):
    """Build a case-insensitive fuzzy matcher.

    The returned predicate reports whether the characters of
    *match_string* appear, in order (not necessarily adjacent), inside a
    candidate string.
    """
    escaped_chars = (re.escape(ch) for ch in match_string.lower())
    pattern = re.compile('.*?'.join(escaped_chars))
    def matches(candidate):
        return pattern.search(candidate.lower()) is not None
    return matches
# This is pretty arbitrary. I'm choosing, for no real reason, the length of
# a subscription.
# NOTE(review): under Python 3 true division makes this a float (~30.02);
# under Python 2 it is the int 30. retry_with_delay only counts down from
# it, so either works, but confirm before porting.
_RETRIES = 1801/60
def get_retries():
    # Retry budget used by retry_with_delay below.
    return _RETRIES
def retry_with_delay(f, delay=60):
    """
    Retry the wrapped requests.request function in case of ConnectionError.
    Optionally limit the number of retries or set the delay between retries.

    A 5-second timeout is applied unless the caller supplies one; the final
    failed attempt re-raises the underlying exception.
    """
    @wraps(f)
    def inner(*args, **kwargs):
        # Default the timeout instead of clobbering one the caller passed in
        # (the old code overwrote an explicit timeout unconditionally).
        kwargs.setdefault('timeout', 5)
        remaining = get_retries() + 1
        while remaining:
            remaining -= 1
            try:
                return f(*args, **kwargs)
            except (requests.ConnectionError, requests.Timeout):
                if not remaining:
                    raise
                gevent.sleep(delay)
    return inner
# Module-level convenience wrappers: requests.* with the retry/timeout
# policy from retry_with_delay applied.
requests_get = retry_with_delay(requests.get)
requests_post = retry_with_delay(requests.post)
requests_request = retry_with_delay(requests.request)
| {
"repo_name": "sstangle73/ouimeaux",
"path": "ouimeaux/utils.py",
"copies": "9",
"size": "2252",
"license": "bsd-3-clause",
"hash": -1481027924456971000,
"line_mean": 24.0222222222,
"line_max": 124,
"alpha_frac": 0.6221136767,
"autogenerated": false,
"ratio": 3.55205047318612,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.0007816186556927297,
"num_lines": 90
} |
from functools import wraps
import re
import sys
import django
from django.db.models import F, NOT_PROVIDED
from django.db.models.sql import aggregates as sqlaggregates
from django.db.models.sql.constants import MULTI
from django.db.models.sql.where import OR
from django.db.utils import DatabaseError, IntegrityError
from django.utils.encoding import smart_str
from django.utils.tree import Node
from pymongo import ASCENDING, DESCENDING
from pymongo.errors import PyMongoError, DuplicateKeyError
from djangotoolbox.db.basecompiler import (
NonrelQuery,
NonrelCompiler,
NonrelInsertCompiler,
NonrelUpdateCompiler,
NonrelDeleteCompiler,
EmptyResultSet)
from .aggregations import get_aggregation_class_by_name
from .query import A
from .utils import safe_regex
if django.VERSION >= (1, 6):
    def get_selected_fields(query):
        # Django >= 1.6: query.select holds SelectInfo objects.
        # Return the column names to fetch, or None meaning "all columns".
        fields = None
        if query.select and not query.aggregates:
            fields = [info.field.column for info in query.select]
        return fields
else:
    def get_selected_fields(query):
        # Django < 1.6: the selected fields live on query.select_fields.
        fields = None
        if query.select_fields and not query.aggregates:
            fields = [field.column for field in query.select_fields]
        return fields
# Maps Django ORM lookup types to functions producing the equivalent
# MongoDB query fragment for a given value.
OPERATORS_MAP = {
    'exact': lambda val: val,
    'gt': lambda val: {'$gt': val},
    'gte': lambda val: {'$gte': val},
    'lt': lambda val: {'$lt': val},
    'lte': lambda val: {'$lte': val},
    'in': lambda val: {'$in': val},
    'range': lambda val: {'$gte': val[0], '$lte': val[1]},
    'isnull': lambda val: None if val else {'$ne': None},
    # Regex matchers.
    'iexact': safe_regex('^%s$', re.IGNORECASE),
    'startswith': safe_regex('^%s'),
    'istartswith': safe_regex('^%s', re.IGNORECASE),
    'endswith': safe_regex('%s$'),
    'iendswith': safe_regex('%s$', re.IGNORECASE),
    'contains': safe_regex('%s'),
    'icontains': safe_regex('%s', re.IGNORECASE),
    'regex': lambda val: re.compile(val),
    'iregex': lambda val: re.compile(val, re.IGNORECASE),
    # Date OPs.
    'year': lambda val: {'$gte': val[0], '$lt': val[1]},
}
# Same mapping for negated lookups that have a direct negative operator in
# MongoDB; lookups missing here get wrapped in {'$not': ...} instead (see
# MongoQuery.add_filters).
NEGATED_OPERATORS_MAP = {
    'exact': lambda val: {'$ne': val},
    'gt': lambda val: {'$lte': val},
    'gte': lambda val: {'$lt': val},
    'lt': lambda val: {'$gte': val},
    'lte': lambda val: {'$gt': val},
    'in': lambda val: {'$nin': val},
    'isnull': lambda val: {'$ne': None} if val else None,
}
def safe_call(func):
    # Decorator: convert PyMongo errors into Django's IntegrityError /
    # DatabaseError while preserving the original traceback (Python 2
    # three-expression raise syntax).
    @wraps(func)
    def wrapper(*args, **kwargs):
        try:
            return func(*args, **kwargs)
        except DuplicateKeyError, e:
            raise IntegrityError, IntegrityError(smart_str(e)), sys.exc_info()[2]
        except PyMongoError, e:
            raise DatabaseError, DatabaseError(smart_str(e)), sys.exc_info()[2]
    return wrapper
class MongoQuery(NonrelQuery):
    """Nonrel query backed by a PyMongo collection.

    Accumulates a MongoDB filter document in ``self.mongo_query`` and sort
    criteria in ``self.ordering``, then executes them via ``get_cursor()``.
    """
    def __init__(self, compiler, fields):
        super(MongoQuery, self).__init__(compiler, fields)
        self.ordering = []
        self.collection = self.compiler.get_collection()
        # Allow raw MongoDB queries on the Django query to pass through.
        self.mongo_query = getattr(compiler.query, 'raw_query', {})
    def __repr__(self):
        return '<MongoQuery: %r ORDER %r>' % (self.mongo_query, self.ordering)
    def fetch(self, low_mark, high_mark):
        # Yield result documents, renaming Mongo's '_id' back to the
        # model's primary-key column.
        results = self.get_cursor()
        pk_column = self.query.get_meta().pk.column
        for entity in results:
            entity[pk_column] = entity.pop('_id')
            yield entity
    @safe_call
    def count(self, limit=None):
        # Count matching documents, optionally capped at `limit`.
        results = self.get_cursor()
        if limit is not None:
            results.limit(limit)
        return results.count()
    @safe_call
    def order_by(self, ordering):
        # `ordering` is either a bool (natural order; False = reversed) or
        # a list of (field, ascending) pairs.
        if isinstance(ordering, bool):
            # No need to add {$natural: ASCENDING} as it's the default.
            if not ordering:
                self.ordering.append(('$natural', DESCENDING))
        else:
            for field, ascending in ordering:
                column = '_id' if field.primary_key else field.column
                direction = ASCENDING if ascending else DESCENDING
                self.ordering.append((column, direction))
    @safe_call
    def delete(self):
        # Remove every document matching the accumulated filter.
        options = self.connection.operation_flags.get('delete', {})
        self.collection.remove(self.mongo_query, **options)
    def get_cursor(self):
        # An empty slice cannot match anything; skip the round-trip.
        if self.query.low_mark == self.query.high_mark:
            return []
        fields = get_selected_fields(self.query)
        cursor = self.collection.find(self.mongo_query, fields)
        if self.ordering:
            cursor.sort(self.ordering)
        if self.query.low_mark > 0:
            cursor.skip(self.query.low_mark)
        if self.query.high_mark is not None:
            cursor.limit(int(self.query.high_mark - self.query.low_mark))
        return cursor
    def add_filters(self, filters, query=None):
        """Translate a Django WHERE tree into ``self.mongo_query``.

        Recurses over the tree, merging per-column conditions into `query`
        (defaults to ``self.mongo_query``). OR subtrees collect into one
        '$or' list; negation toggles ``self._negated`` for the subtree.
        """
        children = self._get_children(filters.children)
        if query is None:
            query = self.mongo_query
        if filters.connector == OR:
            assert '$or' not in query, "Multiple ORs are not supported."
            or_conditions = query['$or'] = []
        if filters.negated:
            self._negated = not self._negated
        for child in children:
            if filters.connector == OR:
                subquery = {}
            else:
                subquery = query
            if isinstance(child, Node):
                if filters.connector == OR and child.connector == OR:
                    if len(child.children) > 1:
                        raise DatabaseError("Nested ORs are not supported.")
                if filters.connector == OR and filters.negated:
                    raise NotImplementedError("Negated ORs are not supported.")
                self.add_filters(child, query=subquery)
                if filters.connector == OR and subquery:
                    or_conditions.extend(subquery.pop('$or', []))
                    if subquery:
                        or_conditions.append(subquery)
                continue
            field, lookup_type, value = self._decode_child(child)
            if lookup_type in ('month', 'day'):
                raise DatabaseError("MongoDB does not support month/day "
                                    "queries.")
            if self._negated and lookup_type == 'range':
                raise DatabaseError("Negated range lookups are not "
                                    "supported.")
            if field.primary_key:
                column = '_id'
            else:
                column = field.column
            existing = subquery.get(column)
            if isinstance(value, A):
                column, value = value.as_q(field)
            if self._negated and lookup_type in NEGATED_OPERATORS_MAP:
                op_func = NEGATED_OPERATORS_MAP[lookup_type]
                already_negated = True
            else:
                op_func = OPERATORS_MAP[lookup_type]
                if self._negated:
                    already_negated = False
            lookup = op_func(value)
            if existing is None:
                # First condition on this column: store it directly,
                # wrapping in '$not' if negation wasn't already applied.
                if self._negated and not already_negated:
                    lookup = {'$not': lookup}
                subquery[column] = lookup
                if filters.connector == OR and subquery:
                    or_conditions.append(subquery)
                continue
            # From here on we must merge with an existing condition on
            # the same column.
            if not isinstance(existing, dict):
                if not self._negated:
                    # {'a': o1} + {'a': o2} --> {'a': {'$all': [o1, o2]}}
                    assert not isinstance(lookup, dict)
                    subquery[column] = {'$all': [existing, lookup]}
                else:
                    # {'a': o1} + {'a': {'$not': o2}} -->
                    # {'a': {'$all': [o1], '$nin': [o2]}}
                    if already_negated:
                        assert lookup.keys() == ['$ne']
                        lookup = lookup['$ne']
                    assert not isinstance(lookup, dict)
                    subquery[column] = {'$all': [existing], '$nin': [lookup]}
            else:
                not_ = existing.pop('$not', None)
                if not_:
                    assert not existing
                    if isinstance(lookup, dict):
                        assert lookup.keys() == ['$ne']
                        lookup = lookup.values()[0]
                    assert not isinstance(lookup, dict), (not_, lookup)
                    if self._negated:
                        # {'not': {'a': o1}} + {'a': {'not': o2}} -->
                        # {'a': {'nin': [o1, o2]}}
                        subquery[column] = {'$nin': [not_, lookup]}
                    else:
                        # {'not': {'a': o1}} + {'a': o2} -->
                        # {'a': {'nin': [o1], 'all': [o2]}}
                        subquery[column] = {'$nin': [not_], '$all': [lookup]}
                else:
                    if isinstance(lookup, dict):
                        if '$ne' in lookup:
                            if '$nin' in existing:
                                # {'$nin': [o1, o2]} + {'$ne': o3} -->
                                # {'$nin': [o1, o2, o3]}
                                assert '$ne' not in existing
                                existing['$nin'].append(lookup['$ne'])
                            elif '$ne' in existing:
                                # {'$ne': o1} + {'$ne': o2} -->
                                # {'$nin': [o1, o2]}
                                existing['$nin'] = [existing.pop('$ne'),
                                                    lookup['$ne']]
                            else:
                                existing.update(lookup)
                        else:
                            if '$in' in lookup and '$in' in existing:
                                # {'$in': o1} + {'$in': o2}
                                # --> {'$in': o1 union o2}
                                existing['$in'] = list(
                                    set(lookup['$in'] + existing['$in']))
                            else:
                                # {'$gt': o1} + {'$lt': o2}
                                # --> {'$gt': o1, '$lt': o2}
                                assert all(key not in existing
                                           for key in lookup.keys()), \
                                    [lookup, existing]
                                existing.update(lookup)
                    else:
                        key = '$nin' if self._negated else '$all'
                        existing.setdefault(key, []).append(lookup)
            if filters.connector == OR and subquery:
                or_conditions.append(subquery)
        if filters.negated:
            # Restore the negation state for the caller's level.
            self._negated = not self._negated
class SQLCompiler(NonrelCompiler):
    """
    Base class for all Mongo compilers.
    """
    query_class = MongoQuery
    def get_collection(self):
        # PyMongo collection backing this query's model table.
        return self.connection.get_collection(self.query.get_meta().db_table)
    def execute_sql(self, result_type=MULTI):
        """
        Handles aggregate/count queries.
        """
        collection = self.get_collection()
        aggregations = self.query.aggregate_select.items()
        if len(aggregations) == 1 and isinstance(aggregations[0][1],
                                                 sqlaggregates.Count):
            # No need for full-featured aggregation processing if we
            # only want to count().
            if result_type is MULTI:
                return [[self.get_count()]]
            else:
                return [self.get_count()]
        counts, reduce, finalize, order, initial = [], [], [], [], {}
        try:
            query = self.build_query()
        except EmptyResultSet:
            return []
        for alias, aggregate in aggregations:
            assert isinstance(aggregate, sqlaggregates.Aggregate)
            if isinstance(aggregate, sqlaggregates.Count):
                order.append(None)
                # Needed to keep the iteration order which is important
                # in the returned value.
                # XXX: This actually does a separate query... performance?
                counts.append(self.get_count())
                continue
            aggregate_class = get_aggregation_class_by_name(
                aggregate.__class__.__name__)
            lookup = aggregate.col
            if isinstance(lookup, tuple):
                # lookup is a (table_name, column_name) tuple.
                # Get rid of the table name as aggregations can't span
                # multiple tables anyway.
                if lookup[0] != collection.name:
                    raise DatabaseError("Aggregations can not span multiple "
                                        "tables (tried %r and %r)." %
                                        (lookup[0], collection.name))
                lookup = lookup[1]
            self.query.aggregates[alias] = aggregate = aggregate_class(
                alias, lookup, aggregate.source)
            order.append(alias) # Just to keep the right order.
            initial.update(aggregate.initial())
            reduce.append(aggregate.reduce())
            finalize.append(aggregate.finalize())
        # Assemble server-side JS reduce/finalize functions and run them
        # through MongoDB's group() command.
        reduce = 'function(doc, out){ %s }' % '; '.join(reduce)
        finalize = 'function(out){ %s }' % '; '.join(finalize)
        cursor = collection.group(None, query.mongo_query, initial, reduce,
                                  finalize)
        ret = []
        for alias in order:
            # None placeholders correspond to pre-computed count() results.
            result = cursor[0][alias] if alias else counts.pop(0)
            if result_type is MULTI:
                result = [result]
            ret.append(result)
        return ret
class SQLInsertCompiler(NonrelInsertCompiler, SQLCompiler):
    @safe_call
    def insert(self, docs, return_id=False):
        """
        Stores a document using field columns as element names, except
        for the primary key field for which "_id" is used.
        If just a {pk_field: None} mapping is given a new empty
        document is created, otherwise value for a primary key may not
        be None.
        """
        for doc in docs:
            try:
                # Rename the model's pk column to Mongo's '_id'.
                doc['_id'] = doc.pop(self.query.get_meta().pk.column)
            except KeyError:
                pass
            if doc.get('_id', NOT_PROVIDED) is None:
                if len(doc) == 1:
                    # insert with empty model
                    doc.clear()
                else:
                    raise DatabaseError("Can't save entity with _id set to None")
        # NOTE(review): the save below sits outside the loop, so only the
        # last normalized doc is written. Confirm callers never pass more
        # than one document here, or whether this is an upstream bug.
        collection = self.get_collection()
        options = self.connection.operation_flags.get('save', {})
        if return_id:
            return collection.save(doc, **options)
        else:
            collection.save(doc, **options)
# TODO: Define a common nonrel API for updates and add it to the nonrel
# backend base classes and port this code to that API.
class SQLUpdateCompiler(NonrelUpdateCompiler, SQLCompiler):
    query_class = MongoQuery
    def update(self, values):
        # Translate (field, value) pairs into a MongoDB update document of
        # the form {'$set': {...}, '$inc': {...}} and execute it.
        multi = True
        spec = {}
        for field, value in values:
            if field.primary_key:
                raise DatabaseError("Can not modify _id.")
            if getattr(field, 'forbids_updates', False):
                raise DatabaseError("Updates on %ss are not allowed." %
                                    field.__class__.__name__)
            if hasattr(value, 'evaluate'):
                # .update(foo=F('foo') + 42) --> {'$inc': {'foo': 42}}
                lhs, rhs = value.children
                assert (value.connector in (value.ADD, value.SUB) and
                        not value.negated and
                        isinstance(lhs, F) and not isinstance(rhs, F) and
                        lhs.name == field.name)
                if value.connector == value.SUB:
                    rhs = -rhs
                action = '$inc'
                value = rhs
            else:
                # .update(foo=123) --> {'$set': {'foo': 123}}
                action = '$set'
            spec.setdefault(action, {})[field.column] = value
            if field.unique:
                # Touching a unique field must affect at most one document.
                multi = False
        return self.execute_update(spec, multi)
    @safe_call
    def execute_update(self, update_spec, multi=True, **kwargs):
        collection = self.get_collection()
        try:
            criteria = self.build_query().mongo_query
        except EmptyResultSet:
            # Nothing can match; report zero affected rows.
            return 0
        options = self.connection.operation_flags.get('update', {})
        options = dict(options, **kwargs)
        info = collection.update(criteria, update_spec, multi=multi, **options)
        if info is not None:
            # getLastError-style response: 'n' is the affected-row count.
            return info.get('n')
class SQLDeleteCompiler(NonrelDeleteCompiler, SQLCompiler):
    # Deletion needs no Mongo-specific behaviour beyond MongoQuery.delete().
    pass
| {
"repo_name": "cjaffar/jaffarchiosa",
"path": "jaffarchiosa/lib/python2.7/site-packages/django_mongodb_engine/compiler.py",
"copies": "1",
"size": "16906",
"license": "mit",
"hash": -5405475740151193000,
"line_mean": 36.9058295964,
"line_max": 81,
"alpha_frac": 0.5025434757,
"autogenerated": false,
"ratio": 4.4407670081428945,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5443310483842895,
"avg_score": null,
"num_lines": null
} |
from functools import wraps
import re
import traceback
from django.http import HttpResponseForbidden
from django.views.decorators.csrf import csrf_exempt
from watchman import settings
def check(func):
    """
    Decorator which wraps checks and returns an error response on failure.

    On success the check's own return value is passed through unchanged.
    On any exception a {"ok": False, "error": ..., "stacktrace": ...} dict
    is returned instead; if the check was called with a positional argument
    (e.g. a database alias), the error response is namespaced under it.
    """
    # @wraps keeps the check's name/docstring, matching the other
    # decorators in this module.
    @wraps(func)
    def wrapped(*args, **kwargs):
        try:
            response = func(*args, **kwargs)
        except Exception as e:
            response = {
                "ok": False,
                "error": str(e),
                "stacktrace": traceback.format_exc(),
            }
            # The check contains several individual checks (e.g., one per
            # database). Preface the results by name.
            if args:
                response = {args[0]: response}
        return response
    return wrapped
def token_required(view_func):
    """
    Decorator which ensures that WATCHMAN_TOKEN is provided if set.
    WATCHMAN_TOKEN_NAME can also be set if the token GET parameter must be
    customized.
    """
    def _parse_auth_header(auth_header):
        """
        Parse the `Authorization` header
        Expected format: `WATCHMAN-TOKEN Token="ABC123"`
        """
        # TODO: Figure out full set of allowed characters
        # http://stackoverflow.com/questions/19028068/illegal-characters-in-http-headers
        # https://www.w3.org/Protocols/rfc2616/rfc2616-sec2.html#sec2.2
        # https://www.w3.org/Protocols/rfc2616/rfc2616-sec4.html#sec4.2
        reg = re.compile('(\w+)[=] ?"?([\w-]+)"?')
        header_dict = dict(reg.findall(auth_header))
        # A KeyError here falls through to the GET-parameter fallback in
        # _get_passed_token.
        return header_dict['Token']
    def _get_passed_token(request):
        """
        Try to get the passed token, starting with the header and fall back to `GET` param
        """
        try:
            auth_header = request.META['HTTP_AUTHORIZATION']
            token = _parse_auth_header(auth_header)
        except KeyError:
            token = request.GET.get(settings.WATCHMAN_TOKEN_NAME)
        return token
    def _validate_token(request):
        # With no token configured, every request is authorized.
        watchman_token = settings.WATCHMAN_TOKEN
        if watchman_token is None:
            return True
        passed_token = _get_passed_token(request)
        return watchman_token == passed_token
    @csrf_exempt
    @wraps(view_func)
    def _wrapped_view(request, *args, **kwargs):
        if _validate_token(request):
            return view_func(request, *args, **kwargs)
        return HttpResponseForbidden()
    return _wrapped_view
# Resolve the `auth` decorator once, at import time, from configuration.
if settings.WATCHMAN_AUTH_DECORATOR is None:
    # No auth configured: pass requests straight through (CSRF-exempt).
    def auth(view_func):
        @csrf_exempt
        @wraps(view_func)
        def _wrapped_view(request, *args, **kwargs):
            return view_func(request, *args, **kwargs)
        return _wrapped_view
elif settings.WATCHMAN_AUTH_DECORATOR == 'watchman.decorators.token_required':
    # Avoid import loops
    auth = token_required
else:
    # Dotted-path setting: import the configured decorator dynamically.
    try:
        from importlib import import_module
    except ImportError: # Django < 1.8
        from django.utils.importlib import import_module
    mod_name, dec = settings.WATCHMAN_AUTH_DECORATOR.rsplit('.', 1)
    auth = getattr(import_module(mod_name), dec)
| {
"repo_name": "blag/django-watchman",
"path": "watchman/decorators.py",
"copies": "2",
"size": "3160",
"license": "bsd-3-clause",
"hash": 6452464763305423000,
"line_mean": 28.8113207547,
"line_max": 90,
"alpha_frac": 0.6170886076,
"autogenerated": false,
"ratio": 4.005069708491762,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.0006832728253371983,
"num_lines": 106
} |
from functools import wraps
import re
from django.conf import settings
from django.contrib.auth import REDIRECT_FIELD_NAME
from django.contrib.auth.decorators import user_passes_test
from django.core.exceptions import PermissionDenied
from django.http import HttpResponseForbidden, HttpResponseRedirect
from django.utils.decorators import available_attrs
from django.utils.http import urlquote
from kuma.core.urlresolvers import reverse
def user_access_decorator(redirect_func, redirect_url_func, deny_func=None,
                          redirect_field=REDIRECT_FIELD_NAME):
    """
    Helper function that returns a decorator.
    * redirect_func ----- If truthy, a redirect will occur.
    * deny_func --------- If truthy, HttpResponseForbidden is returned.
    * redirect_url_func - Evaluated at view time, returns the redirect URL
      i.e. where to go if redirect_func is truthy.
    * redirect_field ---- What field to set in the url, defaults to Django's.
      Set this to None to exclude it from the URL.
    """
    def decorator(view_fn):
        def _wrapped_view(request, *args, **kwargs):
            if redirect_func(request.user):
                # We must call reverse at the view level, else the threadlocal
                # locale prefixing doesn't take effect.
                redirect_url = redirect_url_func() or reverse('account_login')
                # Redirect back here afterwards?
                if redirect_field:
                    path = urlquote(request.get_full_path())
                    redirect_url = '%s?%s=%s' % (
                        redirect_url, redirect_field, path)
                return HttpResponseRedirect(redirect_url)
            if deny_func and deny_func(request.user):
                return HttpResponseForbidden()
            return view_fn(request, *args, **kwargs)
        # available_attrs keeps this compatible with old Django versions.
        return wraps(view_fn, assigned=available_attrs(view_fn))(_wrapped_view)
    return decorator
def logout_required(redirect):
    """Requires that the user *not* be logged in."""
    def _is_logged_in(u):
        return u.is_authenticated()
    # Usable both bare (@logout_required) and parametrized
    # (@logout_required('/some/url')).
    if hasattr(redirect, '__call__'):
        decorator = user_access_decorator(
            _is_logged_in, redirect_field=None,
            redirect_url_func=lambda: reverse('home'))
        return decorator(redirect)
    return user_access_decorator(_is_logged_in, redirect_field=None,
                                 redirect_url_func=lambda: redirect)
def login_required(func, login_url=None, redirect=REDIRECT_FIELD_NAME,
                   only_active=True):
    """Requires that the user is logged in."""
    if only_active:
        def _needs_login(u):
            return not (u.is_authenticated() and u.is_active)
    else:
        def _needs_login(u):
            return not u.is_authenticated()
    return user_access_decorator(_needs_login, redirect_field=redirect,
                                 redirect_url_func=lambda: login_url)(func)
def permission_required(perm, login_url=None, redirect=REDIRECT_FIELD_NAME,
                        only_active=True):
    """A replacement for django.contrib.auth.decorators.permission_required
    that doesn't ask authenticated users to log in."""
    def _needs_login(u):
        return not u.is_authenticated()
    if only_active:
        def _is_denied(u):
            return not (u.is_active and u.has_perm(perm))
    else:
        def _is_denied(u):
            return not u.has_perm(perm)
    return user_access_decorator(_needs_login, redirect_field=redirect,
                                 redirect_url_func=lambda: login_url,
                                 deny_func=_is_denied)
# django never_cache isn't as thorough as we might like
# http://stackoverflow.com/a/2095648/571420
# http://stackoverflow.com/a/2068407/571420
# https://developer.mozilla.org/en-US/docs/Web/HTTP/Caching_FAQ
# Fixed in Django 1.9:
# https://docs.djangoproject.com/en/1.9/topics/http/decorators/#caching
def never_cache(view_func):
    """Mark a view's responses as uncacheable by browsers and proxies.

    Sets Cache-Control/Pragma/Expires more aggressively than Django's own
    never_cache (see the links above this function).
    """
    # @wraps preserves the view's name/attributes, which the original
    # wrapper discarded (file already imports wraps at the top).
    @wraps(view_func)
    def _wrapped_view_func(request, *args, **kwargs):
        resp = view_func(request, *args, **kwargs)
        resp['Cache-Control'] = 'no-cache, no-store, must-revalidate'
        resp['Pragma'] = 'no-cache'
        resp['Expires'] = '0'
        return resp
    return _wrapped_view_func
def is_superuser(u):
    """Test for user_passes_test: True for superusers, False (-> login
    redirect) for anonymous users, PermissionDenied for everyone else."""
    if not u.is_authenticated():
        return False
    if u.is_superuser:
        return True
    raise PermissionDenied
# View decorator: allow only superusers (raises PermissionDenied for other
# authenticated users via is_superuser above).
superuser_required = user_passes_test(is_superuser)
#: A decorator to use for requiring a superuser
def block_user_agents(view_func):
    """Return 403 for requests whose User-Agent matches any pattern in
    settings.BLOCKABLE_USER_AGENTS; otherwise call the wrapped view."""
    # Compile the configured patterns once, at decoration time.
    blockable_ua_patterns = [
        re.compile(agent)
        for agent in getattr(settings, 'BLOCKABLE_USER_AGENTS', [])
    ]
    def agent_blocked_view(request, *args, **kwargs):
        http_user_agent = request.META.get('HTTP_USER_AGENT', None)
        if http_user_agent is not None:
            for pattern in blockable_ua_patterns:
                if pattern.search(request.META['HTTP_USER_AGENT']):
                    return HttpResponseForbidden()
        return view_func(request, *args, **kwargs)
    return wraps(view_func,
                 assigned=available_attrs(view_func))(agent_blocked_view)
| {
"repo_name": "yfdyh000/kuma",
"path": "kuma/core/decorators.py",
"copies": "1",
"size": "5280",
"license": "mpl-2.0",
"hash": 3233299262566167000,
"line_mean": 37.2608695652,
"line_max": 79,
"alpha_frac": 0.6392045455,
"autogenerated": false,
"ratio": 4.0121580547112465,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.0010829381973039847,
"num_lines": 138
} |
from functools import wraps
import re
from flask import Blueprint, g, make_response
from mcavatar import app
from mcavatar.avatar import Avatar
img = Blueprint('img', __name__)
# Username character pattern. NOTE(review): unanchored, so re.match only
# checks a prefix; valid_user below adds a separate length check.
username_re = re.compile('([A-Z_0-9]){2,16}', re.I)
# Image size bounds, overridable through the Flask config.
d_size = app.config.get('DEFAULT_IMG_SIZE', 48)
max_size = app.config.get('MAX_IMG_SIZE', 999)
min_size = app.config.get('MIN_IMG_SIZE', 16)
def valid_user(user):
    """Return True when *user* is a well-formed username: 2-16 characters,
    letters/digits/underscore only.

    The previous check used an unanchored regex, so any string with two
    valid leading characters (e.g. "ab!#") passed validation; the pattern
    here is anchored at both ends.
    """
    if re.match(r'\A[A-Za-z0-9_]{2,16}\Z', user):
        return True
    return False
def validate(func):
    """Decorator: coerce helm/user/size arguments into their valid ranges
    before calling the wrapped view (bad users fall back to 'char')."""
    @wraps(func)
    def wrapped(helm, user, size=d_size, *a, **kw):
        if not valid_user(user):
            user = 'char'
        if size > max_size:
            size = max_size
        elif size < min_size:
            size = min_size
        lowered = helm.lower()
        helm = lowered if lowered in ('h', 'f') else 'h'
        return func(helm, user, size, *a, **kw)
    return wrapped
def image_response(user, size=d_size, helmet='h'):
    """Build an inline PNG response for *user*'s avatar.

    Checks redis for a cached render first; on any rendering failure the
    default 'char' skin is served instead. NOTE(review): the fallback drops
    the helmet argument, matching the original behaviour -- confirm intent.
    """
    key = '{0}_{1}_{2}'.format(size, helmet, user)
    img = g.redis.get(key)
    if img is None:
        try:
            a = Avatar(user, size, helmet)
            img = a.render()
        # Narrowed from a bare `except:` (which also swallowed
        # KeyboardInterrupt/SystemExit).
        except Exception:
            # Re-raise when the fallback itself fails, so a broken 'char'
            # skin can't recurse forever.
            if user == 'char':
                raise
            return image_response('char', size)
    response = make_response(img)
    response.headers['Content-Type'] = 'image/png'
    response.headers['Content-Disposition'] = 'inline'
    return response
@img.route('/<helm>/<user>/<int:size>')
@img.route('/<helm>/<user>/<int:size>.png')
@img.route('/<helm>/<user>')
@img.route('/<helm>/<user>.png')
@validate
def avatar(helm, user, size=d_size):
    # Serve the avatar PNG; @validate has already sanitized all arguments.
    return image_response(user, size, helm)
@img.route('/update/<user>')
def update(user):
    # Invalidate every cached size/helmet variant for *user* in redis.
    if not valid_user(user):
        return 'bad user'
    keys = g.redis.keys('*_{0}'.format(user))
    if keys != []:
        g.redis.delete(*keys)
    return 'ok'
| {
"repo_name": "joealcorn/MCAvatar",
"path": "mcavatar/views/img/__init__.py",
"copies": "1",
"size": "1841",
"license": "mit",
"hash": 1117927935651683500,
"line_mean": 22.6025641026,
"line_max": 54,
"alpha_frac": 0.5812058664,
"autogenerated": false,
"ratio": 3.130952380952381,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.920815183709597,
"avg_score": 0.0008012820512820513,
"num_lines": 78
} |
from functools import wraps
import requests
from dbt.exceptions import RegistryException
from dbt.utils import memoized
from dbt.logger import GLOBAL_LOGGER as logger
import os
import time
# A private package hub can be substituted via the DBT_PACKAGE_HUB_URL
# environment variable; otherwise use the public hub.
if os.getenv('DBT_PACKAGE_HUB_URL'):
    DEFAULT_REGISTRY_BASE_URL = os.getenv('DBT_PACKAGE_HUB_URL')
else:
    DEFAULT_REGISTRY_BASE_URL = 'https://hub.getdbt.com/'
def _get_url(url, registry_base_url=None):
if registry_base_url is None:
registry_base_url = DEFAULT_REGISTRY_BASE_URL
return '{}{}'.format(registry_base_url, url)
def _wrap_exceptions(fn):
@wraps(fn)
def wrapper(*args, **kwargs):
max_attempts = 5
attempt = 0
while True:
attempt += 1
try:
return fn(*args, **kwargs)
except requests.exceptions.ConnectionError as exc:
if attempt < max_attempts:
time.sleep(1)
continue
raise RegistryException(
'Unable to connect to registry hub'
) from exc
return wrapper
@_wrap_exceptions
def _get(path, registry_base_url=None):
    # GET a JSON document from the registry, logging the request and the
    # response status; raises on HTTP error statuses.
    url = _get_url(path, registry_base_url)
    logger.debug('Making package registry request: GET {}'.format(url))
    resp = requests.get(url)
    logger.debug('Response from registry: GET {} {}'.format(url,
                                                            resp.status_code))
    resp.raise_for_status()
    return resp.json()
def index(registry_base_url=None):
    # Full package index from the hub.
    return _get('api/v1/index.json', registry_base_url)
# Cached variant -- the index is treated as stable within a single run.
index_cached = memoized(index)
def packages(registry_base_url=None):
    # Listing of all known packages.
    return _get('api/v1/packages.json', registry_base_url)
def package(name, registry_base_url=None):
    # Metadata (including all versions) for a single package.
    return _get('api/v1/{}.json'.format(name), registry_base_url)
def package_version(name, version, registry_base_url=None):
    # Metadata for one specific version of a package.
    return _get('api/v1/{}/{}.json'.format(name, version), registry_base_url)
def get_available_versions(name):
    # All published version strings for *name*.
    response = package(name)
    return list(response['versions'])
| {
"repo_name": "analyst-collective/dbt",
"path": "core/dbt/clients/registry.py",
"copies": "2",
"size": "2058",
"license": "apache-2.0",
"hash": 3729777574725439500,
"line_mean": 26.8108108108,
"line_max": 78,
"alpha_frac": 0.6253644315,
"autogenerated": false,
"ratio": 3.6947935368043088,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0,
"num_lines": 74
} |
from functools import wraps
import secrets
from django.core.signing import TimestampSigner, SignatureExpired
from django.shortcuts import redirect, render
from hc.api.models import TokenBucket
from hc.lib import emails
def _session_unsign(request, key, max_age):
if key not in request.session:
return None
try:
return TimestampSigner().unsign(request.session[key], max_age=max_age)
except SignatureExpired:
pass
def require_sudo_mode(f):
    """Decorator: require a recently confirmed emailed code ("sudo mode")
    before running the wrapped view. Active sudo mode lasts 1800s; an
    issued code is valid for 900s."""
    @wraps(f)
    def wrapper(request, *args, **kwds):
        assert request.user.is_authenticated
        # is sudo mode active and has not expired yet?
        if _session_unsign(request, "sudo", 1800) == "active":
            return f(request, *args, **kwds)
        # Rate-limit code issuing/entry attempts per user.
        if not TokenBucket.authorize_sudo_code(request.user):
            return render(request, "try_later.html")
        # has the user submitted a code to enter sudo mode?
        if "sudo_code" in request.POST:
            ours = _session_unsign(request, "sudo_code", 900)
            if ours and ours == request.POST["sudo_code"]:
                request.session.pop("sudo_code")
                request.session["sudo"] = TimestampSigner().sign("active")
                # Redirect so refreshing the page can't resubmit the code.
                return redirect(request.path)
        if not _session_unsign(request, "sudo_code", 900):
            # No valid outstanding code: issue a fresh 6-digit one by email.
            code = "%06d" % secrets.randbelow(1000000)
            request.session["sudo_code"] = TimestampSigner().sign(code)
            emails.sudo_code(request.user.email, {"sudo_code": code})
        ctx = {}
        if "sudo_code" in request.POST:
            # A code was posted but didn't match: show the error state.
            ctx["wrong_code"] = True
        return render(request, "accounts/sudo.html", ctx)
    return wrapper
| {
"repo_name": "healthchecks/healthchecks",
"path": "hc/accounts/decorators.py",
"copies": "2",
"size": "1683",
"license": "bsd-3-clause",
"hash": -6255282899579065000,
"line_mean": 32,
"line_max": 78,
"alpha_frac": 0.6274509804,
"autogenerated": false,
"ratio": 3.886836027713626,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0,
"num_lines": 51
} |
from functools import wraps
import shutil
import tempfile
import os
import pytest
def pytest_addoption(parser):
    # Register the plugin's command-line options under a "fits" group.
    group = parser.getgroup('fits', 'Fits comparison')
    group.addoption('--runtest', action='store_true',
                    help="Enable comparison of fits images")
    group.addoption('--generate-images',
                    help="directory to generate reference images in, relative to location where py.test is run",
                    action='store')
    group.addoption('--define-path',
                    help="directory containing baseline images, relative to location where py.test is run",
                    action='store')
def pytest_configure(config):
    """Register the FITS ImageComparison plugin when image comparison or
    baseline generation is requested on the command line.

    Raises ValueError when --define-path and --generate-images are both
    given, since generating baselines and comparing against them are
    mutually exclusive.
    """
    if config.getoption("--generate-images") is not None:
        if config.getoption("--define-path") is not None:
            # BUG FIX: the message used to reference a nonexistent
            # "--generate-path" option.
            raise ValueError(
                "Can't set --define-path when generating reference images with --generate-images")
    if config.getoption("--runtest") or config.getoption(
            "--generate-images") is not None:
        baseline_dir = config.getoption("--define-path")
        generate_dir = config.getoption("--generate-images")
        if baseline_dir is not None:
            baseline_dir = os.path.abspath(baseline_dir)
        if generate_dir is not None:
            # BUG FIX: this previously overwrote baseline_dir with the
            # generate dir's abspath and left generate_dir relative.
            generate_dir = os.path.abspath(generate_dir)
        config.pluginmanager.register(
            ImageComparison(config, baseline_dir=baseline_dir,
                            generate_dir=generate_dir))
class ImageComparison(object):
    """pytest plugin: compare FITS files produced by marked tests against
    baseline images, or (re)generate the baselines when --generate-images
    is given."""
    def __init__(self, config, baseline_dir=None, generate_dir=None):
        self.config = config
        self.baseline_dir = baseline_dir
        self.generate_dir = generate_dir
    def _fits_comparison(self, file1, file2, tolerance):
        # Assert that the primary-HDU data of both files match within
        # `tolerance`, using astropy's ImageDataDiff.
        from astropy.io import fits
        from astropy.io.fits.diff import ImageDataDiff
        hdulist1 = fits.open(file1)
        hdulist2 = fits.open(file2)
        data = ImageDataDiff(hdulist1[0].data, hdulist2[0].data,
                             tolerance=tolerance)
        assert data.identical
    def pytest_runtest_setup(self, item):
        # Only intercept tests marked with @pytest.mark.fits_image_compare.
        compare = item.keywords.get('fits_image_compare')
        if compare is None:
            return
        tolerance = compare.kwargs.get('tolerance', 2)
        original = item.function
        @wraps(item.function)
        def item_function_wrapper(*args, **kwargs):
            import inspect
            # Resolve the baseline directory: marker kwarg > plugin-wide
            # setting > a 'baseline' dir next to the test module.
            baseline_dir = compare.kwargs.get('baseline_dir', None)
            if baseline_dir is None:
                if self.baseline_dir is None:
                    baseline_dir = os.path.join(
                        os.path.dirname(item.fspath.strpath), 'baseline')
                else:
                    baseline_dir = self.baseline_dir
            else:
                baseline_dir = os.path.join(
                    os.path.dirname(item.fspath.strpath), baseline_dir)
            if inspect.ismethod(original):
                fig = original(*args[1:], **kwargs)
            else: # function
                fig = original(*args, **kwargs)
            # Find test name to use as plot name
            filename = compare.kwargs.get('filename', None)
            if filename is None:
                filename = inspect.getmodule(original).__name__ +'_' + original.__name__ + '.fits'
            # What we do now depends on whether we are generating the reference
            # images or simply running the test.
            if self.generate_dir is None:
                # Save the figure
                result_dir = tempfile.mkdtemp()
                test_image = os.path.abspath(
                    os.path.join(result_dir, filename))
                fig.writeto(test_image, clobber=True)
                # Find path to baseline image
                baseline_image_ref = os.path.abspath(
                    os.path.join(os.path.dirname(item.fspath.strpath),
                                 baseline_dir, filename))
                if not os.path.exists(baseline_image_ref):
                    raise Exception("""Image file not found for comparison test
                    Generated Image:
                    \t{test}
                    This is expected for new tests.""".format(
                        test=test_image))
                # distutils may put the baseline images in non-accessible places,
                # copy to our tmpdir to be sure to keep them in case of failure
                baseline_image = os.path.abspath(
                    os.path.join(result_dir, 'baseline-' + filename))
                shutil.copyfile(baseline_image_ref, baseline_image)
                self._fits_comparison(baseline_image, test_image, tolerance)
            else:
                if not os.path.exists(self.generate_dir):
                    os.makedirs(self.generate_dir)
                fig.writeto(
                    os.path.abspath(os.path.join(self.generate_dir, filename)),
                    clobber=True)
                pytest.skip("Skipping test, since generating data")
        # Install the wrapper: on the class for method tests, else on the item.
        if item.cls is not None:
            setattr(item.cls, item.function.__name__, item_function_wrapper)
        else:
            item.obj = item_function_wrapper
| {
"repo_name": "guaix-ucm/pytest-fits",
"path": "tests/conftest.py",
"copies": "2",
"size": "5322",
"license": "bsd-3-clause",
"hash": 2789905307434596400,
"line_mean": 37.2877697842,
"line_max": 112,
"alpha_frac": 0.5612551672,
"autogenerated": false,
"ratio": 4.564322469982847,
"config_test": true,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.0004375196926988369,
"num_lines": 139
} |
from functools import wraps
import six
import requests
from dbt.exceptions import RegistryException
from dbt.utils import memoized
import os
# Allow the registry endpoint to be overridden from the environment; fall
# back to the public dbt hub when the variable is unset or empty (both the
# original `if` and `or` treat an empty string as "not set").
DEFAULT_REGISTRY_BASE_URL = (os.getenv('DBT_PACKAGE_HUB_URL') or
                             'https://hub.getdbt.com/')
def _get_url(url, registry_base_url=None):
if registry_base_url is None:
registry_base_url = DEFAULT_REGISTRY_BASE_URL
return '{}{}'.format(registry_base_url, url)
def _wrap_exceptions(fn):
    """Decorator: translate requests connection failures into RegistryException."""
    @wraps(fn)
    def inner(*args, **kwargs):
        try:
            result = fn(*args, **kwargs)
        except requests.exceptions.ConnectionError as conn_err:
            six.raise_from(
                RegistryException('Unable to connect to registry hub'),
                conn_err)
        else:
            return result
    return inner
@_wrap_exceptions
def _get(path, registry_base_url=None):
    """GET a JSON document from the registry and return the decoded payload."""
    response = requests.get(_get_url(path, registry_base_url))
    response.raise_for_status()
    return response.json()
def index(registry_base_url=None):
    """Fetch the registry's package index document."""
    return _get('api/v1/index.json', registry_base_url)
# Memoized variant of index(); repeated calls reuse the first response.
index_cached = memoized(index)
def packages(registry_base_url=None):
    """Fetch the full package listing from the registry."""
    return _get('api/v1/packages.json', registry_base_url)
def package(name, registry_base_url=None):
    """Fetch the registry metadata document for a single package."""
    return _get('api/v1/{}.json'.format(name), registry_base_url)
def package_version(name, version, registry_base_url=None):
    """Fetch the metadata document for one specific version of a package."""
    return _get('api/v1/{}/{}.json'.format(name, version), registry_base_url)
def get_available_versions(name):
    """Return the list of version identifiers published for *name*."""
    return list(package(name)['versions'])
| {
"repo_name": "nave91/dbt",
"path": "dbt/clients/registry.py",
"copies": "1",
"size": "1562",
"license": "apache-2.0",
"hash": 4843871729963999000,
"line_mean": 24.606557377,
"line_max": 77,
"alpha_frac": 0.6805377721,
"autogenerated": false,
"ratio": 3.316348195329087,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.44968859674290873,
"avg_score": null,
"num_lines": null
} |
from functools import wraps
import sys
from asyncio import Queue, coroutine, iscoroutine, async, Future, gather
import logging
import inspect
logger = logging.getLogger('octobot:EventManager')
class RegistrationException(Exception):
    """Raised when an event handler cannot be registered.

    BUG FIX: the original did not inherit from Exception, so on Python 3 it
    could not be raised or caught at all.
    """
    pass
class _EventManager(object):
    """Registry mapping event names to handler functions.

    Handlers are stored per primary event name together with an 'expects'
    spec (the additional event components they require); fired events are
    dispatched to every handler whose spec matches.
    """

    def __init__(self):
        # BUG FIX: the original assigned an unused local `providers = {}`;
        # it was dead code and has been removed.
        self.__registration = {}       # primary event name -> [func, expects] pairs
        self.__module_functions = {}   # module -> functions registered from it
        self.__events = Queue()        # consumed by handleEvents()

    @coroutine
    def handleEvents(self):
        """Consume (event, args, future) tuples from the queue forever.

        NOTE(review): if several handlers match a single event, set_result()
        is invoked more than once on the same future, which raises
        InvalidStateError -- confirm whether multiple matches can occur here.
        """
        while True:
            event, args, future = yield from self.__events.get()
            logger.debug("Handling event {}".format(event))
            for fn, expects in self.__registration[event[0]]:
                fire = True
                if len(event) - 1 != len(expects):
                    continue
                for i in range(len(event)-1):
                    ev = event[i+1].lower()
                    ex = expects[i]
                    if isinstance(ex, list):
                        # A list means "any one of these values matches".
                        if not any(ev == val.lower() for val in ex):
                            logger.error("Won't fire")
                            fire = False
                            break
                    else:
                        if ev.lower() != ex.lower():
                            fire = False
                            break
                if fire:
                    logger.debug("Firing event function: {} with {}".format(fn.__name__, args))
                    ret = fn(event=event, **args)
                    future.set_result(ret)

    @coroutine
    def handle_event(self, event, args):
        """Call every registered handler whose spec matches *event*'s extra
        components; gather and return their results as a list."""
        logger.debug('Handling event {}'.format(event))
        to_call = []
        results = []
        for fn, expects in self.__registration[event[0]]:
            fire = True
            if len(event) - 1 != len(expects):
                continue
            for i in range(len(event)-1):
                ev = event[i+1].lower()
                ex = expects[i]
                if isinstance(ex, list):
                    if not any(ev == val.lower() for val in ex):
                        logger.error("Won't fire")
                        fire = False
                        break
                else:
                    if ev.lower() != ex.lower():
                        fire = False
                        break
            if fire:
                to_call.append(fn(event=event, **args))
        if len(to_call) > 0:
            results = yield from gather(*to_call)
        return results

    def register_class(self, cls):
        """Register every method of *cls* tagged with an __event__ attribute
        (set by the bind_event decorator below)."""
        methods = inspect.getmembers(cls, predicate=inspect.ismethod)
        for _, fn in methods:
            event = getattr(fn, '__event__', None)
            if event is not None:
                logger.debug('Registering {} for {}'.format(fn.__name__, event))
                self.register_function(event, fn)

    def register_function(self, event, func):
        """Register *func* under event[0], expecting event[1:] components."""
        primary = event[0]
        expects = []
        if len(event) > 1:
            expects = event[1:]
        if primary not in self.__registration:
            self.__registration[primary] = []
        self.__registration[primary].append([func, expects])
        # Track per-module registrations so a module can be unloaded later.
        mod = sys.modules[func.__module__]
        if mod not in self.__module_functions:
            self.__module_functions[mod] = []
        self.__module_functions[mod].append(func)

    @coroutine
    def fire_event(self, *event, **kwargs):
        results = yield from self.handle_event(event, kwargs)
        return results

    def unregisterModuleFunctions(self, mod):
        """Drop every handler that was registered from module *mod*.

        BUG FIX: the original iterated over the bare name '__registration'
        (a NameError at runtime) and rebuilt the lists with bare functions
        instead of the [func, expects] pairs the dispatch loops expect.
        """
        if mod not in self.__module_functions:
            return True
        for r in self.__registration:
            self.__registration[r][:] = [
                pair for pair in self.__registration[r]
                if pair[0] not in self.__module_functions[mod]]
        del self.__module_functions[mod]


# Module-level singleton used by the decorator helpers below.
EventManager = _EventManager()
def BindEvent(*event):
    """Decorator that registers the wrapped function with the global
    EventManager for *event* (legacy camelCase variant of bind_event)."""
    def decorator(func):
        @wraps(func)
        def func_wrapper(*args, **kwargs):
            return func(*args, **kwargs)
        if len(event) > 0:
            # BUG FIX: the manager's method is register_function;
            # 'registerFunction' does not exist and raised AttributeError.
            EventManager.register_function(event, func)
        return func_wrapper
    return decorator
def bind_event(*event):
    """Decorator that wraps *func* as a coroutine call and, when an event is
    given, tags the wrapper with ``__event__`` so that
    _EventManager.register_class() can discover and register it later."""
    def decorator(func):
        @wraps(func)
        def func_wrapper(*args, **kwargs):
            target = func
            if not iscoroutine(target):
                target = coroutine(target)
            return target(*args, **kwargs)
        if event:
            func_wrapper.__event__ = event
        return func_wrapper
    return decorator
@coroutine
def fire_event(*event, **kwargs):
    """Module-level convenience wrapper around EventManager.fire_event()."""
    logger.debug("Firing event {} with {}".format(event, kwargs))
    result = yield from EventManager.fire_event(*event, **kwargs)
    return result
| {
"repo_name": "Thezomg/OctoBot",
"path": "octobot/events.py",
"copies": "1",
"size": "4667",
"license": "mit",
"hash": -8123870506636604000,
"line_mean": 31.1862068966,
"line_max": 118,
"alpha_frac": 0.5069637883,
"autogenerated": false,
"ratio": 4.504826254826255,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5511790043126255,
"avg_score": null,
"num_lines": null
} |
from functools import wraps
import sys
from collections import OrderedDict
from . import ui, decorators
# TODO - Add doc strings
# TODO - Add better comments
#-----------------------------------------------------------------------------#
# Helper Functions
#-----------------------------------------------------------------------------#
def do_nothing():
    """No-op callback used as the default 'done' action for Assistants."""
    return None
#-----------------------------------------------------------------------------#
# Main Classes
#-----------------------------------------------------------------------------#
class Navigator(object):
    """Top-level interactive menu: registered Actors are offered as choices
    and the picked one is run, looping until the 'done' choice exits."""

    def __init__(self, message="What do you want to do?", intro=None,
                 done_name='quit', no_confirm=True, default_choice=None):
        self.actors = OrderedDict()
        self.message = message
        self.intro = intro
        self.completed = Actor(done_name, sys.exit)
        self.no_confirm = no_confirm
        self.default_choice = default_choice

    def _add_actor(self, actor):
        # Reject duplicate registrations by name.
        if actor.name in self.actors:
            raise NameError("Name '{}' is already assigned".format(actor.name))
        self.actors[actor.name] = actor

    def route(self, name, blurb=""):
        """Decorator for registering functions"""
        def inner(f):
            self._add_actor(Actor(name, f, blurb))
            @wraps(f)
            def wrapped(*args, **kwargs):
                return f(*args, **kwargs)
            return wrapped
        return inner

    def register_assistant(self, assistant):
        self._add_actor(assistant)

    def __repr__(self):
        return "<Navigator - {}>".format(self.intro)

    def display_info(self):
        if self.intro is not None:
            ui.text_info(self.intro)

    @decorators.catch_exit_keys
    def _do_run(self):
        # The exit/done choice always comes first, then registered actors
        # in registration order.
        choices = [(self.completed.label, self.completed)]
        choices.extend((actor.label, actor) for actor in self.actors.values())
        picked = ui.choice(self.message, choices, self.default_choice)
        if self.no_confirm or ui.confirm("Run {}?".format(picked.name), True):
            picked.run()

    def run(self):
        self.display_info()
        while True:
            self._do_run()
class Assistant(Navigator):
    """A nested menu: behaves like a Navigator but returns to its parent
    (via a no-op 'done' Actor) instead of exiting, and runs only once."""

    def __init__(self, name, blurb, message="What do you want to do?",
                 done_name='back', no_confirm=True, default_choice=None):
        super(Assistant, self).__init__(message=message,
                                        no_confirm=no_confirm,
                                        default_choice=default_choice)
        self.blurb = blurb
        self.name = name
        self.label = "{} - {}".format(name, blurb)
        self.completed = Actor(done_name, do_nothing)

    def __repr__(self):
        return "<Assistant {}>".format(self.label)

    def display_info(self):
        ui.text_info(self.label)

    def run(self):
        # Unlike Navigator.run(), show the menu a single time.
        self.display_info()
        self._do_run()
class Actor(object):
    """A named, runnable menu entry wrapping a zero-argument callable."""

    def __init__(self, name, func, blurb=""):
        self.name = name
        self.blurb = blurb
        self.func = func
        # Only include the blurb in the label when one was provided.
        self.label = "{} - {}".format(name, blurb) if blurb else name

    def __repr__(self):
        return "<Actor {}>".format(self.label)

    def run(self):
        return self.func()
| {
"repo_name": "andytom/navigator",
"path": "navigator/navigator.py",
"copies": "1",
"size": "3279",
"license": "bsd-3-clause",
"hash": -7279851727161895000,
"line_mean": 29.3611111111,
"line_max": 110,
"alpha_frac": 0.5215004575,
"autogenerated": false,
"ratio": 4.19309462915601,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.0005463797130463798,
"num_lines": 108
} |
from functools import wraps
import sys
from etcd import client
from conf.appconfig import HEALTH_OK, HEALTH_FAILED, TOTEM_ETCD_SETTINGS
from orchestrator.services.storage.factory import get_store
from orchestrator.tasks.common import ping
from orchestrator.util import timeout
# Upper bound (seconds) on any single health probe, enforced by @timeout.
HEALTH_TIMEOUT_SECONDS = 10
def _check(func):
    """
    Wrapper that creates a dictionary response containing 'status' and
    'details'.
    where status can be
        'ok': If wrapped function returns successfully.
        'failed': If wrapped function throws error.
    details is:
        returned value from the wrapped function if no exception is thrown
        else string representation of exception when exception is thrown
    :param func: Function to be wrapped
    :return: dictionary output containing keys 'status' and 'details'
    :rtype: dict
    """
    @wraps(func)
    def inner(*args, **kwargs):
        try:
            return {
                'status': HEALTH_OK,
                'details': func(*args, **kwargs)
            }
        # BUG FIX: the original bare 'except:' also swallowed SystemExit and
        # KeyboardInterrupt; catch Exception and use the instance directly
        # instead of re-fetching it via sys.exc_info().
        except Exception as exc:
            return {
                'status': HEALTH_FAILED,
                'details': str(exc)
            }
    return inner
@timeout(HEALTH_TIMEOUT_SECONDS)
@_check
def _check_etcd():
    """Connect to etcd and report its cluster machine list."""
    etcd_cl = client.Client(host=TOTEM_ETCD_SETTINGS['host'],
                            port=TOTEM_ETCD_SETTINGS['port'])
    return {'machines': etcd_cl.machines}
@timeout(HEALTH_TIMEOUT_SECONDS)
@_check
def _check_store():
    """
    Checks health of default store
    """
    store = get_store()
    return store.health()
@timeout(HEALTH_TIMEOUT_SECONDS)
@_check
def _check_celery():
    """
    Checks health for celery integration using ping-pong task output.
    """
    pong = ping.delay().get(timeout=HEALTH_TIMEOUT_SECONDS)
    return 'Celery ping:%s' % pong
def get_health(check_celery=True):
    """
    Gets the health of the all the external services.
    :return: dictionary with
        key: service name like etcd, celery, elasticsearch
        value: dictionary of health status
    :rtype: dict
    """
    health_status = {'etcd': _check_etcd(),
                     'store': _check_store()}
    if check_celery:
        health_status['celery'] = _check_celery()
    return health_status
| {
"repo_name": "totem/cluster-orchestrator",
"path": "orchestrator/services/health.py",
"copies": "1",
"size": "2266",
"license": "mit",
"hash": 2282720362820664300,
"line_mean": 24.4606741573,
"line_max": 74,
"alpha_frac": 0.6306266549,
"autogenerated": false,
"ratio": 3.9615384615384617,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5092165116438462,
"avg_score": null,
"num_lines": null
} |
from functools import wraps
import sys
from flask import Flask, request, abort, g, jsonify, render_template
from src.db.models import Group, User, email_is_valid, Invite
from src.db.database import Database
from flask.ext.cors import CORS, cross_origin
import datetime
import jinja2
import os
import logging
# Defaults to stdout
logging.basicConfig(level=logging.INFO)
# get the logger for the current Python module
loggerObject = logging.getLogger(__name__)
app = Flask(__name__,)
# Enable cross-origin requests app-wide; headers the CORS layer accepts.
cors = CORS(app)
app.config['CORS_HEADERS'] = ['Content-Type', 'Authorization', 'Accept']
# Directory the process was started from; used below when logging templates.
path = os.path.dirname(os.path.abspath(sys.argv[0]))
# for root, dirs, files in os.walk("templates", topdown=True):
# for name in files:
# print(os.path.join(root, name))
# for name in dirs:
# print(os.path.join(root, name))
#
# my_loader = jinja2.ChoiceLoader([
# app.jinja_loader,
# jinja2.FileSystemLoader(os.path.join(path, 'templates/')),
# ])
# app.jinja_loader = my_loader
def log(to_write):
    """Print *to_write* prefixed with a syslog-style timestamp."""
    timestamp = datetime.datetime.now().strftime("%b %d %H:%M:%S")
    print("{} {}".format(timestamp, to_write))
@app.before_request
def init_db():
    # Open a database handle before every request and stash it on flask.g.
    # NOTE(review): credentials are hard-coded in source control -- move the
    # connection string into configuration / an environment variable.
    g.database = Database('mongodb://admin:admin@ds063879.mongolab.com:63879/heroku_app34205970')
def login_required(f):
    """View decorator: abort with 403 unless the Authorization header
    carries a valid 'FFINDER <access_key>' token."""
    @wraps(f)
    def decorated(*args, **kwargs):
        auth_header = request.headers.get('Authorization')
        if check_authorization(auth_header):
            return f(*args, **kwargs)
        abort(403)
    return decorated
# @app.route('/login/twitter', methods=['POST'])
# def login_twitter():
#
# log("Logging in with Twitter.")
#
# username = request.json.get('username')
# user_id = request.json.get('user_id')
# provider_name = request.json.get('provider_name')
# access_token = request.json.get('access_token')
# access_secret = request.json.get('access_secret')
#
# log("Twitter username: {}\nID: {}.".format(username,
# user_id))
#
# user = User.create(email, password)
# log("Created Twitter user.")
# user.save()
# log("Saved Twitter user to database.")
def check_authorization(authorization):
    """Validate an 'FFINDER <token>' Authorization header.

    On success the matching user is stored on flask.g and True is returned;
    any failure returns a falsy value.
    """
    if authorization is None:
        return None  # preserve the original falsy fall-through result
    try:
        scheme, access_key = authorization.split(' ')
    except ValueError:
        return False
    if scheme != 'FFINDER':
        return False
    try:
        g.user = User.get_by_access_token(access_key)
    except User.DoesNotExist:
        return False
    return True
def create_response_data(data, status_code):
    """Build the success envelope returned by the API views."""
    return {'data': data, 'status_code': status_code}
def create_response_error(error_name, error_message, status_code):
    """Build the error envelope returned by the API views."""
    error = {'name': error_name, 'message': error_message}
    return {'error': error, 'status_code': status_code}
@app.route('/users/register', methods=['POST'])
@cross_origin(headers=['Content-Type', 'Authorization', 'Accept'])
def register_user():
    # Register a new user, create their default 'Friends' group, and return
    # the serialized user; invalid input yields a 409-style error envelope.
    email = request.json.get('email')
    password = request.json.get('password')
    if not email_is_valid(email):
        response_data = create_response_error(
            'InvalidEmail',
            'This email is invalid',
            409
        )
        # NOTE(review): the error returns omit the HTTP status tuple used by
        # the success path, so clients receive HTTP 200 with an error body --
        # confirm whether that is intended.
        return jsonify(response_data)
    if not password:
        response_data = create_response_error(
            'InvalidPassword',
            'This password is invalid',
            409
        )
        return jsonify(response_data)
    try:
        user = User.register(email, password)
    except User.EmailAlreadyInUse:
        response_data = create_response_error(
            'UsedEmail',
            'This email is already in use',
            409
        )
        return jsonify(response_data)
    user.save()
    g.user = user
    # Create a Friends default group for the user
    # This group has the same id as the user id
    friends_group = Group.create(group_id=user.id,
                                 name="Friends",
                                 creator=user.id)
    friends_group.save()
    response_data = create_response_data(
        user.to_dict(),
        200
    )
    return jsonify(response_data), response_data['status_code']
@app.route('/users/login', methods=['POST'])
@cross_origin(headers=['Content-Type', 'Authorization', 'Accept'])
def login_user():
    # Authenticate by email/password and return the serialized user; known
    # failures produce 409 error envelopes.
    email = request.json.get('email')
    password = request.json.get('password')
    # NOTE(review): 'not (email or password)' only rejects when BOTH are
    # missing; 'not (email and password)' was probably intended -- confirm.
    if not (email or password):
        response_data = create_response_error(
            'EmptyEmailOrPassword',
            'The email or password is empty',
            409
        )
        return jsonify(response_data)
    try:
        user = User.login(email, password)
    except User.IncorrectEmailOrPassword:
        response_data = create_response_error(
            'IncorrectEmailOrPassword',
            'The email or password is incorrect',
            409
        )
        return jsonify(response_data)
    except User.UserNotExists:
        response_data = create_response_error(
            'UserNotExists',
            'The user was not found in the database!',
            409
        )
        return jsonify(response_data)
    # Remember the authenticated user for the rest of this request.
    g.user = user
    response_data = create_response_data(
        user.to_dict(),
        200
    )
    return jsonify(response_data)
def _error_response(error_name, error_message, status_code):
    """Shared body for the HTTP error handlers below: build the standard
    error envelope and return it with the matching status code."""
    response_data = create_response_error(error_name, error_message,
                                          status_code)
    return jsonify(response_data), response_data['status_code']


@app.errorhandler(400)
def bad_request(e):
    return _error_response('BadRequest', 'Bad request', 400)


@app.errorhandler(403)
def forbidden(e):
    return _error_response('Forbidden', 'Forbidden', 403)


@app.errorhandler(404)
def page_not_found(e):
    return _error_response('PageNotFound', 'Sorry, nothing at this URL', 404)


@app.errorhandler(405)
def method_not_allowed(e):
    return _error_response('MethodNotAllowed',
                           'The method is not allowed for the requested URL',
                           405)


@app.errorhandler(500)
def internal_server_error(e):
    return _error_response('InternalServerError',
                           'The server could not fulfill the request',
                           500)
@app.route('/login/facebook', methods=['POST'])
@cross_origin(headers=['Content-Type', 'Authorization', 'Accept'])
def login_facebook():
    # NOTE(review): despite the name, this simply creates a user from the
    # posted email/password (User.create, no validation, no Facebook token),
    # unlike register_user which validates and uses User.register -- looks
    # unfinished; confirm before relying on it.
    email = request.json.get('email')
    password = request.json.get('password')
    user = User.create(email, password)
    user.save()
    response_data = create_response_data(
        user.to_dict(),
        200
    )
    return jsonify(response_data), response_data['status_code']
@app.route('/users/location', methods=['POST'])
@cross_origin(headers=['Content-Type', 'Authorization', 'Accept'])
@login_required
def update_user_location():
    """Persist the authenticated user's latest lat/lon."""
    log("In update_user_location method")
    lat = request.json.get('lat')
    lon = request.json.get('lon')
    user_id = g.user.id
    log("Got user details in update_user_location")
    User.update_location(user_id, lat, lon)
    log("Updated location")
    response_data = create_response_data("Updated location", 200)
    return jsonify(response_data), response_data['status_code']
@app.route('/groups/<group_id>/locations', methods=['GET'])
@cross_origin(headers=['Content-Type', 'Authorization', 'Accept'])
@login_required
def get_friend_locations(group_id):
    """Return the serialized users (incl. locations) of every group member."""
    group = Group.get_by_id(group_id)
    # Idiom fix: the original appended via ret.extend([single_item]) in a
    # loop; a comprehension expresses the same mapping directly.
    friends = [User.get_by_id(friend_id).to_dict()
               for friend_id in group.users]
    response_data = create_response_data({'friends': friends}, 200)
    return jsonify(response_data), response_data['status_code']
@app.route('/groups/<group_id>/add', methods=['POST'])
@cross_origin(headers=['Content-Type', 'Authorization', 'Accept'])
@login_required
def add_member_to_group(group_id):
    # Add a member by email (inviting unknown addresses by mail) or by user
    # id; returns the updated group, or a 500 envelope when neither is given.
    log("Adding member to group...")
    user_id = request.json.get('user_id')
    user_email = request.json.get('email')
    log("Going to check e-mail and user id...")
    if user_email != "" and user_email is not None and email_is_valid(user_email):
        user = User.get_by_email(user_email)
        if user is not None:
            log("Email: Adding {} to group {}".format(user_email, group_id))
            Group.add_member(group_id, user.id)
        else:
            # Unknown address: send an e-mail invitation instead of adding.
            invite = Invite.create(user_email, g.user.id)
            invite.save()
            invite.send()
    else:
        if user_id != "" and user_id is not None:
            log("ID: Adding {} to group {}".format(user_id, group_id))
            Group.add_member(group_id, user_id)
        else:
            response_data = create_response_error(
                "InternalServerError",
                "The server could not fulfil your request",
                500
            )
            return jsonify(response_data), response_data['status_code']
    response_data = create_response_data(
        Group.get_by_id(group_id).to_dict(),
        200
    )
    return jsonify(response_data), response_data['status_code']
@app.route('/groups', methods=['POST'])
@cross_origin(headers=['Content-Type', 'Authorization', 'Accept'])
@login_required
def create_group():
    """Create a group owned by the authenticated user and return it."""
    group = Group.create(group_id=request.json.get('group_id'),
                         creator=g.user.id,
                         name=request.json.get('name'))
    group.save()
    response_data = create_response_data(group.to_dict(), 200)
    return jsonify(response_data), response_data['status_code']
@app.route('/confirm/<token>', methods=['GET'])
@cross_origin(headers=['Content-Type', 'Authorization', 'Accept'])
def confirm(token):
    """Render the invitation-confirmation page for an invite token."""
    log("Starting confirmation...")
    invite = Invite.get_by_token(token)
    log("Inviter ID: {}".format(invite.inviter_id))
    inviter = User.get_by_id(invite.inviter_id)
    log("Invited by: {}".format(inviter.email))
    try:
        return render_template('invite.html',
                               email=invite.email,
                               token=token,
                               inviter_email=inviter.email), 200
    except Exception as exc:
        # BUG FIX: the original unpacked sys.exc_info() into a variable named
        # 'type' (shadowing the builtin) and logged 'ex.message', which does
        # not exist on Python 3 exceptions; log the type and str() instead.
        loggerObject.error(path + " | " + os.path.join(path, '../../app/templates/'))
        loggerObject.error(type(exc))
        loggerObject.error(str(exc))
        response_data = create_response_error(
            "InternalServerError",
            "The server could not display the template",
            500
        )
        return jsonify(response_data), response_data['status_code']
@app.route('/activate/<token>', methods=['POST'])
@cross_origin(headers=['Content-Type', 'Authorization', 'Accept'])
def activate_invite(token):
    """Set the invited user's password and activate the invite."""
    Invite.activate(token, request.form['password'])
    response_data = create_response_data("Success!", 200)
    return jsonify(response_data), response_data['status_code']
if __name__ == '__main__':
app.run(debug=True) | {
"repo_name": "jslvtr/FriendFinderBackend",
"path": "src/app/FriendFinderBackend.py",
"copies": "1",
"size": "11417",
"license": "mit",
"hash": -8175559130737210000,
"line_mean": 27.1231527094,
"line_max": 97,
"alpha_frac": 0.6031356749,
"autogenerated": false,
"ratio": 3.801864801864802,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9902914620314223,
"avg_score": 0.0004171712901156597,
"num_lines": 406
} |
from functools import wraps
import sys
import datetime
import re
import sys
import os
import time
import traceback
import cgitb
import cgi
from ast import literal_eval as eval
from collections import OrderedDict as odict
from operator import eq, gt, lt, contains
from shot.exc import ShotException, RouteNotFoundError, process_generic_exc, TemplateSyntaxError
from shot.templater import Templater
# Default WSGI response headers.
HEADERS = [
    ('Content-Type', 'text/html'),
    #('Server', str(sys.version.split(maxsplit=1)[0]))
]
# Framework settings; BASE_DIR is wherever the process was started.
settings = dict(
    DEBUG=True,
    ENCODING='utf-8',
    TEMPLATES_DIR='templates',
    BASE_DIR=os.getcwd())
# Static assets shipped with the framework itself.
ASSETS_DIR = os.path.dirname(__file__) + '/assets/'
# url -> (status_code, view_function), in registration order.
APP_ROUTES = odict()
ROUTES_TO_ADD = []
def route(url='', status_code="200 OK"):
    """Decorator: tag *view_function* with its url/status and register it
    in APP_ROUTES under *url*."""
    def deco(view_function):
        view_function.url = url
        view_function.status_code = status_code
        APP_ROUTES[url] = (status_code, view_function)
        return view_function
    return deco
def render(template, context=None):
    'Simple wrapper for Templater'
    engine = Templater(template, context)
    return engine.render()
def process_routes():
    """Scan module globals for callables tagged by @route (they carry a
    'url' attribute) and merge them into APP_ROUTES."""
    discovered = {obj.url: (obj.status_code, obj)
                  for obj in globals().values()
                  if callable(obj) and hasattr(obj, "url")}
    APP_ROUTES.update(discovered)
class HTTPRequest:
    """Thin wrapper over a WSGI environ: exposes request metadata as
    attributes and parsed GET/POST/FILES dictionaries."""

    def __init__(self, environ=None, view_function=None):
        # Defaults so an HTTPRequest can be built without an environ.
        self.method = 'GET'
        self.GET = {}
        self.POST = {}
        self.FILES = {}
        # attribute name -> WSGI environ key it is copied from.
        mapping = dict(
            route='PATH_INFO',
            uri='RAW_URI',
            method='REQUEST_METHOD',
            server='SERVER_NAME',
            referer='HTTP_REFERER',
            agent='HTTP_USER_AGENT',
            accept='HTTP_ACCEPT',
            language='HTTP_ACCEPT_LANGUAGE',
            content_length='CONTENT_LENGTH',
        )
        if environ:
            for x, y in mapping.items(): setattr(self, x, environ.get(y, ''))
            try:
                if self.method == 'GET' and environ.get('QUERY_STRING', ''):
                    # Parse 'a=1&b=2' style query strings into self.GET.
                    self.GET.update(dict([x.split("=") for x in environ.get('QUERY_STRING', '').split("&")]))
                elif self.method == 'POST':
                    post = cgi.FieldStorage(
                        fp=environ['wsgi.input'],
                        environ=environ,
                        keep_blank_values=True
                    )
                    self._post = post
                    for field in post:
                        if getattr(post[field], "filename", None):
                            # Single file upload.
                            self.FILES[field] = post[field].file
                        elif isinstance(post[field], list):
                            # Repeated field: split file parts from values.
                            self.FILES[field], self.POST[field] = [], []
                            for item in post[field]:
                                if getattr(item, "filename", None): self.FILES[field].append(item.file)
                                else: self.POST[field].append(item.value)
                            # Collapse single-item lists; drop empty buckets.
                            for dict_ in (self.FILES, self.POST):
                                if len(dict_[field]) == 1: dict_[field] = dict_[field][0]
                                if not dict_[field]: del dict_[field]
                        else:
                            # Plain form value.
                            self.POST[field] = post.getvalue(field)
            except:
                # NOTE(review): any parsing failure is silently swallowed and
                # leaves GET/POST/FILES partially filled -- confirm intended.
                pass
        if view_function:
            self.view_function = view_function
def application(environ, start_response):
    """WSGI entry point: look up the route, call the view, and return the
    encoded body (framework/generic errors get rendered error pages)."""
    if settings['DEBUG']: cgitb.enable()
    request = HTTPRequest(environ)
    try:
        t1 = time.time()
        process_routes()
        try:
            try:
                status_code, view_function = APP_ROUTES[environ['PATH_INFO']]
                request.view_function = view_function.__name__
            except KeyError: raise RouteNotFoundError(request.route)
            # Eval view function
            data = view_function(request)
            headers = [
                ('Content-type', 'text/html; charset=%s' % settings['ENCODING']),
                ('Content-Length', str(len(data))),
            ]
            start_response(status_code, headers)
            # str bodies are encoded; anything else is passed through as-is.
            if isinstance(data, str):
                return [data.encode(settings.get('ENCODING', 'utf-8'))]
            return [data]
        except ShotException as err:
            # Framework errors know how to render themselves.
            return err.render(request)
        except Exception as err:
            return process_generic_exc(err, request)
    finally:
        # Always log the route and elapsed time, even on error.
        time_data = dict(method=environ['REQUEST_METHOD'], route=environ['PATH_INFO'], time=(time.time() - t1)*1000)
        print(">>> {method} {route}: {time:5.3f} ms".format(**time_data))
| {
"repo_name": "2peppers/shot",
"path": "build/lib/shot/__init__.py",
"copies": "1",
"size": "4642",
"license": "mit",
"hash": 8182177842844190000,
"line_mean": 36.136,
"line_max": 116,
"alpha_frac": 0.5327445067,
"autogenerated": false,
"ratio": 4.2276867030965395,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5260431209796539,
"avg_score": null,
"num_lines": null
} |
from functools import wraps
import sys
import datetime
import re
import sys
import os
import time
import traceback
import cgitb
import cgi
from wsgiref.simple_server import make_server
from collections import OrderedDict as odict
from operator import eq, gt, lt, contains
from shot.exc import RouteFormatError, RouteNotFoundError, process_generic_exc, TemplateSyntaxError, ShotException
from shot.templater import Templater
# Default WSGI response headers.
HEADERS = [
    ('Content-Type', 'text/html'),
]
# Framework settings; BASE_DIR is wherever the process was started.
settings = dict(
    DEBUG=True,
    SHOW_TIMER=False,
    ENCODING='utf-8',
    TEMPLATES_DIR='templates',
    BASE_DIR=os.getcwd())
# Static assets shipped with the framework itself.
ASSETS_DIR = os.path.dirname(__file__) + '/assets/'
# APP_ROUTES = odict()
# Registered Route objects, matched in registration order.
APP_ROUTES = []
ROUTES_TO_ADD = []
# Maps a route parameter type name to the regex fragment that matches it.
# FIX: raw strings avoid invalid-escape-sequence warnings for '\w', '\d',
# '\.' etc. on modern Python; the values are byte-identical to the originals.
ROUTE_TYPES = dict(
    str=r'\w+',
    int=r'\d+',
    float=r'[\d\.]+',
    path=r'[\w_\-\./]+',
)
# Converters applied to captured URL parameters; 'path' stays a string.
_ROUTE_PARAM_CONVERTERS = {'str': str, 'int': int, 'float': float, 'path': str}


class Route:
    """One URL pattern. Parses '<type:name>' placeholders, validates them,
    compiles an equivalent regex, and extracts typed kwargs on a match."""

    def __init__(self, url, status_code, function):
        self.url = url
        self.status_code = status_code
        self.function = function
        self.params = []      # (type_name, param_name) in appearance order
        params_vars = []
        for s in re.finditer(r'<\s*(?:(?P<type>str|int|float|path):)?\s*(?P<param>\w+)>\s*', url):
            if s:
                if s.group('param') in params_vars:
                    raise RouteFormatError(url, 'Wrong route - repeated parameter')
                type_ = s.group('type') or 'str'
                if type_ not in ROUTE_TYPES:
                    raise RouteFormatError(url, 'Wrong parameter type')
                self.params.append((type_, s.group('param')))
                params_vars.append(s.group('param'))
        self.regexp = "^" + url
        # Make the trailing slash optional either way.
        self.regexp += '?$' if self.regexp.endswith('/') else '/?$'
        for t, p in self.params:
            self.regexp = re.sub('<(?:{}:)?{}>'.format(t, p), '(?P<{}>{})'.format(p, ROUTE_TYPES[t]), self.regexp)

    def __str__(self):
        return self.url

    def __call__(self, url):
        """Return (status_code, function, kwargs) if *url* matches, else None."""
        match, kwargs = re.match(self.regexp, url), {}
        if match:
            for type_, param in self.params:
                # FIX: the original looked the converter up with eval(type_);
                # an explicit mapping avoids eval and is equally fast.
                kwargs[param] = _ROUTE_PARAM_CONVERTERS[type_](match.group(param))
            return self.status_code, self.function, kwargs
def route(url='', status_code="200 OK"):
    """Decorator: build a Route for *url* and append it to APP_ROUTES."""
    def deco(view_function):
        APP_ROUTES.append(Route(url, status_code, view_function))
        return view_function
    return deco
def render(template, context=None):
    'Simple wrapper for Templater'
    engine = Templater(template, context)
    return engine.render()
class HTTPRequest:
    """Thin wrapper over a WSGI environ: exposes request metadata as
    attributes and parsed GET/POST/FILES dictionaries."""

    def __init__(self, environ=None, view_function=None):
        # Defaults so an HTTPRequest can be built without an environ.
        self.method = 'GET'
        self.GET = {}
        self.POST = {}
        self.FILES = {}
        # attribute name -> WSGI environ key it is copied from.
        mapping = dict(
            route='PATH_INFO',
            uri='RAW_URI',
            method='REQUEST_METHOD',
            server='SERVER_NAME',
            referer='HTTP_REFERER',
            agent='HTTP_USER_AGENT',
            accept='HTTP_ACCEPT',
            language='HTTP_ACCEPT_LANGUAGE',
            content_length='CONTENT_LENGTH',
        )
        if environ:
            for x, y in mapping.items(): setattr(self, x, environ.get(y, ''))
            try:
                if self.method == 'GET' and environ.get('QUERY_STRING', ''):
                    # Parse 'a=1&b=2' style query strings into self.GET.
                    self.GET.update(dict([x.split("=") for x in environ.get('QUERY_STRING', '').split("&")]))
                elif self.method == 'POST':
                    post = cgi.FieldStorage(
                        fp=environ['wsgi.input'],
                        environ=environ,
                        keep_blank_values=True
                    )
                    self._post = post
                    for field in post:
                        if getattr(post[field], "filename", None):
                            # Single file upload.
                            self.FILES[field] = post[field].file
                        elif isinstance(post[field], list):
                            # Repeated field: split file parts from values.
                            self.FILES[field], self.POST[field] = [], []
                            for item in post[field]:
                                if getattr(item, "filename", None): self.FILES[field].append(item.file)
                                else: self.POST[field].append(item.value)
                            # Collapse single-item lists; drop empty buckets.
                            for dict_ in (self.FILES, self.POST):
                                if len(dict_[field]) == 1: dict_[field] = dict_[field][0]
                                if not dict_[field]: del dict_[field]
                        else:
                            # Plain form value.
                            self.POST[field] = post.getvalue(field)
            except:
                # NOTE(review): any parsing failure is silently swallowed and
                # leaves GET/POST/FILES partially filled -- confirm intended.
                pass
        if view_function:
            self.view_function = view_function
def _show_timer(app):
'Simple timer decorator - show URL and time spent on it after rendering response'
@wraps(app)
def wrapper(environ, *args, **kwargs):
if settings['SHOW_TIMER']:
try:
t1 = time.time()
return app(environ, *args, **kwargs)
finally:
time_data = dict(method=environ['REQUEST_METHOD'],
route=environ['PATH_INFO'],
time=(time.time() - t1)*1000,
host=environ['REMOTE_ADDR'])
print(">>> {host} - {method} {route}: {time:5.3f} ms".format(**time_data))
else:
return app(environ, *args, **kwargs)
return wrapper
@_show_timer
def application(environ, start_response):
    """WSGI entry point: match a Route, call its view, and return the encoded
    body; framework and generic errors get rendered error responses."""
    if settings['DEBUG']: cgitb.enable()
    request = HTTPRequest(environ)
    response_started = False
    headers = HEADERS
    try:
        # First registered Route that matches the path wins.
        for route in APP_ROUTES:
            result = route(environ['PATH_INFO'])
            if result:
                status_code, view_function, data_kwargs = result
                break
        else:
            # No registered route matched the requested path.
            raise RouteNotFoundError(request.route)
        request.view_function = view_function.__name__
        data = view_function(request, **data_kwargs)
        headers = [
            ('Content-type', 'text/html; charset=%s' % settings['ENCODING']),
            ('Content-Length', str(len(data))),
        ]
        start_response(status_code, headers)
        response_started = True
        # str bodies are encoded; anything else is passed through as-is.
        if isinstance(data, str):
            return [data.encode(settings.get('ENCODING', 'utf-8'))]
        return [data]
    except ShotException as err:
        # Only start the response once; it may already be underway.
        if not response_started:
            start_response('500 Internal Server Error', headers)
            response_started = True
        return err.render(request)
    except Exception as err:
        if not response_started: start_response('500 Internal Server Error', headers)
        return process_generic_exc(err, request)
def run(host='', port=8000, app=application):
    """Serve *app* with wsgiref's simple development server (blocks forever)."""
    print("*** Running SHOT dev server on {host}:{port} ***".format(port=port, host=host if host else 'localhost'))
    server = make_server(host, port, app)
    server.serve_forever()
| {
"repo_name": "2peppers/shot",
"path": "shot/__init__.py",
"copies": "1",
"size": "6870",
"license": "mit",
"hash": 8146471874253181000,
"line_mean": 35.7379679144,
"line_max": 115,
"alpha_frac": 0.534643377,
"autogenerated": false,
"ratio": 4.0917212626563435,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5126364639656343,
"avg_score": null,
"num_lines": null
} |
from functools import wraps
import sys
import logging
from django.db.models.fields import AutoField
from django.db.models.sql import aggregates as sqlaggregates
from django.db.models.sql.constants import LOOKUP_SEP, MULTI, SINGLE
from django.db.models.sql.where import AND, OR
from django.db.utils import DatabaseError, IntegrityError
from django.utils.tree import Node
from django.db.models.sql.compiler import MULTI, empty_iter
from google.appengine.api.datastore import Entity, Query, MultiQuery, \
Put, Get, Delete
from google.appengine.api.datastore_errors import Error as GAEError
from google.appengine.api.datastore_types import Key, Text
from google.appengine.ext import db
from djangotoolbox.db.basecompiler import (
NonrelQuery,
NonrelCompiler,
NonrelInsertCompiler,
NonrelUpdateCompiler,
NonrelDeleteCompiler)
from .base import InvalidGaeKey
from .db_settings import get_model_indexes
from .expressions import ExpressionEvaluator
from .utils import commit_locked
from ..fields import AncestorKey
# Valid query types (a dictionary is used for speedy lookups).
# Maps a Django lookup name to its datastore comparison operator; None
# marks lookups that need special handling in the compiler.
OPERATORS_MAP = {
    'exact': '=',
    'gt': '>',
    'gte': '>=',
    'lt': '<',
    'lte': '<=',
    # The following operators are supported with special code below.
    'isnull': None,
    'in': None,
    'startswith': None,
    'range': None,
    'year': None,
}

# GAE filters used for negated Django lookups.
NEGATION_MAP = {
    'gt': '<=',
    'gte': '<',
    'lt': '>=',
    'lte': '>',
    # TODO: Support: "'exact': '!='" (it might actually become
    # individual '<' and '>' queries).
}

# In some places None is an allowed value, and we need to distinguish
# it from the lack of value.
NOT_PROVIDED = object()
def safe_call(func):
    """
    Causes the decorated function to reraise GAE datastore errors as
    Django DatabaseErrors.
    """
    @wraps(func)
    def _func(*args, **kwargs):
        try:
            return func(*args, **kwargs)
        # Python 2 'raise type, value, traceback' form: re-raise as a
        # DatabaseError while preserving the original datastore traceback.
        except GAEError, e:
            raise DatabaseError, DatabaseError(str(e)), sys.exc_info()[2]
    return _func
class GAEQuery(NonrelQuery):
    """
    A simple App Engine query: no joins, no distinct, etc.

    Internally keeps a *list* of datastore ``Query`` objects in
    ``self.gae_query``; ``__in`` and negated ``__exact`` filters fan this
    list out into several sub-queries, which are executed together via
    ``MultiQuery`` (see ``_combine_filters`` / ``_build_query``).
    """
    # ----------------------------------------------
    # Public API
    # ----------------------------------------------
    def __init__(self, compiler, fields):
        super(GAEQuery, self).__init__(compiler, fields)
        self.inequality_field = None
        self.included_pks = None
        self.ancestor_key = None
        self.excluded_pks = ()
        self.has_negated_exact_filter = False
        self.ordering = []
        self.db_table = self.query.get_meta().db_table
        # Keys-only queries are cheaper; use one when only the pk is selected.
        self.pks_only = (len(fields) == 1 and fields[0].primary_key)
        start_cursor = getattr(self.query, '_gae_start_cursor', None)
        end_cursor = getattr(self.query, '_gae_end_cursor', None)
        self.gae_query = [Query(self.db_table, keys_only=self.pks_only,
                                cursor=start_cursor, end_cursor=end_cursor)]

    # This is needed for debugging.
    def __repr__(self):
        return '<GAEQuery: %r ORDER %r>' % (self.gae_query, self.ordering)

    @safe_call
    def fetch(self, low_mark=0, high_mark=None):
        """Yield result entities for the slice [low_mark:high_mark]."""
        query = self._build_query()
        executed = False
        if self.excluded_pks and high_mark is not None:
            # Excluded pks are skipped client-side below, so over-fetch
            # to compensate.
            high_mark += len(self.excluded_pks)
        if self.included_pks is not None:
            # Pk-batch-get path (see add_filter's primary-key optimization).
            results = self.get_matching_pk(low_mark, high_mark)
        else:
            if high_mark is None:
                kw = {}
                if low_mark:
                    kw['offset'] = low_mark
                results = query.Run(**kw)
                executed = True
            elif high_mark > low_mark:
                results = query.Get(high_mark - low_mark, low_mark)
                executed = True
            else:
                # Empty slice.
                results = ()
        for entity in results:
            # Keys-only queries return Key objects instead of entities.
            if isinstance(entity, Key):
                key = entity
            else:
                key = entity.key()
            if key in self.excluded_pks:
                continue
            yield self._make_entity(entity)
        if executed and not isinstance(query, MultiQuery):
            # Save the compiled cursor so callers can resume pagination.
            # Narrowed from a bare "except:"; cursor support is
            # best-effort and absence of it must not break the fetch.
            try:
                self.query._gae_cursor = query.GetCompiledCursor()
            except Exception:
                pass

    @safe_call
    def count(self, limit=NOT_PROVIDED):
        """Return the number of matching entities."""
        if self.included_pks is not None:
            return len(self.get_matching_pk(0, limit))
        if self.excluded_pks:
            # Exclusions are applied client-side, so fetch and count.
            return len(list(self.fetch(0, 2000)))
        # The datastore's Count() method has a 'limit' kwarg, which has
        # a default value (obviously). This value can be overridden to
        # anything you like, and importantly can be overridden to
        # unlimited by passing a value of None. Hence *this* method
        # has a default value of NOT_PROVIDED, rather than a default
        # value of None
        kw = {}
        if limit is not NOT_PROVIDED:
            kw['limit'] = limit
        return self._build_query().Count(**kw)

    @safe_call
    def delete(self):
        """Delete every entity matched by this query."""
        if self.included_pks is not None:
            keys = [key for key in self.included_pks if key is not None]
        else:
            keys = self.fetch()
        # fetch() is a generator; materialize it so the truth test and
        # Delete() see a concrete key list.
        keys = list(keys)
        if keys:
            Delete(keys)

    @safe_call
    def order_by(self, ordering):
        """Record the requested sort order for _build_query()."""
        # GAE doesn't have any kind of natural ordering?
        if not isinstance(ordering, bool):
            for field, ascending in ordering:
                # GAE uses the magic '__key__' property for pk ordering.
                column = '__key__' if field.primary_key else field.column
                direction = Query.ASCENDING if ascending else Query.DESCENDING
                self.ordering.append((column, direction))

    def _decode_child(self, child):
        """Decode one where-tree leaf into (column, lookup_type, value)."""
        # HACKY: If this is an ancestor lookup, then just special case
        # to return the ID, a special ancestor lookup, and the ancestor instance.
        constraint, lookup_type, annotation, value = child
        if constraint.col == '__ancestor':
            return ('id', 'ancestor', value)
        try:
            return super(GAEQuery, self)._decode_child(child)
        except InvalidGaeKey:
            # A malformed key can still "match nothing" for a positive
            # filter, but a negated filter on it is unanswerable.
            if not self._negated:
                raise
            else:
                raise DatabaseError("Invalid value for a key lookup on GAE.")

    @safe_call
    def add_filter(self, field, lookup_type, negated, value):
        """
        This function is used by the default add_filters()
        implementation.
        """
        if lookup_type == 'ancestor':
            # value is a model instance; build its datastore key.
            self.ancestor_key = Key.from_path(value._meta.db_table, value.pk)
            return
        if lookup_type not in OPERATORS_MAP:
            raise DatabaseError("Lookup type %r isn't supported." %
                                lookup_type)
        # GAE does not let you store empty lists, so we can tell
        # upfront that querying for one will return nothing.
        if value in ([], ()) and not negated:
            self.included_pks = []
            return
        # Optimization: batch-get by key; this is only suitable for
        # primary keys, not for anything that uses the key type.
        if field.primary_key and lookup_type in ('exact', 'in'):
            if self.included_pks is not None:
                raise DatabaseError("You can't apply multiple AND "
                                    "filters on the primary key. "
                                    "Did you mean __in=[...]?")
            if not isinstance(value, (tuple, list)):
                value = [value]
            pks = [pk for pk in value if pk is not None]
            if field.rel:
                # Foreign-key pks carry the related model's kind; rebuild
                # them against this model's table.
                pks = [Key.from_path(self.db_table, pk.id_or_name())
                       for pk in pks]
            if negated:
                self.excluded_pks = pks
            else:
                self.included_pks = pks
            return
        # We check for negation after lookup_type isnull because it
        # simplifies the code. All following lookup_type checks assume
        # that they're not negated.
        if lookup_type == 'isnull':
            if (negated and value) or not value:
                # TODO/XXX: Is everything greater than None?
                op = '>'
            else:
                op = '='
            value = None
        elif negated and lookup_type == 'exact':
            # exclude(x=v) becomes (x < v) OR (x > v): two sub-queries.
            if self.has_negated_exact_filter:
                raise DatabaseError("You can't exclude more than one __exact "
                                    "filter.")
            self.has_negated_exact_filter = True
            self._combine_filters(field, (('<', value), ('>', value)))
            return
        elif negated:
            try:
                op = NEGATION_MAP[lookup_type]
            except KeyError:
                raise DatabaseError("Lookup type %r can't be negated." %
                                    lookup_type)
            # The datastore only allows inequality filters on one property.
            if self.inequality_field and field != self.inequality_field:
                raise DatabaseError("Can't have inequality filters on "
                                    "multiple fields (here: %r and %r)." %
                                    (field, self.inequality_field))
            self.inequality_field = field
        elif lookup_type == 'in':
            # Create sub-query combinations, one for each value.
            if len(self.gae_query) * len(value) > 30:
                raise DatabaseError("You can't query against more than "
                                    "30 __in filter value combinations.")
            op_values = [('=', v) for v in value]
            self._combine_filters(field, op_values)
            return
        elif lookup_type == 'startswith':
            # Lookup argument was converted to [arg, arg + u'\ufffd'].
            self._add_filter(field, '>=', value[0])
            self._add_filter(field, '<=', value[1])
            return
        elif lookup_type in ('range', 'year'):
            self._add_filter(field, '>=', value[0])
            op = '<=' if lookup_type == 'range' else '<'
            self._add_filter(field, op, value[1])
            return
        else:
            op = OPERATORS_MAP[lookup_type]
        self._add_filter(field, op, value)

    # ----------------------------------------------
    # Internal API
    # ----------------------------------------------
    def _add_filter(self, field, op, value):
        """Add one 'column op' filter to every current sub-query."""
        for query in self.gae_query:
            # GAE uses a special property name for primary key filters.
            if field.primary_key:
                column = '__key__'
            else:
                column = field.column
            key = '%s %s' % (column, op)
            if isinstance(value, Text):
                raise DatabaseError("TextField is not indexed, by default, "
                                    "so you can't filter on it. Please add "
                                    "an index definition for the field %s "
                                    "on the model %s.%s as described here:\n"
                                    "http://www.allbuttonspressed.com/blog/django/2010/07/Managing-per-field-indexes-on-App-Engine" %
                                    (column, self.query.model.__module__,
                                     self.query.model.__name__))
            if key in query:
                # The same "column op" can be filtered more than once;
                # the datastore Query accepts a list of values for it.
                existing_value = query[key]
                if isinstance(existing_value, list):
                    existing_value.append(value)
                else:
                    query[key] = [existing_value, value]
            else:
                query[key] = value

    def _combine_filters(self, field, op_values):
        """Cross-product the current sub-queries with (op, value) pairs.

        Each existing sub-query is cloned once per pair, producing
        len(gae_query) * len(op_values) sub-queries overall.
        """
        gae_query = self.gae_query
        combined = []
        for query in gae_query:
            for op, value in op_values:
                self.gae_query = [Query(self.db_table,
                                        keys_only=self.pks_only)]
                self.gae_query[0].update(query)
                self._add_filter(field, op, value)
                combined.append(self.gae_query[0])
        self.gae_query = combined

    def _make_entity(self, entity):
        """Normalize a Key or Entity into a dict keyed by column names."""
        if isinstance(entity, Key):
            key = entity
            entity = {}
        else:
            key = entity.key()
        entity[self.query.get_meta().pk.column] = key
        return entity

    @safe_call
    def _build_query(self):
        """Apply ordering/ancestor and collapse sub-queries into one query."""
        for query in self.gae_query:
            query.Order(*self.ordering)
            # This is an ancestor query.
            if self.ancestor_key:
                query.Ancestor(self.ancestor_key)
        if len(self.gae_query) > 1:
            return MultiQuery(self.gae_query, self.ordering)
        return self.gae_query[0]

    def get_matching_pk(self, low_mark=0, high_mark=None):
        """Batch-Get the included pks, then filter/sort/slice in memory."""
        if not self.included_pks:
            return []
        results = self.results_match_filters(Get(self.included_pks),
                                             self.query.where)
        if self.ordering:
            results.sort(cmp=self.order_pk_filtered)
        results = results[low_mark:high_mark]
        return results

    def order_pk_filtered(self, lhs, rhs):
        """cmp()-style comparator used when sorting batch-Get results."""
        left = dict(lhs)
        left[self.query.get_meta().pk.column] = lhs.key().to_path()
        right = dict(rhs)
        right[self.query.get_meta().pk.column] = rhs.key().to_path()
        return self._order_in_memory(left, right)

    def results_match_filters(self, results, query_where):
        """
        Re-apply the query's where tree to entities fetched by key,
        returning only those that still satisfy all constraints.

        Example of the where structure handled here:

        [('AND',
          [(<django.db.models.fields.CharField: session_key>,
            'exact',
            datastore_types.Key.from_path(u'django_session', u'128d5afd0780589c84b5edee0333372d', _app=u'dev~g-exams')),
           (<django.db.models.fields.DateTimeField: expire_date>,
            'gt',
            datetime.datetime(2013, 11, 5, 12, 45, 21, 50799))])]
        """
        import datetime
        from djangotoolbox.db.basecompiler import EMULATED_OPS

        class ParseNode(object):
            """One node of the parsed where tree: connector + children."""
            def __init__(self, where):
                self.connector = where.connector
                self.children = []
                self.negated = where.negated

            def matches(self, item):
                # AND starts True and fails fast; OR starts False and
                # succeeds fast.
                result = self.connector == AND
                for child in self.children:
                    if isinstance(child, ParseNode):
                        submatch = child.matches(item)
                    else:
                        field, lookup_type, lookup_value = child
                        entity_value = item[field.column]
                        if entity_value is None:
                            if isinstance(lookup_value,
                                          (datetime.datetime, datetime.date,
                                           datetime.time)):
                                # None sorts below any date/time value.
                                submatch = lookup_type in ('lt', 'lte')
                            elif lookup_type in (
                                    'startswith', 'contains', 'endswith',
                                    'iexact', 'istartswith', 'icontains',
                                    'iendswith'):
                                submatch = False
                            else:
                                submatch = EMULATED_OPS[lookup_type](
                                    entity_value, lookup_value)
                        elif field.primary_key and field.rel and lookup_type == 'exact':
                            # When we have a foreignkey that's a primary key,
                            # things get weird: the query might be filtering
                            # on Key('related_model', id) but we're actually
                            # looking up on Key('this_model', id). So here we
                            # do a kindless comparison.
                            expected = Key.from_path(
                                field.model._meta.db_table,
                                lookup_value.id_or_name())
                            submatch = expected == entity_value
                        else:
                            submatch = EMULATED_OPS[lookup_type](
                                entity_value, lookup_value)
                    if self.connector == OR and submatch:
                        result = True
                        break
                    elif self.connector == AND and not submatch:
                        result = False
                        break
                if self.negated:
                    return not result
                return result

        def _parse_tree(_where):
            # Leaf tuples decode to (field, lookup_type, value); Node
            # instances recurse, unwrapping single-child nodes.
            if isinstance(_where, tuple):
                return self._decode_child(_where)
            node = ParseNode(_where)
            for child in _where.children:
                if isinstance(child, Node) and child.children:
                    if len(child.children) == 1:
                        next_level = _parse_tree(child.children[0])
                    else:
                        next_level = _parse_tree(child)
                else:
                    next_level = self._decode_child(child)
                node.children.append(next_level)
            return node

        tree = _parse_tree(query_where)
        output = []
        for entity in results:
            if entity is None:
                # Get() returns None for keys that no longer exist.
                continue
            item = dict(entity)
            item[self.query.get_meta().pk.column] = entity.key()
            if tree.matches(item):
                output.append(entity)
        return output

    def matches_filters(self, entity):
        """
        Checks if the GAE entity fetched from the database satisfies
        the current query's constraints.
        """
        item = dict(entity)
        item[self.query.get_meta().pk.column] = entity.key()
        return self._matches_filters(item, self.query.where)
class SQLCompiler(NonrelCompiler):
    """
    Base class for all GAE compilers.

    InvalidGaeKey signals a filter value that can never match a real
    datastore key, so each override below translates it into an empty
    result instead of letting it propagate.
    """
    query_class = GAEQuery
    def get_count(self, check_exists=False):
        # A filter on an invalid key can't match anything.
        try:
            return super(SQLCompiler, self).get_count(check_exists)
        except InvalidGaeKey:
            return 0
    def execute_sql(self, result_type=MULTI):
        try:
            return super(SQLCompiler, self).execute_sql(result_type)
        except InvalidGaeKey:
            if result_type == MULTI:
                return empty_iter()
            else:
                return
    def results_iter(self):
        try:
            for x in super(SQLCompiler, self).results_iter():
                yield x
        except InvalidGaeKey:
            # Python 2 idiom: iter([]).next() raises StopIteration
            # immediately, ending this generator with no results.
            yield iter([]).next()
class SQLInsertCompiler(NonrelInsertCompiler, SQLCompiler):
    """INSERT compiler: turns row dicts into datastore Entities and Put()s them."""

    def execute_sql(self, *a, **kw):
        try:
            return super(SQLInsertCompiler, self).execute_sql(*a, **kw)
        except InvalidGaeKey:
            # Fixed typo in the error message ("Ivalid" -> "Invalid").
            raise DatabaseError("Invalid value for a key filter on GAE.")

    @safe_call
    def insert(self, data_list, return_id=False):
        """Insert one Entity per row dict in data_list.

        Returns the first datastore Key assigned by Put() (or the single
        Key when Put() does not return a list).
        """
        opts = self.query.get_meta()
        # Field names declared unindexed in the per-model index settings,
        # translated to their datastore property (column) names.
        unindexed_fields = get_model_indexes(self.query.model)['unindexed']
        unindexed_cols = [opts.get_field(name).column
                          for name in unindexed_fields]
        entity_list = []
        ancestor_keys = []
        for data in data_list:
            properties = {}
            kwds = {'unindexed_properties': unindexed_cols}
            for column, value in data.items():
                # The value will already be a db.Key, but the Entity
                # constructor takes a name or id of the key, and will
                # automatically create a new key if neither is given.
                if column == opts.pk.column:
                    if value is not None:
                        if isinstance(value, AncestorKey):
                            ancestor_keys.append(value)
                        kwds['id'] = value.id()
                        kwds['name'] = value.name()
                        kwds['parent'] = value.parent()
                # GAE does not store empty lists (and even does not allow
                # passing empty lists to Entity.update) so skip them.
                elif isinstance(value, (tuple, list)) and not len(value):
                    continue
                # Use column names as property names.
                else:
                    properties[column] = value
            entity = Entity(opts.db_table, **kwds)
            entity.update(properties)
            entity_list.append(entity)
        keys = Put(entity_list)
        # Propagate the datastore-assigned ids back onto the AncestorKey
        # wrappers so callers can read them after the save.
        if ancestor_keys and len(ancestor_keys) == len(keys):
            for ancestor_key, key in zip(ancestor_keys, keys):
                ancestor_key.key_id = key.id_or_name()
        return keys[0] if isinstance(keys, list) else keys
class SQLUpdateCompiler(NonrelUpdateCompiler, SQLCompiler):
    """UPDATE compiler: scans the matching pks, then rewrites each entity."""

    def execute_sql(self, result_type=MULTI):
        """Fetch only primary keys, update each matched entity.

        Returns the number of rows touched.
        """
        primary_key = self.query.model._meta.pk
        # Restrict the SELECT to the pk column; values are rewritten
        # entity-by-entity in update_entity().
        self.query.add_immediate_loading([primary_key.name])
        rows = list(self.results_iter())
        self.update_entities(rows, primary_key)
        return len(rows)

    def update_entities(self, pks, pk_field):
        """Apply the pending update to each pk row (rows are 1-tuples)."""
        for row in pks:
            self.update_entity(row[0], pk_field)

    @commit_locked
    def update_entity(self, pk, pk_field):
        """Re-fetch one entity, re-check the filters, and write new values."""
        query = self.build_query()
        entity = Get(self.ops.value_for_db(pk, pk_field))
        # The entity may have changed since the pk scan; skip it if it
        # no longer matches the query's filters.
        if not query.matches_filters(entity):
            return
        for field, _, new_value in self.query.values:
            if hasattr(new_value, 'prepare_database_save'):
                new_value = new_value.prepare_database_save(field)
            else:
                new_value = field.get_db_prep_save(
                    new_value, connection=self.connection)
            # Expression values are evaluated against the fetched entity;
            # joins are not available on the datastore.
            if hasattr(new_value, 'evaluate'):
                assert not new_value.negated
                assert not new_value.subtree_parents
                new_value = ExpressionEvaluator(
                    new_value, self.query, entity, allow_joins=False)
            if hasattr(new_value, 'as_sql'):
                new_value = new_value.as_sql(lambda n: n, self.connection)
            entity[field.column] = self.ops.value_for_db(new_value, field)
        Put(entity)
class SQLDeleteCompiler(NonrelDeleteCompiler, SQLCompiler):
    # Deletion needs no GAE-specific behavior beyond the nonrel base
    # class and GAEQuery.delete().
    pass
| {
"repo_name": "potatolondon/djangoappengine-1-4",
"path": "db/compiler.py",
"copies": "1",
"size": "22103",
"license": "bsd-3-clause",
"hash": 6222663851229825000,
"line_mean": 35.3536184211,
"line_max": 133,
"alpha_frac": 0.529611365,
"autogenerated": false,
"ratio": 4.351840913565662,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5381452278565662,
"avg_score": null,
"num_lines": null
} |
from functools import wraps
import sys
import os
from datetime import datetime
import logging
import requests
class TinyETL:
    """Manages facts about an ETL Process.
    Provides a consistent interface for storing log location,
    temporary data locations, and a way to facilitate dry-run
    and logging on Fabric-based ETL scripts.
    USAGE:
    =====
    etl = TinyETL(
        'an_etl_job',
        long_desc,
        env=env, # This `env` will be provided by Fabric. [from fabric.api import env]
        log_dir="/path/to/a/log/directory",
        tmpdata_dir="/path/to/tmpdata/directory",
        # Optionally, Create additional runtime attributes here
        another_relevant_dir="path/to/relevant/dir"
    )
    Instantiating this object will alter the behavior of your fabfile.py.
    Specifically, fab will require you to set the `dry_run` parameter explicitly
    if you'll be invoking a task.
    `fab --list` will work as expected.
    `fab main_task` will complain that `dry_run` has not be explicitly set.
    INVOCATION:
    ==========
    `fab main_task --set dry_run=True`
    LOG DECORATOR:
    =============
    This also provides a decorator for any tasks you want to log.
    Apply `@etl.log` as the innermost decorator to a task and it
    will be logged.
    """
    def __init__(self, name, long_desc, env, log_dir, tmpdata_dir, **kwargs):
        """
        name [str] -> Short name to ETL task. Used in creating logfile names.
        long_desc [str] -> Docstring description of this task.
        env [env object] -> The env object provided by Fabric.
        log_dir [str] -> Absolute path to the directory to store logs in.
        tmpdata_dir [str] -> Absolute path to the directory to store temp data in.
        """
        # If there are no tasks to be run at invocation,
        # don't bother with the rest of the object __init__
        if env.tasks == []:
            return
        self.name = name
        self.long_desc = long_desc
        self.dry_run = self._this_is_a_dry_run(env)
        self.log_dir = log_dir
        self.tmpdata_dir = tmpdata_dir
        if not self.dry_run:
            # NOTE(review): colons in the timestamp make these filenames
            # invalid on Windows -- confirm the target platforms.
            self.logname = "{}_{}".format(self.name, datetime.now().strftime('%Y-%m-%d_%H:%M:%S'))
            self.logfile = os.path.join(self.log_dir, self.logname + '.log')
            self.logger = self._create_logger()
        # This allows the user to store relevant data on the
        # object they've created, without needing to anticipate
        # every possible type of value a user may want to store.
        self.__dict__.update(kwargs)
    def usage(self):
        """Abort with a message explaining the required dry_run flag."""
        msg = "Please provide either 'True' or 'False' to dry_run.\n"
        msg += "Usage: fab <tasks> --set dry_run=[True|False]"
        raise SystemExit(msg)
    def _this_is_a_dry_run(self, env):
        """ Determines if this is a dry run. """
        try:
            dry_run = env.dry_run
        except AttributeError:
            # --set dry_run=... was not supplied at all.
            self.usage()
        if dry_run not in ('True', 'False'):
            self.usage()
        else:
            # Convert the passed-in string val to a bool before returning
            return {'True': True, 'False': False}.get(dry_run)
    def _create_logger(self):
        """Build a DEBUG-level file logger writing to self.logfile."""
        # See https://wingware.com/psupport/python-manual/2.3/lib/node304.html
        logger = logging.getLogger(self.name)
        hdlr = logging.FileHandler(self.logfile)
        formatter = logging.Formatter('%(asctime)s %(levelname)s %(message)s')
        hdlr.setFormatter(formatter)
        logger.addHandler(hdlr)
        logger.setLevel(logging.DEBUG)
        return logger
    def log(self, f):
        """Decorator: log (or, on a dry run, merely announce) task f."""
        @wraps(f)
        def logwrapper(*args, **kwargs):
            if self.dry_run:
                print('[DRY RUN] :: {}()'.format(f.__name__))
            else:
                current_info = "Running {}".format(f.__name__)
                print(current_info)
                self.logger.info(current_info)
                try:
                    return f(*args, **kwargs)
                except Exception:
                    # Record the traceback, then let the task fail loudly.
                    self.logger.exception("ETL Error")
                    raise
        return logwrapper
    def timestamp(self):
        """Return the current local time as 'YYYY-MM-DD_HH:MM:SS'."""
        return datetime.now().strftime('%Y-%m-%d_%H:%M:%S')
    def download_file(self, endpoint, file_to_write_to):
        """GET endpoint; write the body to file_to_write_to on HTTP 200."""
        r = requests.get(endpoint)
        if r.status_code != 200:
            self.logger.error("Attempt to download {} failed with code {}.".format(endpoint, r.status_code))
        else:
            with open(file_to_write_to, "wb") as f:
                f.write(r.content)
    def __str__(self):
        info = """
        Standard Attributes:
        ===================
        ETL Name: {}
        Long Description: {}
        Log location: {}
        Temp data location: {}
        """.format(self.name, self.long_desc, self.log_dir, self.tmpdata_dir)
        # Bug fix: 'logname' and 'logfile' are set internally in __init__
        # but were missing here, so they leaked into the
        # "User-defined Attributes" section.
        standard = ('name', 'long_desc', 'log_dir', 'tmpdata_dir',
                    'logger', 'logname', 'logfile', 'dry_run')
        user_defined_attrs = ""
        # Bug fix: dict.items() instead of the Python-2-only iteritems(),
        # so this also runs under Python 3 (items() works on both).
        for k, v in self.__dict__.items():
            if k not in standard:
                user_defined_attrs += "{}: {}\n".format(k.title(), v)
        if user_defined_attrs == "":
            return info
        else:
            user_defined_attrs = "\nUser-defined Attributes:\n" + "=======================\n\n" + user_defined_attrs
            return info + user_defined_attrs
| {
"repo_name": "joedougherty/tinyetl",
"path": "tinyetl/__init__.py",
"copies": "1",
"size": "5343",
"license": "mit",
"hash": -6326519522383648000,
"line_mean": 32.6037735849,
"line_max": 116,
"alpha_frac": 0.5730862811,
"autogenerated": false,
"ratio": 3.8801742919389977,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4953260573038998,
"avg_score": null,
"num_lines": null
} |
from functools import wraps
import sys
from django.db.models.fields import AutoField
from django.db.models.sql import aggregates as sqlaggregates
from django.db.models.sql.constants import LOOKUP_SEP, MULTI, SINGLE
from django.db.models.sql.where import AND, OR
from django.db.utils import DatabaseError, IntegrityError
from django.utils.tree import Node
from google.appengine.api.datastore import Entity, Query, MultiQuery, \
Put, Get, Delete
from google.appengine.api.datastore_errors import Error as GAEError
from google.appengine.api.datastore_types import Key, Text
from djangotoolbox.db.basecompiler import (
NonrelQuery,
NonrelCompiler,
NonrelInsertCompiler,
NonrelUpdateCompiler,
NonrelDeleteCompiler)
from .db_settings import get_model_indexes
from .expressions import ExpressionEvaluator
from .utils import commit_locked
# Valid query types (a dictionary is used for speedy lookups).
# A value of None means the lookup has no direct datastore operator and
# is handled by special-case code in GAEQuery.add_filter below.
OPERATORS_MAP = {
    'exact': '=',
    'gt': '>',
    'gte': '>=',
    'lt': '<',
    'lte': '<=',
    # The following operators are supported with special code below.
    'isnull': None,
    'in': None,
    'startswith': None,
    'range': None,
    'year': None,
}
# GAE filters used for negated Django lookups.
NEGATION_MAP = {
    'gt': '<=',
    'gte': '<',
    'lt': '>=',
    'lte': '>',
    # TODO: Support: "'exact': '!='" (it might actually become
    # individual '<' and '>' queries).
}
# In some places None is an allowed value, and we need to distinguish
# it from the lack of value.
# (Used as the default for GAEQuery.count()'s 'limit' argument, where
# None itself means "no limit".)
NOT_PROVIDED = object()
def safe_call(func):
    """
    Causes the decorated function to reraise GAE datastore errors as
    Django DatabaseErrors.
    """
    @wraps(func)
    def _func(*args, **kwargs):
        try:
            return func(*args, **kwargs)
        # Python 2 three-argument raise: wrap the GAE error in a
        # DatabaseError while preserving the original traceback.
        except GAEError, e:
            raise DatabaseError, DatabaseError(str(e)), sys.exc_info()[2]
    return _func
class GAEQuery(NonrelQuery):
    """
    A simple App Engine query: no joins, no distinct, etc.

    Internally keeps a *list* of datastore ``Query`` objects in
    ``self.gae_query``; ``__in`` and negated ``__exact`` filters fan this
    list out into several sub-queries, which are executed together via
    ``MultiQuery`` (see ``_combine_filters`` / ``_build_query``).
    """
    # ----------------------------------------------
    # Public API
    # ----------------------------------------------
    def __init__(self, compiler, fields):
        super(GAEQuery, self).__init__(compiler, fields)
        self.inequality_field = None
        self.included_pks = None
        self.excluded_pks = ()
        self.has_negated_exact_filter = False
        self.ordering = []
        self.db_table = self.query.get_meta().db_table
        # Keys-only queries are cheaper; use one when only the pk is selected.
        self.pks_only = (len(fields) == 1 and fields[0].primary_key)
        start_cursor = getattr(self.query, '_gae_start_cursor', None)
        end_cursor = getattr(self.query, '_gae_end_cursor', None)
        self.gae_query = [Query(self.db_table, keys_only=self.pks_only,
                                cursor=start_cursor, end_cursor=end_cursor)]

    # This is needed for debugging.
    def __repr__(self):
        return '<GAEQuery: %r ORDER %r>' % (self.gae_query, self.ordering)

    @safe_call
    def fetch(self, low_mark=0, high_mark=None):
        """Yield result entities for the slice [low_mark:high_mark].

        Bug fix: the slice parameters now have defaults -- delete()
        below calls fetch() with no arguments, which previously raised
        TypeError before a single row could be deleted.
        """
        query = self._build_query()
        executed = False
        if self.excluded_pks and high_mark is not None:
            # Excluded pks are skipped client-side below, so over-fetch
            # to compensate.
            high_mark += len(self.excluded_pks)
        if self.included_pks is not None:
            # Pk-batch-get path (see add_filter's primary-key optimization).
            results = self.get_matching_pk(low_mark, high_mark)
        else:
            if high_mark is None:
                kw = {}
                if low_mark:
                    kw['offset'] = low_mark
                results = query.Run(**kw)
                executed = True
            elif high_mark > low_mark:
                results = query.Get(high_mark - low_mark, low_mark)
                executed = True
            else:
                # Empty slice.
                results = ()
        for entity in results:
            # Keys-only queries return Key objects instead of entities.
            if isinstance(entity, Key):
                key = entity
            else:
                key = entity.key()
            if key in self.excluded_pks:
                continue
            yield self._make_entity(entity)
        if executed and not isinstance(query, MultiQuery):
            # Save the compiled cursor so callers can resume pagination.
            # Narrowed from a bare "except:"; cursor support is
            # best-effort and absence of it must not break the fetch.
            try:
                self.query._gae_cursor = query.GetCompiledCursor()
            except Exception:
                pass

    @safe_call
    def count(self, limit=NOT_PROVIDED):
        """Return the number of matching entities."""
        if self.included_pks is not None:
            return len(self.get_matching_pk(0, limit))
        if self.excluded_pks:
            # Exclusions are applied client-side, so fetch and count.
            return len(list(self.fetch(0, 2000)))
        # The datastore's Count() method has a 'limit' kwarg, which has
        # a default value (obviously). This value can be overridden to
        # anything you like, and importantly can be overridden to
        # unlimited by passing a value of None. Hence *this* method
        # has a default value of NOT_PROVIDED, rather than a default
        # value of None
        kw = {}
        if limit is not NOT_PROVIDED:
            kw['limit'] = limit
        return self._build_query().Count(**kw)

    @safe_call
    def delete(self):
        """Delete every entity matched by this query."""
        if self.included_pks is not None:
            keys = [key for key in self.included_pks if key is not None]
        else:
            # Bug fix: materialize the generator. fetch() returns a
            # generator, which is always truthy, and Delete() needs a
            # concrete sequence of keys.
            keys = list(self.fetch())
        if keys:
            Delete(keys)

    @safe_call
    def order_by(self, ordering):
        """Record the requested sort order for _build_query()."""
        # GAE doesn't have any kind of natural ordering?
        if not isinstance(ordering, bool):
            for field, ascending in ordering:
                # GAE uses the magic '__key__' property for pk ordering.
                column = '__key__' if field.primary_key else field.column
                direction = Query.ASCENDING if ascending else Query.DESCENDING
                self.ordering.append((column, direction))

    @safe_call
    def add_filter(self, field, lookup_type, negated, value):
        """
        This function is used by the default add_filters()
        implementation.
        """
        if lookup_type not in OPERATORS_MAP:
            raise DatabaseError("Lookup type %r isn't supported." %
                                lookup_type)
        # GAE does not let you store empty lists, so we can tell
        # upfront that querying for one will return nothing.
        # NOTE(review): for a *negated* empty-list filter this wrongly
        # matches nothing instead of everything; a later revision of this
        # compiler guards this with "and not negated" -- confirm before
        # relying on exclude(field__in=[]).
        if value in ([], ()):
            self.included_pks = []
            return
        # Optimization: batch-get by key; this is only suitable for
        # primary keys, not for anything that uses the key type.
        if field.primary_key and lookup_type in ('exact', 'in'):
            if self.included_pks is not None:
                raise DatabaseError("You can't apply multiple AND "
                                    "filters on the primary key. "
                                    "Did you mean __in=[...]?")
            if not isinstance(value, (tuple, list)):
                value = [value]
            pks = [pk for pk in value if pk is not None]
            if negated:
                self.excluded_pks = pks
            else:
                self.included_pks = pks
            return
        # We check for negation after lookup_type isnull because it
        # simplifies the code. All following lookup_type checks assume
        # that they're not negated.
        if lookup_type == 'isnull':
            if (negated and value) or not value:
                # TODO/XXX: Is everything greater than None?
                op = '>'
            else:
                op = '='
            value = None
        elif negated and lookup_type == 'exact':
            # exclude(x=v) becomes (x < v) OR (x > v): two sub-queries.
            if self.has_negated_exact_filter:
                raise DatabaseError("You can't exclude more than one __exact "
                                    "filter.")
            self.has_negated_exact_filter = True
            self._combine_filters(field, (('<', value), ('>', value)))
            return
        elif negated:
            try:
                op = NEGATION_MAP[lookup_type]
            except KeyError:
                raise DatabaseError("Lookup type %r can't be negated." %
                                    lookup_type)
            # The datastore only allows inequality filters on one property.
            if self.inequality_field and field != self.inequality_field:
                raise DatabaseError("Can't have inequality filters on "
                                    "multiple fields (here: %r and %r)." %
                                    (field, self.inequality_field))
            self.inequality_field = field
        elif lookup_type == 'in':
            # Create sub-query combinations, one for each value.
            if len(self.gae_query) * len(value) > 30:
                raise DatabaseError("You can't query against more than "
                                    "30 __in filter value combinations.")
            op_values = [('=', v) for v in value]
            self._combine_filters(field, op_values)
            return
        elif lookup_type == 'startswith':
            # Lookup argument was converted to [arg, arg + u'\ufffd'].
            self._add_filter(field, '>=', value[0])
            self._add_filter(field, '<=', value[1])
            return
        elif lookup_type in ('range', 'year'):
            self._add_filter(field, '>=', value[0])
            op = '<=' if lookup_type == 'range' else '<'
            self._add_filter(field, op, value[1])
            return
        else:
            op = OPERATORS_MAP[lookup_type]
        self._add_filter(field, op, value)

    # ----------------------------------------------
    # Internal API
    # ----------------------------------------------
    def _add_filter(self, field, op, value):
        """Add one 'column op' filter to every current sub-query."""
        for query in self.gae_query:
            # GAE uses a special property name for primary key filters.
            if field.primary_key:
                column = '__key__'
            else:
                column = field.column
            key = '%s %s' % (column, op)
            if isinstance(value, Text):
                raise DatabaseError("TextField is not indexed, by default, "
                                    "so you can't filter on it. Please add "
                                    "an index definition for the field %s "
                                    "on the model %s.%s as described here:\n"
                                    "http://www.allbuttonspressed.com/blog/django/2010/07/Managing-per-field-indexes-on-App-Engine" %
                                    (column, self.query.model.__module__,
                                     self.query.model.__name__))
            if key in query:
                # The same "column op" can be filtered more than once;
                # the datastore Query accepts a list of values for it.
                existing_value = query[key]
                if isinstance(existing_value, list):
                    existing_value.append(value)
                else:
                    query[key] = [existing_value, value]
            else:
                query[key] = value

    def _combine_filters(self, field, op_values):
        """Cross-product the current sub-queries with (op, value) pairs.

        Each existing sub-query is cloned once per pair, producing
        len(gae_query) * len(op_values) sub-queries overall.
        """
        gae_query = self.gae_query
        combined = []
        for query in gae_query:
            for op, value in op_values:
                self.gae_query = [Query(self.db_table,
                                        keys_only=self.pks_only)]
                self.gae_query[0].update(query)
                self._add_filter(field, op, value)
                combined.append(self.gae_query[0])
        self.gae_query = combined

    def _make_entity(self, entity):
        """Normalize a Key or Entity into a dict keyed by column names."""
        if isinstance(entity, Key):
            key = entity
            entity = {}
        else:
            key = entity.key()
        entity[self.query.get_meta().pk.column] = key
        return entity

    @safe_call
    def _build_query(self):
        """Apply ordering and collapse the sub-queries into one query."""
        for query in self.gae_query:
            query.Order(*self.ordering)
        if len(self.gae_query) > 1:
            return MultiQuery(self.gae_query, self.ordering)
        return self.gae_query[0]

    def get_matching_pk(self, low_mark=0, high_mark=None):
        """Batch-Get the included pks, then filter/sort/slice in memory."""
        if not self.included_pks:
            return []
        # Get() returns None for keys that no longer exist.
        results = [result for result in Get(self.included_pks)
                   if result is not None and
                      self.matches_filters(result)]
        if self.ordering:
            results.sort(cmp=self.order_pk_filtered)
        # NOTE(review): the "- 1" makes this trim one element lenient
        # when high_mark == len(results) - 1; kept as-is to preserve
        # behavior -- confirm against the intended slice semantics.
        if high_mark is not None and high_mark < len(results) - 1:
            results = results[:high_mark]
        if low_mark:
            results = results[low_mark:]
        return results

    def order_pk_filtered(self, lhs, rhs):
        """cmp()-style comparator used when sorting batch-Get results."""
        left = dict(lhs)
        left[self.query.get_meta().pk.column] = lhs.key().to_path()
        right = dict(rhs)
        right[self.query.get_meta().pk.column] = rhs.key().to_path()
        return self._order_in_memory(left, right)

    def matches_filters(self, entity):
        """
        Checks if the GAE entity fetched from the database satisfies
        the current query's constraints.
        """
        item = dict(entity)
        item[self.query.get_meta().pk.column] = entity.key()
        return self._matches_filters(item, self.query.where)
class SQLCompiler(NonrelCompiler):
    """
    Base class for all GAE compilers.
    """
    # All SELECT behavior comes from NonrelCompiler; only the query
    # class is GAE-specific.
    query_class = GAEQuery
class SQLInsertCompiler(NonrelInsertCompiler, SQLCompiler):
    """INSERT compiler: turns one row dict into a datastore Entity and Put()s it."""

    @safe_call
    def insert(self, data, return_id=False):
        """Insert the field->value mapping *data*; returns the new Key."""
        opts = self.query.get_meta()
        # get_model_indexes() returns field *names* (a later revision of
        # this compiler maps them through opts.get_field(name).column).
        unindexed_fields = get_model_indexes(self.query.model)['unindexed']
        kwds = {'unindexed_properties': []}
        properties = {}
        for field, value in data.iteritems():
            # The value will already be a db.Key, but the Entity
            # constructor takes a name or id of the key, and will
            # automatically create a new key if neither is given.
            if field.primary_key:
                if value is not None:
                    kwds['id'] = value.id()
                    kwds['name'] = value.name()
            # GAE does not store empty lists (and even does not allow
            # passing empty lists to Entity.update) so skip them.
            elif isinstance(value, (tuple, list)) and not len(value):
                continue
            # Use column names as property names.
            else:
                properties[field.column] = value
            # Bug fix: compare the field *name* against the unindexed
            # name list; the Field object itself can never be a member,
            # so unindexed settings were silently ignored.
            if field.name in unindexed_fields:
                kwds['unindexed_properties'].append(field.column)
        entity = Entity(opts.db_table, **kwds)
        entity.update(properties)
        return Put(entity)
class SQLUpdateCompiler(NonrelUpdateCompiler, SQLCompiler):
    # UPDATE is implemented as: scan the pks of all matching rows, then
    # re-fetch, re-check and rewrite each entity individually.
    def execute_sql(self, result_type=MULTI):
        # Modify query to fetch pks only and then execute the query
        # to get all pks.
        pk_field = self.query.model._meta.pk
        self.query.add_immediate_loading([pk_field.name])
        pks = [row for row in self.results_iter()]
        self.update_entities(pks, pk_field)
        # Number of rows affected.
        return len(pks)
    def update_entities(self, pks, pk_field):
        # Each row from results_iter() is a 1-tuple containing the pk.
        for pk in pks:
            self.update_entity(pk[0], pk_field)
    @commit_locked
    def update_entity(self, pk, pk_field):
        gae_query = self.build_query()
        entity = Get(self.ops.value_for_db(pk, pk_field))
        # The entity may have changed since the pk scan; skip it if it
        # no longer matches the query's filters.
        if not gae_query.matches_filters(entity):
            return
        for field, _, value in self.query.values:
            if hasattr(value, 'prepare_database_save'):
                value = value.prepare_database_save(field)
            else:
                value = field.get_db_prep_save(value,
                                               connection=self.connection)
            # Expression values are evaluated against the fetched entity;
            # joins are not available on the datastore.
            if hasattr(value, 'evaluate'):
                assert not value.negated
                assert not value.subtree_parents
                value = ExpressionEvaluator(value, self.query, entity,
                                            allow_joins=False)
            if hasattr(value, 'as_sql'):
                value = value.as_sql(lambda n: n, self.connection)
            entity[field.column] = self.ops.value_for_db(value, field)
        Put(entity)
class SQLDeleteCompiler(NonrelDeleteCompiler, SQLCompiler):
    # Deletion needs no GAE-specific behavior beyond the nonrel base
    # class and GAEQuery.delete().
    pass
| {
"repo_name": "adamjmcgrath/glancydesign",
"path": "src/djangoappengine/djangoappengine/db/compiler.py",
"copies": "17",
"size": "15681",
"license": "bsd-3-clause",
"hash": -3842851499709031400,
"line_mean": 34.9655963303,
"line_max": 133,
"alpha_frac": 0.5428862955,
"autogenerated": false,
"ratio": 4.230105206366334,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": null,
"num_lines": null
} |
from functools import wraps
import sys
import capybara
from capybara import DSL_METHODS as PACKAGE_METHODS
from capybara.session import DSL_METHODS as SESSION_METHODS, Session
__all__ = ["page"] + SESSION_METHODS + PACKAGE_METHODS
class Page(object):
    """Proxy that forwards every attribute lookup to the session returned
    by ``capybara.current_session()``."""

    def __getattr__(self, name):
        # Resolve the session lazily on each access so the proxy always
        # targets whatever the *current* session is at that moment.
        session = capybara.current_session()
        return getattr(session, name)
# Module-level singleton; all the generated session DSL functions below
# delegate through it.
page = Page()
""" The singleton current-session proxy object. """
class DSLMixin:
    """Mix-in exposing the Capybara DSL on another class; the DSL methods
    are attached to it dynamically at import time (see below)."""
    pass
# A reference to this module itself, used below to attach the generated
# DSL functions at module level.  (``globals()["__name__"]`` is just the
# ``__name__`` builtin global -- use it directly.)
_module_name = __name__
_module = sys.modules[_module_name]
def _define_package_method(name):
    """Expose the package-level function ``name`` both as an attribute of
    ``DSLMixin`` and as a function of this module."""
    target = getattr(capybara, name)
    setattr(DSLMixin, name, target)
    setattr(_module, name, target)
def _define_session_method(name):
    """Create a delegate for ``Session.name`` that invokes the method on
    the current-session proxy ``page``, and expose it on ``DSLMixin`` and
    at module level."""

    @wraps(getattr(Session, name))
    def delegate(*args, **kwargs):
        bound = getattr(page, name)
        return bound(*args, **kwargs)

    setattr(DSLMixin, name, delegate)
    setattr(_module, name, delegate)
# Attach every DSL entry point to this module (and DSLMixin) at import time.
for _method in PACKAGE_METHODS:
    _define_package_method(_method)
for _method in SESSION_METHODS:
    _define_session_method(_method)
| {
"repo_name": "elliterate/capybara.py",
"path": "capybara/dsl.py",
"copies": "1",
"size": "1126",
"license": "mit",
"hash": 6873009616045145000,
"line_mean": 20.6538461538,
"line_max": 68,
"alpha_frac": 0.6811722913,
"autogenerated": false,
"ratio": 3.4224924012158056,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.46036646925158053,
"avg_score": null,
"num_lines": null
} |
from functools import wraps
import sys
def memo(func):
    """Memoize ``func`` by its positional arguments.

    Bug fix: the original printed the cache and fell off the end, so the
    wrapped function always returned ``None``.  It now returns the cached
    result (and the Python-2-only debug ``print`` is gone).  Only hashable
    positional arguments are supported.
    """
    cache = {}

    @wraps(func)
    def wrap(*arg):
        if arg not in cache:
            cache[arg] = func(*arg)
        return cache[arg]
    return wrap
class TailRecurseException(Exception):
    """Internal control-flow exception carrying the arguments of the next
    tail call (see ``tailcall``).

    Now inherits from ``Exception``: a plain class without an exception
    base cannot be raised on Python 3 (and is an old-style class on
    Python 2).
    """

    def __init__(self, args, kwargs):
        self.args = args
        self.kwargs = kwargs
def tailcall(g):
    """Decorator implementing tail-call elimination for ``g``.

    When ``g`` recurses into itself, the inner call raises
    ``TailRecurseException`` to unwind the stack; the outermost frame
    catches it and re-invokes ``g`` with the new arguments in a loop, so
    the recursion runs in constant stack space.

    Fixed the Python-2-only ``except X, e`` syntax (``as`` is valid on
    2.6+ and required on 3) and added ``wraps`` so the decorated function
    keeps its metadata.
    """
    @wraps(g)
    def func(*args, **kwargs):
        f = sys._getframe()
        # If our grandparent frame is executing this very code object, we
        # are a recursive call: unwind to the outermost frame.
        if f.f_back and f.f_back.f_back and \
                f.f_back.f_back.f_code == f.f_code:
            raise TailRecurseException(args, kwargs)
        else:
            while 1:
                try:
                    return g(*args, **kwargs)
                except TailRecurseException as e:
                    args = e.args
                    kwargs = e.kwargs
    return func
@tailcall
def cntCollatz(n, acc=0):
    """Return the number of Collatz steps needed to reach 1 from ``n``.

    Uses floor division (``//``), which is identical to ``/`` for ints on
    Python 2 but keeps the value an int on Python 3 (where ``/`` would
    produce a float and break the ``n % 2`` arithmetic).
    """
    if n == 1:
        return acc
    if n % 2 == 0:
        return cntCollatz(n // 2, acc + 1)
    else:
        return cntCollatz(3 * n + 1, acc + 1)
# Project Euler #14 driver (Python 2 print syntax).
print cntCollatz(13)
sys.exit()
# NOTE: everything below is dead code -- the sys.exit() above always stops
# the script before the brute-force search over 10**6 start values runs.
mv = 0
mi = 1
for i in range(10 ** 6, 0, -1):
    v = cntCollatz(i)
    print "%10d %10d\t%10d %10d" % (i, v, mi, mv)
    if v > mv:
        mv = v
        mi = i
| {
"repo_name": "liuyang1/euler",
"path": "014_1.py",
"copies": "1",
"size": "1185",
"license": "mit",
"hash": 2508842122174131700,
"line_mean": 20.1607142857,
"line_max": 52,
"alpha_frac": 0.4978902954,
"autogenerated": false,
"ratio": 3.2465753424657535,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4244465637865753,
"avg_score": null,
"num_lines": null
} |
from functools import wraps
import sys
# sys.path.append('../')
from util.util import print_trace_exception
class ServiceException(Exception):
    """Base class for all service-layer errors."""
    pass
class ServiceHandlerMissingException(ServiceException):
    """Raised when a handler key has not been registered on a service."""
    pass
def register(_self):
    """Decorator factory: register the decorated function as a handler on
    the service instance ``_self``.

    Registration happens at decoration time; the returned wrapper simply
    forwards calls to the original function.
    """
    def decorator(func):
        _self.add_handler(func)

        @wraps(func)
        def passthrough(*args, **kwargs):
            return func(*args, **kwargs)
        return passthrough
    return decorator
def handler(func):
    """Decorator that stamps the handler id of ``func`` into its result.

    The wrapped function must return a dict; the id (derived via
    ``BaseService.get_id`` at decoration time) is merged in under the
    ``'id'`` key before the dict is returned.
    """
    _id = BaseService.get_id(func)

    @wraps(func)
    def stamped(*args, **kwargs):
        result = func(*args, **kwargs)
        result.update({'id': _id})
        return result
    return stamped
class BaseService(object):
    """Registry of handler callables keyed by their uppercased names."""

    def __init__(self, service_name):
        self.handler = {}         # key -> callable
        self.handler_keys = []    # registration order of keys
        self.service_name = service_name

    def add_handler(self, func):
        """Register ``func`` under its derived id key."""
        assert callable(func)
        handler_id = BaseService.get_id(func)
        self.handler_keys.append(handler_id)
        self.handler[handler_id] = func

    def del_handler(self, key): pass  # not implemented

    def __getitem__(self, key):
        return self.get_handler(key)

    def get_handler(self, key):
        """Return the handler for ``key``; raise
        ServiceHandlerMissingException if it is unknown."""
        # NOTE: ``unicode`` exists only on Python 2 (this module targets it).
        assert isinstance(key, (unicode, str)) is True
        try:
            return self.handler[key]
        except KeyError:
            print_trace_exception()
            raise ServiceHandlerMissingException()

    def get_handler_list(self):
        return self.handler.keys()

    def get_handler_id(self, key):
        """Return '<service>_<key>' after validating that ``key`` exists."""
        self.get_handler(key)
        return '{}_{}'.format(self.service_name, key)

    def get_name(self):
        return self.service_name

    @classmethod
    def get_id(cls, func):
        """The registry key for ``func``: its name, uppercased."""
        return str(func.__name__).upper()
| {
"repo_name": "dyf102/Gomoku-online",
"path": "server/service/baseservice.py",
"copies": "1",
"size": "1839",
"license": "apache-2.0",
"hash": 153375165129741000,
"line_mean": 21.9875,
"line_max": 55,
"alpha_frac": 0.5927134312,
"autogenerated": false,
"ratio": 3.9463519313304722,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5039065362530473,
"avg_score": null,
"num_lines": null
} |
from functools import wraps
import time
from flask import Flask, abort, request, render_template
import backend.utils as utils
from backend.grid_manager import GridManager
# Flask application and module-level shared state.
app = Flask(__name__)
TOKEN = "abc"  # shared-secret URL token checked by @owns_token
GMS = dict(mgm=None, hgm=None)  # matrix- and hash-backed grid managers
BOXES = []  # boxes generated by initialize_grid
@app.after_request
def add_cors(resp):
    """ Ensure all responses have the CORS headers. This ensures any failures are also accessible
    by the client. """
    origin = request.headers.get('Origin', '*')
    resp.headers['Access-Control-Allow-Origin'] = origin
    resp.headers['Access-Control-Allow-Credentials'] = 'true'
    resp.headers['Access-Control-Allow-Methods'] = 'POST, OPTIONS, GET'
    requested = request.headers.get('Access-Control-Request-Headers',
                                    'Authorization')
    resp.headers['Access-Control-Allow-Headers'] = requested
    # Keep the preflight cache short while debugging so header changes
    # take effect immediately.
    if app.debug:
        resp.headers['Access-Control-Max-Age'] = '1'
    return resp
def owns_token(f):
    """View decorator: reject the request with 403 unless the ``token``
    URL parameter matches the module-level TOKEN secret."""
    @wraps(f)
    def guarded(*args, **kwargs):
        if kwargs['token'] != TOKEN:
            abort(403)
        return f(*args, **kwargs)
    return guarded
@app.route("/<token>")
@owns_token
def hello(token):
    """Serve the single-page client (token-gated)."""
    return render_template('index.html')
@app.route("/init-grid/<int:obj_count>/<int:x_size>/<int:y_size>/<token>")
@owns_token
def initialize_grid(obj_count, x_size, y_size, token):
    """(Re)build both grid managers and generate a fresh set of boxes.

    Example: http://localhost:5000/init-grid/10/600/600/abc

    Bug fix: the original assigned BOXES without a ``global`` statement,
    creating a function-local that shadowed the module-level BOXES list,
    which therefore always stayed empty.
    """
    global BOXES
    GMS['mgm'] = GridManager(utils.StoreType.matrix, obj_count, x_size, y_size, 40)
    GMS['hgm'] = GridManager(utils.StoreType.hashed, obj_count, x_size, y_size, 40)
    BOXES = utils.generate_objects(obj_count, x_size, y_size, 120, 120, 40)
    for box in BOXES:
        GMS['mgm'].add_box(box)
        GMS['hgm'].add_box(box)
    return utils.get_grids_json(GMS, BOXES)
@app.route("/move-objects/<token>")
def move_objects(token):
    """Advance both grid managers one step and report per-backend timings.

    NOTE(review): unlike the other routes, this one is not wrapped in
    @owns_token, so ``token`` is accepted but never checked -- confirm
    whether that is intentional.
    """
    times = []
    try:
        for backend in ('mgm', 'hgm'):
            started = time.time()
            GMS[backend].update_boxes()
            times.append(float('%.10f' % (time.time() - started)))
        return utils.get_grids_json(GMS, GMS['mgm'].boxes.values(), times)
    except (ValueError, TypeError):
        return abort(403)
if __name__ == "__main__":
    # Development entry point only; debug=True must not be used in production.
    app.debug = True
    app.run()
| {
"repo_name": "mgpjohny/grid-pair-management",
"path": "main.py",
"copies": "1",
"size": "2366",
"license": "mit",
"hash": 8959414316161748000,
"line_mean": 29.3333333333,
"line_max": 104,
"alpha_frac": 0.6213017751,
"autogenerated": false,
"ratio": 3.34180790960452,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.446310968470452,
"avg_score": null,
"num_lines": null
} |
from functools import wraps
import time
from hashlib import md5
import threading
class memoize(object):
    """ Memoize the results of a function. Supports an optional timeout
    for automatic cache expiration.
    If the optional manual_flush argument is True, a function called
    "flush_cache" will be added to the wrapped function. When
    called, it will remove all the timed out values from the cache.
    If you use this decorator as a class method, you must specify
    instance_method=True otherwise you will have a single shared
    cache for every instance of your class.
    This decorator is thread safe.
    """
    def __init__(self, timeout=None, manual_flush=False, instance_method=False):
        self.timeout = timeout
        self.manual_flush = manual_flush
        self.instance_method = instance_method
        # cache maps md5(repr((args, sorted kwargs))) -> (result, insert_time)
        self.cache = {}
        self.cache_lock = threading.RLock()

    def __call__(self, fn):
        if self.instance_method:
            @wraps(fn)
            def rewrite_instance_method(instance, *args, **kwargs):
                # the first time we are called we overwrite the method
                # on the class instance with a new memoize instance --
                # this gives every instance its own private cache
                if hasattr(instance, fn.__name__):
                    bound_fn = fn.__get__(instance, instance.__class__)
                    new_memoizer = memoize(self.timeout, self.manual_flush)(bound_fn)
                    setattr(instance, fn.__name__, new_memoizer)
                return getattr(instance, fn.__name__)(*args, **kwargs)
            return rewrite_instance_method

        def flush_cache():
            # Drop every entry older than the timeout.  NOTE(review):
            # Python 2 semantics -- .keys() returns a list copy there, so
            # deleting while iterating is safe; on Python 3 this would
            # raise RuntimeError.
            with self.cache_lock:
                for key in self.cache.keys():
                    if (time.time() - self.cache[key][1]) > self.timeout:
                        del(self.cache[key])

        @wraps(fn)
        def wrapped(*args, **kwargs):
            # Build a stable key from args + sorted kwargs.  NOTE(review):
            # py2 idioms -- kwargs.items() is a list (views have no .sort()
            # on py3) and md5() is fed a str (py3 would require bytes).
            kw = kwargs.items()
            kw.sort()
            key_str = repr((args, kw))
            key = md5(key_str).hexdigest()
            with self.cache_lock:
                try:
                    result, cache_time = self.cache[key]
                    # A stale hit is treated exactly like a miss.
                    if self.timeout is not None and (time.time() - cache_time) > self.timeout:
                        raise KeyError
                except KeyError:
                    result, _ = self.cache[key] = (fn(*args, **kwargs), time.time())
            if not self.manual_flush and self.timeout is not None:
                flush_cache()
            return result

        if self.manual_flush:
            wrapped.flush_cache = flush_cache
        return wrapped
| {
"repo_name": "carlsverre/wraptor",
"path": "wraptor/decorators/memoize.py",
"copies": "3",
"size": "2632",
"license": "mit",
"hash": -5729054447542369000,
"line_mean": 36.6,
"line_max": 94,
"alpha_frac": 0.5623100304,
"autogenerated": false,
"ratio": 4.545768566493955,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.0012820409790068816,
"num_lines": 70
} |
from functools import wraps
import time
import inspect
from . import helpers
def memoize(ttl_spec, whitelist=None, blacklist=None,
            key_fn=helpers._json_keyify, backend=lambda fn: dict(),
            get_now=time.time):
    """Memoize/cache the decorated function for ``ttl_spec`` amount of time.

    The cache key is derived from the call arguments (optionally filtered
    by ``whitelist``/``blacklist``); entries older than the ttl are
    recomputed on access.
    """
    ttl = helpers._time_code_to_seconds(ttl_spec)

    def decorator(fn):
        spec = inspect.getfullargspec(fn)
        cache = backend(fn)

        @wraps(fn)
        def cached(*args, **kwargs):
            # Pick the argument names/values that participate in the key.
            relevant = helpers._extract_args(spec.args, spec.defaults, args,
                                             kwargs, whitelist, blacklist)
            key = key_fn(relevant)
            now = get_now()
            stale = True
            if key in cache:
                inserted, res = cache[key]
                stale = now - inserted > ttl
            # On a miss or a stale hit, recompute and re-stamp the entry.
            if stale:
                res = fn(*args, **kwargs)
                cache[key] = (now, res)
            return res
        return cached
    return decorator
| {
"repo_name": "tuborgclassic/carlsberg",
"path": "memoize/decorator.py",
"copies": "1",
"size": "1369",
"license": "mit",
"hash": 4896249363222992000,
"line_mean": 28.7608695652,
"line_max": 80,
"alpha_frac": 0.565376187,
"autogenerated": false,
"ratio": 4.264797507788162,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5330173694788162,
"avg_score": null,
"num_lines": null
} |
from functools import wraps
import time
import logging
from peewee import Model
import peewee
from playhouse.postgres_ext import PostgresqlExtDatabase
from redash import statsd_client
metrics_logger = logging.getLogger("metrics")
class MeteredPostgresqlExtDatabase(PostgresqlExtDatabase):
    """PostgresqlExtDatabase that counts executed queries and accumulates
    their total duration (milliseconds) for the current request."""

    def __init__(self, *args, **kwargs):
        self.query_count = 0
        self.query_duration = 0  # milliseconds
        return super(MeteredPostgresqlExtDatabase, self).__init__(*args, **kwargs)

    def execute_sql(self, *args, **kwargs):
        started = time.time()
        try:
            return super(MeteredPostgresqlExtDatabase, self).execute_sql(*args, **kwargs)
        finally:
            self.query_count += 1
            # TODO: there is a noticeable few ms discrepancy between the duration here and the one calculated in
            # metered_execute. Need to think what to do about it.
            self.query_duration += (time.time() - started) * 1000

    def reset_metrics(self):
        # TODO: instead of manually managing reset of metrics, we should store them in a LocalProxy based object, that
        # is guaranteed to be "replaced" when the current request is done.
        self.query_count = 0
        self.query_duration = 0
def patch_query_execute():
    """Monkey-patch peewee.Query so every execution is timed and reported
    to statsd under 'db.<Model>.<action>', where the action tag is stamped
    on the query by MeteredModel below."""
    real_execute = peewee.Query._execute
    real_clone = peewee.Query.clone

    @wraps(real_execute)
    def metered_execute(self, *args, **kwargs):
        # 'model_action' is attached by MeteredModel._execute_and_measure;
        # queries built any other way report as 'unknown'.
        name = self.model_class.__name__
        action = getattr(self, 'model_action', 'unknown')
        start_time = time.time()
        try:
            result = real_execute(self, *args, **kwargs)
            return result
        finally:
            # Timed in the finally block so failures are measured too.
            duration = (time.time() - start_time) * 1000
            statsd_client.timing('db.{}.{}'.format(name, action), duration)
            metrics_logger.debug("model=%s query=%s duration=%.2f", name, action, duration)

    @wraps(real_clone)
    def extended_clone(self):
        # Propagate the action tag through clones, which peewee creates
        # liberally while building queries.
        cloned = real_clone(self)
        setattr(cloned, 'model_action', getattr(self, 'model_action', 'unknown'))
        return cloned

    peewee.Query._execute = metered_execute
    peewee.Query.clone = extended_clone
# Install the peewee query instrumentation at import time.
patch_query_execute()
class MeteredModel(Model):
    """Model base class that tags each query with the action that created
    it, so the patched peewee.Query can report per-action timings."""

    @classmethod
    def select(cls, *args, **kwargs):
        return cls._execute_and_measure('select', args, kwargs)

    @classmethod
    def update(cls, *args, **kwargs):
        return cls._execute_and_measure('update', args, kwargs)

    @classmethod
    def insert(cls, *args, **kwargs):
        return cls._execute_and_measure('insert', args, kwargs)

    @classmethod
    def insert_many(cls, *args, **kwargs):
        return cls._execute_and_measure('insert_many', args, kwargs)

    @classmethod
    def insert_from(cls, *args, **kwargs):
        return cls._execute_and_measure('insert_from', args, kwargs)

    @classmethod
    def delete(cls, *args, **kwargs):
        return cls._execute_and_measure('delete', args, kwargs)

    @classmethod
    def raw(cls, *args, **kwargs):
        return cls._execute_and_measure('raw', args, kwargs)

    @classmethod
    def _execute_and_measure(cls, action, args, kwargs):
        # Build the query via the base class, then stamp the action tag
        # that metered_execute/extended_clone read.
        query = getattr(super(MeteredModel, cls), action)(*args, **kwargs)
        setattr(query, 'model_action', action)
        return query
| {
"repo_name": "olivetree123/redash-x",
"path": "redash/metrics/database.py",
"copies": "1",
"size": "3358",
"license": "bsd-2-clause",
"hash": -7198786763810441000,
"line_mean": 31.9215686275,
"line_max": 118,
"alpha_frac": 0.6381774866,
"autogenerated": false,
"ratio": 3.8955916473317864,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5033769133931786,
"avg_score": null,
"num_lines": null
} |
from functools import wraps
import time
import logging
from playhouse.gfk import Model
import peewee
from playhouse.postgres_ext import PostgresqlExtDatabase
from redash import statsd_client
metrics_logger = logging.getLogger("metrics")
class MeteredPostgresqlExtDatabase(PostgresqlExtDatabase):
    """PostgresqlExtDatabase that counts executed queries and accumulates
    their total duration (milliseconds) for the current request."""

    def __init__(self, *args, **kwargs):
        self.query_count = 0
        self.query_duration = 0  # milliseconds
        return super(MeteredPostgresqlExtDatabase, self).__init__(*args, **kwargs)

    def execute_sql(self, *args, **kwargs):
        started = time.time()
        try:
            return super(MeteredPostgresqlExtDatabase, self).execute_sql(*args, **kwargs)
        finally:
            self.query_count += 1
            # TODO: there is a noticeable few ms discrepancy between the duration here and the one calculated in
            # metered_execute. Need to think what to do about it.
            self.query_duration += (time.time() - started) * 1000

    def reset_metrics(self):
        # TODO: instead of manually managing reset of metrics, we should store them in a LocalProxy based object, that
        # is guaranteed to be "replaced" when the current request is done.
        self.query_count = 0
        self.query_duration = 0
def patch_query_execute():
    """Monkey-patch peewee.Query so every execution is timed and reported
    to statsd under 'db.<Model>.<action>', where the action tag is stamped
    on the query by MeteredModel below."""
    real_execute = peewee.Query._execute
    real_clone = peewee.Query.clone

    @wraps(real_execute)
    def metered_execute(self, *args, **kwargs):
        # 'model_action' is attached by MeteredModel._execute_and_measure;
        # queries built any other way report as 'unknown'.
        name = self.model_class.__name__
        action = getattr(self, 'model_action', 'unknown')
        start_time = time.time()
        try:
            result = real_execute(self, *args, **kwargs)
            return result
        finally:
            # Timed in the finally block so failures are measured too.
            duration = (time.time() - start_time) * 1000
            statsd_client.timing('db.{}.{}'.format(name, action), duration)
            metrics_logger.debug("model=%s query=%s duration=%.2f", name, action, duration)

    @wraps(real_clone)
    def extended_clone(self):
        # Propagate the action tag through clones, which peewee creates
        # liberally while building queries.
        cloned = real_clone(self)
        setattr(cloned, 'model_action', getattr(self, 'model_action', 'unknown'))
        return cloned

    peewee.Query._execute = metered_execute
    peewee.Query.clone = extended_clone
# Install the peewee query instrumentation at import time.
patch_query_execute()
class MeteredModel(Model):
    """Model base class that tags each query with the action that created
    it, so the patched peewee.Query can report per-action timings."""

    @classmethod
    def select(cls, *args, **kwargs):
        return cls._execute_and_measure('select', args, kwargs)

    @classmethod
    def update(cls, *args, **kwargs):
        return cls._execute_and_measure('update', args, kwargs)

    @classmethod
    def insert(cls, *args, **kwargs):
        return cls._execute_and_measure('insert', args, kwargs)

    @classmethod
    def insert_many(cls, *args, **kwargs):
        return cls._execute_and_measure('insert_many', args, kwargs)

    @classmethod
    def insert_from(cls, *args, **kwargs):
        return cls._execute_and_measure('insert_from', args, kwargs)

    @classmethod
    def delete(cls, *args, **kwargs):
        return cls._execute_and_measure('delete', args, kwargs)

    @classmethod
    def raw(cls, *args, **kwargs):
        return cls._execute_and_measure('raw', args, kwargs)

    @classmethod
    def _execute_and_measure(cls, action, args, kwargs):
        # Build the query via the base class, then stamp the action tag
        # that metered_execute/extended_clone read.
        query = getattr(super(MeteredModel, cls), action)(*args, **kwargs)
        setattr(query, 'model_action', action)
        return query
| {
"repo_name": "jmvasquez/redashtest",
"path": "redash/metrics/database.py",
"copies": "6",
"size": "3365",
"license": "bsd-2-clause",
"hash": -2488997687631142400,
"line_mean": 31.9901960784,
"line_max": 118,
"alpha_frac": 0.6386329866,
"autogenerated": false,
"ratio": 3.894675925925926,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.7533308912525926,
"avg_score": null,
"num_lines": null
} |
from functools import wraps
import time
import traceback
import inspect
import threading
import types
class LoggerException(Exception):
    """Raised when a decorator needs ``self._logger`` but none is set."""
    pass
class ReturnTypeMismatchError(Exception):
    """Raised when a decorated callable returns a value of the wrong type."""
    pass
def _raiseReturnTypeMismatchError(expectedType, actualType):
    """Raise ReturnTypeMismatchError naming the expected and actual types.

    (Fixes the misspelled parameter name ``expctedType``; the function is
    module-private and only ever called positionally, so renaming is safe.)
    """
    raise ReturnTypeMismatchError(
        "Type mismatch: "
        "Expected is %s but got %s" % (expectedType, actualType))
def returns(returnType):
    """Decorator factory: require that the wrapped callable return an
    instance of ``returnType`` (checked with isinstance on every call)."""
    def decorate(f):
        @wraps(f)
        def checked(*args, **kwds):
            value = f(*args, **kwds)
            if not isinstance(value, returnType):
                _raiseReturnTypeMismatchError(returnType, type(value))
            return value
        return checked
    return decorate
def returnsNone(f):
    """Shorthand for ``returns(type(None))``: the callable must return None."""
    return returns(type(None))(f)
def returnsForExample(exampleInstance):
    """Decorator factory: the wrapped callable must return a value whose
    exact type equals ``type(exampleInstance)`` (no subclass tolerance,
    unlike ``returns``)."""
    def decorate(f):
        @wraps(f)
        def checked(*args, **kwds):
            value = f(*args, **kwds)
            expected = type(exampleInstance)
            if type(value) != expected:
                _raiseReturnTypeMismatchError(expected, type(value))
            return value
        return checked
    return decorate
def suppressException(resultInCaseOfFailure= None):
    """Method decorator: swallow any exception raised by the method, log
    it via ``self._logger``, print the traceback, and return
    ``resultInCaseOfFailure`` instead."""
    def decorate(f):
        @wraps(f)
        def guarded(self, *args, **kwds):
            try:
                return f(self, *args, **kwds)
            except Exception as e:
                self._logger.error(str(e))
                traceback.print_exc()
                return resultInCaseOfFailure
        return guarded
    return decorate
def _logEnterAndExit(loggerMethod, enterMessage, exitMessage,
f, self, *args, **kwds):
loggerMethod(enterMessage)
res= f(self, *args, **kwds)
loggerMethod(exitMessage)
return res
def logEnterAndExit(enterMessage, exitMessage, level='notice'):
    """Method decorator: log ``enterMessage``/``exitMessage`` around each
    call at the given logger ``level`` using ``self._logger``.
    Raises LoggerException when the instance has no logger configured."""
    def decorate(f):
        @wraps(f)
        def logged(self, *args, **kwds):
            if self._logger is None:
                raise LoggerException(
                    "Logger unavailable for message '%s' '%s'" %
                    (enterMessage, exitMessage))
            loggerMethod = getattr(self._logger, level)
            return _logEnterAndExit(loggerMethod,
                                    enterMessage, exitMessage,
                                    f, self, *args, **kwds)
        return logged
    return decorate
def logTime(f):
    """Method decorator: log (at notice level) how long each call took,
    even when the method raises."""
    @wraps(f)
    def timed(self, *args, **kwds):
        startedAt = time.time()
        try:
            return f(self, *args, **kwds)
        finally:
            elapsed = time.time() - startedAt
            self._logger.notice("Method '%s' took %.3f sec" % (
                f.__name__, elapsed))
    return timed
def cacheResult(f):
    """Method decorator: memoize results per instance, keyed by the
    positional arguments (keyword arguments are not supported).  The
    cache lives in the instance __dict__ under '<name>_cached_result'."""
    @wraps(f)
    def cached(self, *args):
        storeName = f.__name__ + "_cached_result"
        store = self.__dict__.setdefault(storeName, {})
        if args not in store:
            store[args] = f(self, *args)
        return store[args]
    return cached
def override(f):
    """Documentation-only marker stating that ``f`` overrides a base-class
    method; returns ``f`` unchanged."""
    return f
def logFailureAndContinue(func):
    """Method decorator: log and swallow any exception, returning None
    instead of propagating it."""
    @wraps(func)
    def guarded(self, *args, **kwds):
        try:
            return func(self, *args, **kwds)
        except Exception as e:
            traceback.print_exc()
            self._logger.error("'%s' failed: %s" % (
                func.__name__, str(e)))
    return guarded
def _synchronizedWith(lock):
def decorator(func):
@wraps(func)
def synchedFunc(*args, **kwds):
with lock:
return func(*args, **kwds)
return synchedFunc
return decorator
def _synchronizedWithAttr(lockName):
def decorator(method):
@wraps(method)
def synchronizedMethod(self, *args, **kwds):
lock= self.__dict__[lockName]
with lock:
return method(self, *args, **kwds)
return synchronizedMethod
return decorator
def synchronized(item):
    """Synchronize a method or a whole class.

    - ``synchronized('attrName')``: method decorator; the lock is fetched
      from the instance attribute of that name at call time.
    - ``@synchronized`` on a class: wraps every function attribute so it
      holds one shared class-wide RLock; the lock is also stored on each
      instance as ``__lock__``.

    Raises TypeError for any other argument.  (The original used
    ``assert False`` for this validation, which silently disappears under
    ``python -O``.)
    """
    if isinstance(item, str):
        return _synchronizedWithAttr(item)
    elif inspect.isclass(item):
        syncClass = item
        lock = threading.RLock()
        origInit = syncClass.__init__

        def __init__(self, *args, **kwds):
            self.__lock__ = lock
            origInit(self, *args, **kwds)
        syncClass.__init__ = __init__
        # Wrap every plain function defined on the class.  setattr only
        # replaces existing keys, so iterating __dict__ here is safe.
        for key in syncClass.__dict__:
            val = syncClass.__dict__[key]
            if isinstance(val, types.FunctionType):
                decorator = _synchronizedWith(lock)
                setattr(syncClass, key, decorator(val))
        return syncClass
    else:
        raise TypeError("Unsupported item type: %s is of type %s" % (
            str(item), type(item)))
| {
"repo_name": "lbusoni/pi_gcs",
"path": "pi_gcs/decorator.py",
"copies": "1",
"size": "4972",
"license": "mit",
"hash": -424292062654775400,
"line_mean": 23.1359223301,
"line_max": 70,
"alpha_frac": 0.5663716814,
"autogenerated": false,
"ratio": 4.2459436379163105,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.003060593831463345,
"num_lines": 206
} |
from functools import wraps
import time
from app.util import log
from app.util.exceptions import AuthenticationError
from app.util.secret import Secret
def retry_on_exception_exponential_backoff(exceptions, initial_delay=0.1, total_delay=15, exponential_factor=2):
    """
    Retry the decorated function with exponentially growing delays.

    :param exceptions: The exceptions that we will catch and retry on.
    :type exceptions: list[Exception]
    :param initial_delay: num seconds that the first retry period will be
    :type initial_delay: float
    :param total_delay: the total number of seconds of the sum of all retry periods
    :type total_delay: float
    :param exponential_factor: Cannot be smaller than 1.
    :type exponential_factor: float
    """
    def method_decorator(function):
        @wraps(function)
        def function_with_retries(*args, **kwargs):
            # Validate the backoff parameters on every call (matching the
            # original behavior of failing at call time, not decoration).
            if initial_delay <= 0:
                raise RuntimeError('initial_delay must be greater than 0, was set to {}'.format(str(initial_delay)))
            if exponential_factor <= 1:
                raise RuntimeError('exponential_factor, {}, must be greater than 1'.format(exponential_factor))
            delay = initial_delay
            elapsed = 0
            while True:
                try:
                    return function(*args, **kwargs)
                except exceptions as ex:
                    if elapsed > total_delay:
                        raise  # final attempt failed
                    log.get_logger(__name__).warning('Call to {} raised {}("{}"). Retrying in {} seconds.',
                                                     function.__qualname__, type(ex).__name__, ex, delay)
                    time.sleep(delay)
                    elapsed += delay
                    delay *= exponential_factor
        return function_with_retries
    return method_decorator
def authenticated(function):
    """
    Fail the request if the correct secret is not included in either the headers or the request body. This should be
    called on all mutation requests. (POST, PUT)
    """
    @wraps(function)
    def function_with_auth(self, *args, **kwargs):
        header_digest = self.request.headers.get(Secret.DIGEST_HEADER_KEY)
        body = self.encoded_body.decode('utf-8')
        if not Secret.digest_is_valid(header_digest, body):
            raise AuthenticationError('Message digest does not match header, message not authenticated.')
        return function(self, *args, **kwargs)
    return function_with_auth
| {
"repo_name": "josephharrington/ClusterRunner",
"path": "app/util/decorators.py",
"copies": "4",
"size": "2676",
"license": "apache-2.0",
"hash": 8203389082012148000,
"line_mean": 40.1692307692,
"line_max": 116,
"alpha_frac": 0.6255605381,
"autogenerated": false,
"ratio": 4.597938144329897,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.0015062269642395106,
"num_lines": 65
} |
from functools import wraps
import time
from .static import static
def once_every(n):
    """ run the wrapped function only every nth call after the first call """
    def decorate(func):
        @wraps(func)
        @static(ncalls=0, ncalls_actual=0, ncalls_filtered=0)
        def counted(*args, **kwargs):
            # Fires on calls 0, n, 2n, ...; everything else is filtered.
            if counted.ncalls % n == 0:
                func(*args, **kwargs)
                counted.ncalls_actual += 1
            else:
                counted.ncalls_filtered += 1
            counted.ncalls += 1
        return counted
    return decorate
def once_every_timer(period):
    """ run the method only once within each period of time (in seconds) """
    def decorate(func):
        @wraps(func)
        @static(last=None, ncalls=0, ncalls_actual=0, ncalls_filtered=0)
        def throttled(*args, **kwargs):
            now = time.time()
            # Fire on the first call, or once a full period has passed
            # since the last firing.
            if throttled.last is None or throttled.last < now - period:
                throttled.last = now
                func(*args, **kwargs)
                throttled.ncalls_actual += 1
            else:
                throttled.ncalls_filtered += 1
            throttled.ncalls += 1
        return throttled
    return decorate
| {
"repo_name": "bigblindbais/pytk",
"path": "src/pytk/decorators/once_every.py",
"copies": "1",
"size": "1148",
"license": "mit",
"hash": -5455107862702851000,
"line_mean": 29.2105263158,
"line_max": 77,
"alpha_frac": 0.5400696864,
"autogenerated": false,
"ratio": 4.056537102473499,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0,
"num_lines": 38
} |
from functools import wraps
import time
from ._util import maybe_labels
class Timer:
    """A Timer instance for instrumenting function runs,
    or arbitrary pieces of code.

    Usage:
        stats = Stats('greasyspoon')
        with stats.timed(
                'eggs',
                tags=['spam:ham'],
                verbose_name='Number of eggs in a basket.'):
            ... your code here ...
    """

    def __init__(self, metric, tags=()):
        self.metric = metric
        self.tags = tags
        self.time_start = None
        self.func = None

    def _wrapped_factory(self, func):
        """Wrap ``func`` so every call is timed and reported on return.

        :param func (function): A function to wrap for measurement.
        :returns function: A measured function.
        """
        self.func = func

        @wraps(func)
        def timed_call(*args, **kwargs):
            self.time_start = time.time()
            result = func(*args, **kwargs)
            # Reuse the context-manager exit path to record the duration.
            self.__exit__(None, None, None)
            return result
        return timed_call

    def __call__(self, func):
        return self._wrapped_factory(func)

    def __repr__(self):
        label = '{}'.format(self.__class__.__name__)
        if self.time_start:
            label = '{}<{}>'.format(label, str(time.time() - self.time_start))
        if self.func:
            label = '{}[{}]'.format(label, self.func.__name__)
        return label

    def __enter__(self):
        self.time_start = time.time()

    def __exit__(self, exc_type, exc_val, exc_tb):
        maybe_labels(self.metric, self.tags)\
            .observe(time.time() - self.time_start)
| {
"repo_name": "Intel471/prom-stats",
"path": "promstats/_timer.py",
"copies": "1",
"size": "1601",
"license": "mit",
"hash": -3822540687229413000,
"line_mean": 25.6833333333,
"line_max": 70,
"alpha_frac": 0.5315427858,
"autogenerated": false,
"ratio": 4.0327455919395465,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5064288377739546,
"avg_score": null,
"num_lines": null
} |
from functools import wraps
import time
from vomitter import LOGGER as L
from .response import json_error
def validate_form_data(FORM_Class):
    """
    Validates the passed form/json data to a request and passes the
    form to the called function.
    If form data is not valid, return a 406 response.
    """
    def decorator(f):
        @wraps(f)
        def decorated_function(*args, **kwargs):
            form = FORM_Class(csrf_enabled=False)
            if form.validate():
                kwargs['form'] = form
                return f(*args, **kwargs)
            return json_error(code=406, data=form.errors)
        return decorated_function
    return decorator
def profile(f):
    """
    Decorate a function with this to log its start/finish times and its
    total running time (in seconds).

    Bug fixes: uses ``f.__name__`` (valid on Python 2 and 3) instead of
    the Python-2-only ``f.func_name``, and labels the delta 's' -- the
    ``time.time()`` difference is seconds, not ms as previously claimed.
    """
    @wraps(f)
    def decorated_function(*args, **kwargs):
        strt = time.time()
        L.i('Calling function %s @ %f' % (f.__name__, strt))
        res = f(*args, **kwargs)
        end = time.time()
        L.i('Finished function %s @ %f' % (f.__name__, end))
        L.i('Time taken : %f s' % (end - strt))
        return res
    return decorated_function
| {
"repo_name": "brijeshb42/yapper",
"path": "yapper/lib/decorators.py",
"copies": "1",
"size": "1144",
"license": "mit",
"hash": 131100134475295250,
"line_mean": 26.2380952381,
"line_max": 67,
"alpha_frac": 0.590034965,
"autogenerated": false,
"ratio": 3.763157894736842,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9853192859736842,
"avg_score": 0,
"num_lines": 42
} |
from functools import wraps
import time
__author__ = "Aaron Koeppel"
__version__ = 1.0
def retry(ExceptionToCheck, logger, tries=4, delay=3, backoff=2):
    """Retry calling the decorated function using an exponential backoff.

    http://www.saltycrane.com/blog/2009/11/trying-out-retry-decorator-python/
    original from: http://wiki.python.org/moin/PythonDecoratorLibrary#Retry

    Fixed the Python-2-only ``except X, e`` syntax and bare ``print``
    statement; both replacements (``as`` and ``print(...)``) are valid on
    Python 2.6+ as well.

    :param ExceptionToCheck: the exception to check. may be a tuple of
        exceptions to check
    :type ExceptionToCheck: Exception or tuple
    :param tries: number of times to try (not retry) before giving up
    :type tries: int
    :param delay: initial delay between retries in seconds
    :type delay: int
    :param backoff: backoff multiplier e.g. value of 2 will double the delay
        each retry
    :type backoff: int
    :param logger: logger to use. If None, print
    :type logger: logging.Logger instance
    """
    def deco_retry(f):
        @wraps(f)
        def f_retry(*args, **kwargs):
            mtries, mdelay = tries, delay
            while mtries > 1:
                try:
                    return f(*args, **kwargs)
                except ExceptionToCheck as e:
                    msg = "%s, Retrying in %d seconds..." % (str(e), mdelay)
                    if logger:
                        logger.warning(msg)
                    else:
                        print(msg)
                    time.sleep(mdelay)
                    mtries -= 1
                    mdelay *= backoff
            # Final attempt: let any exception propagate to the caller.
            return f(*args, **kwargs)
        return f_retry  # true decorator
    return deco_retry
| {
"repo_name": "ak212/python-basketball-rss",
"path": "retry_decorator.py",
"copies": "1",
"size": "1533",
"license": "mit",
"hash": 5367840583812542000,
"line_mean": 30.9375,
"line_max": 76,
"alpha_frac": 0.6073059361,
"autogenerated": false,
"ratio": 3.9921875,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.50994934361,
"avg_score": null,
"num_lines": null
} |
from functools import wraps
import time
class limit(object):
    """Rate-limiting decorator: sleeps before calls that would exceed the
    configured number of calls per period.

    Bug fix: the published version had a syntax error -- the throttling
    condition was split across two lines with no continuation; it is now
    wrapped in parentheses.  NOTE: as in the original, the wrapper
    discards the wrapped function's return value.
    """

    class Exception(BaseException):
        """Raised when neither per_second nor per_minute is supplied."""
        pass

    def __init__(self, per_second=None, per_minute=None):
        self.period = 0.0
        self.max_calls = 0
        if per_second:
            self.period = 1.0
            self.max_calls = per_second
        elif per_minute:
            self.period = 60.0
            self.max_calls = per_minute
        else:
            raise limit.Exception("You must provide either per_second,"
                                  "per_minute or per_hour values.")
        self.calls_counter = 0
        self.last_call_time = None

    def __call__(self, f):
        def wrapped(*args, **kwargs):
            now = time.time()
            delay = 0.0
            if self.last_call_time is not None:
                elapsed = now - self.last_call_time
                # Throttle once max_calls have been made inside one period.
                if (elapsed <= self.period
                        and self.calls_counter >= self.max_calls):
                    self.calls_counter = 0
                    delay = abs(self.period - elapsed)
            time.sleep(delay)
            self.last_call_time = time.time()
            f(*args, **kwargs)
            self.calls_counter += 1
        return wrapped
| {
"repo_name": "vladignatyev/limiter",
"path": "limiter/limiter/__init__.py",
"copies": "1",
"size": "1216",
"license": "mit",
"hash": 592909721133228400,
"line_mean": 30.1794871795,
"line_max": 71,
"alpha_frac": 0.515625,
"autogenerated": false,
"ratio": 4.2369337979094075,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5252558797909407,
"avg_score": null,
"num_lines": null
} |
from functools import wraps
import traceback
import sys
from flask import Flask, request, jsonify, current_app
from yaqluator import yaqluator
from utils import files
# init the Flask app (single module-level application instance)
app = Flask(__name__)
def jsonp(func):
    """Wraps JSONified output for JSONP requests."""
    @wraps(func)
    def decorated_function(*args, **kwargs):
        callback = request.args.get('callback', False)
        if not callback:
            return func(*args, **kwargs)
        # JSONP: wrap the JSON payload in the caller-supplied callback.
        payload = str(func(*args, **kwargs).data)
        body = str(callback) + '(' + payload + ')'
        return current_app.response_class(body, mimetype='application/javascript')
    return decorated_function
@app.route("/evaluate/", methods=['POST'])
@jsonp
def handle_evaluate():
data = request.json or request.form
if data is None:
return json_error_response("yaml and yaql_expression are missing in request body")
if not "yaql_expression" in data:
return json_error_response("yaql_expression is missing")
if not "yaml" in data:
return json_error_response("yaml is missing")
legacy = str(data.get("legacy", False)).lower() == "true"
return invoke(yaqluator.evaluate, {"yaql_expression": data["yaql_expression"], "yaml_string": data["yaml"], "legacy": legacy})
@app.route("/autoComplete/", methods=['POST'])
@jsonp
def handle_auto_complete():
data = request.json or request.form
if data is None:
return json_error_response("yaml and yaql_expression are missing in request body")
if not "yaql_expression" in data:
return json_error_response("yaql_expression is missing")
if not "yaml" in data:
return json_error_response("yaml is missing")
legacy = str(data.get("legacy", False)).lower() == "true"
return invoke(yaqluator.auto_complete, {"yaql_expression": data["yaql_expression"], "yaml_string": data["yaml"], "legacy":legacy})
@app.route("/examples/", methods=["GET"])
@jsonp
def list_examples():
return invoke(files.list_examples, value_key="examples")
@app.route("/examples/<example_name>", methods=["GET"])
@jsonp
def get_example(example_name):
# if "exampleName" not in request.args:
# return json_error_response("example name is missing")
return invoke(files.get_example, {"example_name": example_name})
def invoke(function, params=None, value_key="value"):
    """Call ``function(**params)`` and wrap the outcome in a JSON response.

    Success yields ``{"statusCode": 1, value_key: result}``; any
    exception is converted into the standard error envelope instead of
    propagating to Flask.
    """
    kwargs = params or {}
    try:
        result = function(**kwargs)
    except Exception as exc:
        envelope = error_response(str(exc))
    else:
        envelope = {"statusCode": 1, value_key: result}
    return jsonify(**envelope)
def json_error_response(message):
    # Flask JSON response carrying the standard error envelope.
    return jsonify({"statusCode": -1, "error": message})
def error_response(message):
    """Build the plain-dict error envelope shared by all endpoints."""
    envelope = {"statusCode": -1}
    envelope["error"] = message
    return envelope
def format_exception(e):
    """Render the active exception plus the current stack as one
    traceback string.

    Must be called from inside an ``except`` block so that
    ``sys.exc_info()`` still refers to the exception being handled; the
    ``e`` argument itself is unused.
    """
    frames = traceback.format_stack()[:-2]
    frames += traceback.format_tb(sys.exc_info()[2])
    frames += traceback.format_exception_only(sys.exc_info()[0],
                                              sys.exc_info()[1])
    text = "Traceback (most recent call last):\n" + "".join(frames)
    # Drop the trailing newline.
    return text[:-1]
if __name__ == "__main__":
app.run(debug=True)
| {
"repo_name": "ALU-CloudBand/yaqluator",
"path": "python/api.py",
"copies": "1",
"size": "3371",
"license": "mit",
"hash": 3054387869228121000,
"line_mean": 31.7281553398,
"line_max": 134,
"alpha_frac": 0.6588549392,
"autogenerated": false,
"ratio": 3.7084708470847083,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.48673257862847086,
"avg_score": null,
"num_lines": null
} |
from functools import wraps
import traceback
from .data_collection import DataCollection
from .data_factories import load_data
from . import command
from . import Data, Subset
from .hub import HubListener
from .util import PropertySetMixin
from .edit_subset_mode import EditSubsetMode
from .session import Session
from ..config import settings
__all__ = ['Application', 'ViewerBase']
def catch_error(msg):
    """Decorator factory for methods: run the method and, on any
    exception, report ``msg`` plus the error through
    ``self.report_error`` instead of propagating.

    The wrapped callable must take the reporting object as its first
    positional argument (i.e. be a method). On failure the wrapper
    returns None.
    """
    def decorator(func):
        @wraps(func)
        def wrapper(*args, **kwargs):
            try:
                return func(*args, **kwargs)
            except Exception as e:
                # Interpolating `e` uses str(e), which works for every
                # exception type; the old `e.message` attribute does not
                # exist on Python 3 (or most exceptions) and raised
                # AttributeError inside this handler.
                m = "%s\n%s" % (msg, e)
                detail = str(traceback.format_exc())
                self = args[0]
                self.report_error(m, detail)
        return wrapper
    return decorator
class Application(HubListener):
    """Toolkit-agnostic base class for a glue application.
    Owns the data collection, session, hub and command stack; concrete
    subclasses supply the UI hooks that raise NotImplementedError below.
    """
    def __init__(self, data_collection=None, session=None):
        # Either adopt an existing session (and its data collection) or
        # build a fresh Session around the given/new DataCollection.
        if session is not None:
            self._session = session
            session.application = self
            self._data = session.data_collection
        else:
            self._data = data_collection or DataCollection()
            self._session = Session(data_collection=self._data,
                                    application=self)
        EditSubsetMode().data_collection = self._data
        self._hub = self._session.hub
        self._cmds = self._session.command_stack
        # Keep undo/redo UI state in sync with the command stack.
        self._cmds.add_callback(lambda x: self._update_undo_redo_enabled())
        # _settings maps key -> [current value, validator callable].
        self._settings = {}
        for key, value, validator in settings:
            self._settings[key] = [value, validator]
        self._load_settings()
    @property
    def session(self):
        # The Session tying together data, hub and command stack.
        return self._session
    def new_data_viewer(self, viewer_class, data=None):
        """
        Create a new data viewer, add it to the UI,
        and populate with data
        """
        if viewer_class is None:
            return
        c = viewer_class(self._session)
        c.register_to_hub(self._session.hub)
        # If the viewer rejects the data, discard the viewer quietly.
        if data and not c.add_data(data):
            c.close(warn=False)
            return
        self.add_widget(c)
        c.show()
        return c
    @catch_error("Failed to save session")
    def save_session(self, path):
        """ Save the data collection and hub to file.
        Can be restored via restore_session
        Note: Saving of client is not currently supported. Thus,
        restoring this session will lose all current viz windows
        """
        # Imported lazily to avoid an import cycle at module load time.
        from .state import GlueSerializer
        gs = GlueSerializer(self)
        with open(path, 'w') as out:
            gs.dump(out, indent=2)
    def new_tab(self):
        # UI hook: implemented by a concrete subclass.
        raise NotImplementedError()
    def add_widget(self, widget, label=None, tab=None):
        # UI hook: implemented by a concrete subclass.
        raise NotImplementedError()
    def close_tab(self):
        # UI hook: implemented by a concrete subclass.
        raise NotImplementedError()
    def get_setting(self, key):
        """
        Fetch the value of an application setting
        """
        return self._settings[key][0]
    def set_setting(self, key, value):
        """
        Set the value of an application setting
        Raises a KeyError if the setting does not exist
        Raises a ValueError if the value is invalid
        """
        validator = self._settings[key][1]
        self._settings[key][0] = validator(value)
    @property
    def settings(self):
        """Iterate over settings"""
        for key, (value, _) in self._settings.items():
            yield key, value
    def _load_settings(self, path=None):
        # Subclasses load persisted settings here.
        raise NotImplementedError()
    @catch_error("Could not load data")
    def load_data(self, path):
        # load_data may return one object or a list; normalize to list.
        d = load_data(path)
        if not isinstance(d, list):
            d = [d]
        self._data.extend(d)
    def report_error(self, message, detail):
        """ Report an error message to the user.
        Must be implemented in a subclass
        :param message: the message to display
        :type message: str
        :detail: Longer context about the error
        :type message: str
        """
        raise NotImplementedError()
    def do(self, command):
        # Execute a command through the undoable command stack.
        self._cmds.do(command)
    def undo(self):
        # RuntimeError signals an empty undo stack; ignore it.
        try:
            self._cmds.undo()
        except RuntimeError:
            pass
    def redo(self):
        # RuntimeError signals an empty redo stack; ignore it.
        try:
            self._cmds.redo()
        except RuntimeError:
            pass
    def _update_undo_redo_enabled(self):
        # UI hook: toggle undo/redo actions; implemented by a subclass.
        raise NotImplementedError()
    @property
    def viewers(self):
        """Return a tuple of tuples of viewers currently open
        The i'th tuple stores the viewers in the i'th close_tab
        """
        raise NotImplementedError()
    def __gluestate__(self, context):
        # Serialize by reference IDs for session persistence.
        # NOTE(review): `map` is lazy on Python 3 -- confirm the
        # serializer materializes these before dumping.
        viewers = [map(context.id, tab) for tab in self.viewers]
        data = self.session.data_collection
        return dict(session=context.id(self.session), viewers=viewers,
                    data=context.id(data))
    @classmethod
    def __setgluestate__(cls, rec, context):
        self = cls(data_collection=context.object(rec['data']))
        # manually register the newly-created session, which
        # the viewers need
        context.register_object(rec['session'], self.session)
        for i, tab in enumerate(rec['viewers']):
            # NOTE(review): relies on a subclass-provided `tab(i)`
            # accessor that is not defined on this base class -- verify.
            if self.tab(i) is None:
                self.new_tab()
            for v in tab:
                viewer = context.object(v)
                self.add_widget(viewer, tab=i, hold_position=True)
        return self
class ViewerBase(HubListener, PropertySetMixin):
    """ Base class for data viewers in an application """
    # the glue.clients.layer_artist.LayerArtistContainer
    # class/subclass to use
    _container_cls = None
    def __init__(self, session):
        super(ViewerBase, self).__init__()
        self._session = session
        self._data = session.data_collection
        self._hub = None
        # Layer artists live in a container supplied by the subclass.
        self._container = self._container_cls()
    def register_to_hub(self, hub):
        # Remember the hub; subclasses extend this to subscribe.
        self._hub = hub
    def unregister(self, hub):
        """ Abstract method to unsubscribe from messages """
        raise NotImplementedError
    def request_add_layer(self, layer):
        """ Issue a command to add a layer """
        cmd = command.AddLayer(layer=layer, viewer=self)
        self._session.command_stack.do(cmd)
    def add_layer(self, layer):
        # Dispatch on the layer type; anything else (e.g. SubsetGroup)
        # is silently ignored here.
        if isinstance(layer, Data):
            self.add_data(layer)
        elif isinstance(layer, Subset):
            self.add_subset(layer)
        # else: SubsetGroup
    def add_data(self, data):
        """ Add a data instance to the viewer
        This must be overridden by a subclass
        :param data: Data object to add
        :type data: :class:`~glue.core.data.Data`
        """
        raise NotImplementedError
    def add_subset(self, subset):
        """ Add a subset to the viewer
        This must be overridden by a subclass
        :param subset: Subset instance to add
        :type subset: :class:`~glue.core.subset.Subset`
        """
        raise NotImplementedError
    def apply_roi(self, roi):
        """
        Apply an ROI to the client
        :param roi: The ROI to apply
        :type roi: :class:`~glue.core.roi.Roi`
        """
        # NOTE(review): assumes the subclass defines `self.client`.
        cmd = command.ApplyROI(client=self.client, roi=roi)
        self._session.command_stack.do(cmd)
    @property
    def session(self):
        return self._session
    @property
    def axes(self):
        # Convenience passthrough to the client's plot axes.
        return self.client.axes
    def layer_view(self):
        # UI hook: implemented by a concrete subclass.
        raise NotImplementedError()
    def options_widget(self):
        # UI hook: implemented by a concrete subclass.
        raise NotImplementedError()
    def move(self, x=None, y=None):
        """ Reposition a viewer within the application.
        :param x: Offset of viewer's left edge from the left edge
        of the parent window. Optional
        :type x: int
        :param y: Offset of the viewer's top edge from the top edge
        of the parent window. Optional
        :type y: int
        """
        raise NotImplementedError()
    @property
    def position(self):
        """ Return the location of the viewer
        :rtype: (x, y). Tuple of 2 integers
        """
        raise NotImplementedError()
    @property
    def viewer_size(self):
        """ Return the size of the viewer
        :rtype: (width, height). Tuple of 2 ints
        """
        raise NotImplementedError()
    @viewer_size.setter
    def viewer_size(self, value):
        """ Resize the width and/or height of the viewer
        :param value: (width, height)
        :param width: new width. Optional.
        :type width: int
        :param height: new height. Optional.
        :type height: int
        """
        raise NotImplementedError()
    def restore_layers(self, rec, context):
        """
        Given a list of glue-serialized layers, restore them
        to the viewer
        """
        # if this viewer manages a client, rely on it to restore layers
        if hasattr(self, 'client'):
            return self.client.restore_layers(rec, context)
        raise NotImplementedError()
    @property
    def layers(self):
        """Return a tuple of layers in this viewer.
        A layer is a visual representation of a dataset or subset within
        the viewer"""
        return tuple(self._container)
    def __gluestate__(self, context):
        # NOTE(review): `map` is lazy on Python 3 -- confirm the
        # serializer materializes `layers` before dumping.
        return dict(session=context.id(self._session),
                    size=self.viewer_size,
                    pos=self.position,
                    properties=dict((k, context.id(v))
                                    for k, v in self.properties.items()),
                    layers=map(context.do, self.layers)
                    )
    @classmethod
    def __setgluestate__(cls, rec, context):
        session = context.object(rec['session'])
        result = cls(session)
        result.register_to_hub(session.hub)
        # Restore geometry before layers so the viewer is laid out.
        result.viewer_size = rec['size']
        x, y = rec['pos']
        result.move(x=x, y=y)
        prop = dict((k, context.object(v)) for
                    k, v in rec['properties'].items())
        result.restore_layers(rec['layers'], context)
        result.properties = prop
        return result
| {
"repo_name": "bsipocz/glue",
"path": "glue/core/application_base.py",
"copies": "1",
"size": "10124",
"license": "bsd-3-clause",
"hash": -5930902332769742000,
"line_mean": 27.5988700565,
"line_max": 75,
"alpha_frac": 0.580501778,
"autogenerated": false,
"ratio": 4.337617823479006,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5418119601479006,
"avg_score": null,
"num_lines": null
} |
from functools import wraps
import traceback
from reportmail.reporter import Reporter
def apply_reporter(subject, template='reportmail/command_report.txt', committer=None,
                   reporter_cls=Reporter, additional_context=None):
    """Decorate ``Command.handle`` so the command reports by mail.

    The decorated ``handle`` receives a reporter object right after
    ``self``::

        @apply_reporter("Title of report", 'path/to/template.txt')
        def handle(self, reporter, *args, **options):
            ...

    By default the ``reportmail/command_report.txt`` template is used;
    put a template with the same name on the template path to override
    it. The rendering context is populated with:

    * args: positional arguments of the command invocation.
    * options: option arguments of the command invocation.
    * command: module path of this command.

    If the decorated command raises, the exception text and traceback
    are appended to the report before the exception is re-raised.

    :arg subject: Title of report
    :arg template: Template to use for rendering
    :arg committer: Committer function passed to the reporter.
    """
    def wrapper(handle_func):
        @wraps(handle_func)
        def wrapped(self, *args, **options):
            context = {
                'args': args,
                'options': options,
                'command': self.__module__
            }
            if additional_context:
                context.update(additional_context)
            manager = reporter_cls(
                subject, template,
                base_context=context,
                committer=committer,
            )
            with manager as reporter:
                try:
                    result = handle_func(self, reporter, *args, **options)
                except Exception as exc:
                    # Append the failure to the report, then re-raise.
                    reporter.append(str(exc) + '\n\n' + traceback.format_exc())
                    raise
            return result
        return wrapped
    return wrapper
| {
"repo_name": "hirokiky/django-reportmail",
"path": "reportmail/command.py",
"copies": "1",
"size": "2136",
"license": "mit",
"hash": -2368367674827926500,
"line_mean": 35.2033898305,
"line_max": 87,
"alpha_frac": 0.6189138577,
"autogenerated": false,
"ratio": 4.736141906873614,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.0005963841574847469,
"num_lines": 59
} |
from functools import wraps
import traceback
from smoothie.exc import CallableCallbackException
class Dispenser(object):
    """Registry of decorated callables whose exceptions are routed to a
    user-supplied callback."""

    def __init__(self):
        # Maps function name -> undecorated function.
        self.map = {}

    def attach(self, exception=Exception, callback=None):
        '''
        decorator needed to be attached to functions or
        class functions, in order to catch exceptions and
        call specified callback function
        :param exception: Exception
        :param callback: function
        :return: decorated function
        '''
        def _attach_to_func(func):
            # Register the undecorated function for `original`.
            self.map[func.__name__] = func

            @wraps(func)
            def wrapper(*args, **kwargs):
                try:
                    return func(*args, **kwargs)
                except exception as ex:
                    if not callable(callback):
                        raise CallableCallbackException
                    kwargs['exc_info'] = traceback.format_exc()
                    kwargs['ex'] = ex
                    # HACK(TheSriram): Remove the self arg
                    return callback(args[1:], **kwargs)

            return wrapper
        return _attach_to_func

    def original(self, function):
        """Return the undecorated function registered under ``function``
        (a name string)."""
        return self.map[function]
| {
"repo_name": "TheSriram/smoothie",
"path": "smoothie/king.py",
"copies": "1",
"size": "1265",
"license": "apache-2.0",
"hash": -6122995495654560000,
"line_mean": 27.1111111111,
"line_max": 67,
"alpha_frac": 0.5351778656,
"autogenerated": false,
"ratio": 5.205761316872428,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.0008888888888888889,
"num_lines": 45
} |
from functools import wraps
import urlparse
from django.contrib.auth import REDIRECT_FIELD_NAME
from django.utils.decorators import available_attrs
from django.conf import settings
from django.http import HttpResponseRedirect
from pymongo.cursor import Cursor
from onadata.apps.logger.models import XForm
def check_obj(f):
    """Decorator: call ``f`` only when its first positional argument is
    truthy; otherwise silently return None."""
    @wraps(f)
    def with_check_obj(*args, **kwargs):
        if not args[0]:
            return None
        return f(*args, **kwargs)
    return with_check_obj
def is_owner(view_func):
    """View decorator enforcing that the logged-in user owns the resource.
    Expects the URL kwargs to contain ``username``. A different
    authenticated user is redirected to the site root; an anonymous user
    is sent to the login page with a safe "next" URL.
    """
    @wraps(view_func, assigned=available_attrs(view_func))
    def _wrapped_view(request, *args, **kwargs):
        # assume username is first arg
        if request.user.is_authenticated():
            if request.user.username == kwargs['username']:
                return view_func(request, *args, **kwargs)
            # Authenticated but not the owner: bounce to the site root.
            protocol = "https" if request.is_secure() else "http"
            return HttpResponseRedirect("%s://%s" % (protocol,
                                        request.get_host()))
        path = request.build_absolute_uri()
        login_url = request.build_absolute_uri(settings.LOGIN_URL)
        # If the login url is the same scheme and net location then just
        # use the path as the "next" url.
        login_scheme, login_netloc = urlparse.urlparse(login_url)[:2]
        current_scheme, current_netloc = urlparse.urlparse(path)[:2]
        if ((not login_scheme or login_scheme == current_scheme) and
                (not login_netloc or login_netloc == current_netloc)):
            path = request.get_full_path()
        # Imported lazily to avoid import cycles at module load time.
        from django.contrib.auth.views import redirect_to_login
        return redirect_to_login(path, None, REDIRECT_FIELD_NAME)
    return _wrapped_view
def apply_form_field_names(func):
    """Decorator for mongo query helpers: rename encoded field keys in
    each returned record back to human-readable form field names.
    Only applies when the wrapped call returns a pymongo ``Cursor`` and
    was invoked with both ``username`` and ``id_string`` kwargs; any
    other result is passed through unchanged.
    """
    @wraps(func)
    def wrapper(*args, **kwargs):
        def _get_decoded_record(record):
            # Recursively rename keys. `field_names` comes from the
            # enclosing scope and is bound before this helper runs.
            if isinstance(record, dict):
                for field in record:
                    if isinstance(record[field], list):
                        tmp_items = []
                        items = record[field]
                        for item in items:
                            tmp_items.append(_get_decoded_record(item))
                        record[field] = tmp_items
                    if field not in field_names.values() and \
                            field in field_names.keys():
                        record[field_names[field]] = record.pop(field)
            return record
        cursor = func(*args, **kwargs)
        if isinstance(cursor, Cursor) and 'id_string' in kwargs and\
                'username' in kwargs:
            username = kwargs.get('username')
            id_string = kwargs.get('id_string')
            dd = XForm.objects.get(
                id_string=id_string, user__username=username)
            records = []
            # Mapping of encoded mongo key -> original form field name.
            field_names = dd.data_dictionary().get_mongo_field_names_dict()
            for record in cursor:
                records.append(_get_decoded_record(record))
            return records
        return cursor
    return wrapper
| {
"repo_name": "awemulya/fieldsight-kobocat",
"path": "onadata/libs/utils/decorators.py",
"copies": "11",
"size": "3041",
"license": "bsd-2-clause",
"hash": 216284171471084500,
"line_mean": 39.0131578947,
"line_max": 75,
"alpha_frac": 0.5810588622,
"autogenerated": false,
"ratio": 4.331908831908832,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": null,
"num_lines": null
} |
from functools import wraps
import weakref
from asyncio import coroutine, gather
class Named:
    """Cooperative mixin that consumes a keyword-only ``name`` argument
    and keeps it on the instance."""

    def __init__(self, *args, name, **kws):
        # Forward everything else along the MRO before storing the name.
        super().__init__(*args, **kws)
        self.name = name
class Annotate(Named):
    """Annotation that turns a decorated definition into an object
    carrying that definition."""

    def __new__(cls, definition):
        # Copy the definition's metadata (__name__, __doc__, ...) onto
        # the new instance via functools.wraps.
        instance = super().__new__(cls)
        return wraps(definition)(instance)

    def __init__(self, definition):
        super().__init__(name=definition.__name__)
        self.definition = definition
class Conotate(Annotate):
    """ annotation that is defined as a coroutine """
    def __init__(self, definition, *args, **kws):
        # Wrap the definition as a generator-based coroutine.
        # NOTE(review): asyncio.coroutine was removed in Python 3.11;
        # this class only works on older interpreters.
        definition = coroutine(definition)
        super().__init__(definition, *args, **kws)
class Descr(Named):
    """Base mixin for building descriptors around a (mapping, key)
    storage scheme."""

    def lookup(self, obj):
        """Return the mapping and key used to store this descriptor's
        value for ``obj``; subclasses must override."""
        raise NotImplementedError

    def has_entry(self, obj):
        """True when a value has already been stored for ``obj``."""
        mapping, key = self.lookup(obj)
        return key in mapping
class ObjDescr(Descr):
    """Descriptor mixin storing values in the owner object's
    ``__dict__`` under a ``'_<name>'`` key."""

    def __init__(self, name):
        super().__init__(name=name)
        self.entry = '_' + name

    def lookup(self, obj):
        return obj.__dict__, self.entry
class RefDescr(Descr):
    """Descriptor mixin keeping values in a WeakKeyDictionary, so
    entries disappear when their object is garbage collected."""

    def __init__(self, name):
        super().__init__(name=name)
        self.refs = weakref.WeakKeyDictionary()

    def lookup(self, obj):
        return self.refs, obj
class Get(Descr):
    """Read-side descriptor: fetch via ``lookup`` and fall back to
    ``__default__`` when nothing is stored."""

    def __get__(self, obj, objtype=None):
        if obj is None:
            # Class-level access returns the descriptor itself.
            return self
        mapping, key = self.lookup(obj)
        try:
            return mapping[key]
        except KeyError:
            return self.__default__(obj)

    def __default__(self, obj):
        """Fallback used when no value is stored; raises NameError."""
        raise NameError("Descriptor %s of %s object has no value set" %
                        (self.name, type(obj).__name__))

    @classmethod
    def iter(desc, obj, bind=False):
        """Yield every descriptor of this class found on ``type(obj)``;
        with ``bind=True`` yield the looked-up values instead."""
        cls = type(obj)
        for name in dir(cls):
            candidate = getattr(cls, name)
            if not isinstance(candidate, desc):
                continue
            yield candidate.__get__(obj) if bind else candidate
class Defaults(Annotate, Descr):
    """Descriptor whose default value comes from evaluating the wrapped
    definition against the instance (once per access)."""

    def __default__(self, obj):
        return self.definition(obj)
class Set(Descr):
    """Write-side descriptor with ``__set__``/``__delete__`` and an
    optional post-set hook."""

    def __init__(self, *args, **kws):
        super().__init__(*args, **kws)
        self._post = None

    def post(self, f):
        """Register ``f`` to be called (with the object) after each set."""
        assert callable(f)
        self._post = f
        return self

    def __set__(self, obj, value):
        mapping, key = self.lookup(obj)
        mapping[key] = value
        if self._post:
            self._post(obj)

    def __delete__(self, obj):
        # Deleting an unset value is a silent no-op.
        mapping, key = self.lookup(obj)
        mapping.pop(key, None)
class Cache(Set, Get):
    """
    Get descriptor that computes its default once, stores it, and serves
    the stored value on subsequent reads.
    """

    def __get__(self, obj, objtype=None):
        if obj is None:
            return self
        mapping, key = self.lookup(obj)
        try:
            return mapping[key]
        except KeyError:
            value = self.__default__(obj)
            # Store through __set__ so post-set hooks still fire.
            self.__set__(obj, value)
            return value
class attr(Defaults, ObjDescr, Get, Set):
    """ attribute descriptor with additional features """
    # Readable/writable; the default value is computed from the wrapped
    # definition and stored in the object's __dict__.
    pass
class delayed(Defaults, ObjDescr, Cache):
    """ evaluate once and stored in obj dict, so values get pickled """
    pass
class refers(Defaults, RefDescr, Cache):
    """ keep values around, but reevaluate after pickling """
    pass
# Alias kept for callers using the older name.
cached = refers
class once(Defaults, RefDescr, Cache):
    """Cached descriptor whose value may be assigned at most one time;
    a second assignment raises AttributeError."""

    def __set__(self, obj, value):
        # Guard on identity, not truthiness: the old `if obj:` silently
        # skipped the assignment for falsy instances (e.g. objects with
        # a custom __bool__/__len__).
        if obj is None:
            return self
        dct, key = self.lookup(obj)
        if key in dct:
            raise AttributeError(
                "Attribute {} of {} can only be set once"
                .format(self.name, type(obj)))
        dct[key] = value
        return value
class initialized(Conotate, RefDescr, Cache):
    """
    call coroutine once at with `initialize` with supplied kwargs to get value
    """
    # The coroutine definition is driven by `initialize` below; the
    # result is cached weakly per instance.
    pass
@coroutine
def initialize(obj, **opts):
    """Run every ``@initialized`` descriptor's coroutine on ``obj`` and
    return a dict mapping descriptor name to its computed value.

    NOTE(review): asyncio.coroutine was removed in Python 3.11; this
    only works on older interpreters.
    """
    calls = []
    for desc in initialized.iter(obj):
        # Only descriptors without a stored value still need running.
        # (The original tested `has_entry` and, worse, never appended or
        # invoked the inner coroutine, so nothing was ever initialized.)
        if not desc.has_entry(obj):
            @coroutine
            def init(desc=desc):
                # `desc` is bound as a default argument to avoid the
                # late-binding closure bug (all coroutines otherwise see
                # the last descriptor of the loop).
                val = yield from desc.definition(obj, **opts)
                desc.__set__(obj, val)
                return desc.name, val
            calls.append(init())
    return dict((yield from gather(*calls)))
| {
"repo_name": "wabu/pyadds",
"path": "pyadds/annotate.py",
"copies": "1",
"size": "5010",
"license": "mit",
"hash": -2858577644568930000,
"line_mean": 24.9585492228,
"line_max": 88,
"alpha_frac": 0.5610778443,
"autogenerated": false,
"ratio": 4.296740994854202,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.005681833390121137,
"num_lines": 193
} |
from functools import wraps
import weakref
from asyncio import coroutine
from supplies.coop import Named
__author__ = 'dwae'
__all__ = ['delay', 'refer', 'update', 'attr']
class Annotate(Named):
    """
    The Annotate class wraps an annotated definition into an object
    """
    def __new__(cls, *args, **kws):
        # Bare-decorator form: `@Annotate` passes a single callable.
        if len(args) == 1 and not kws:
            definition, = args
            if hasattr(definition, '__name__'):
                return wraps(definition)(super().__new__(cls))
        # Parameterized form: `@Annotate(...)` -- return a decorator.
        # Zero-argument super() does not work inside the nested function,
        # so the bound super object is captured here.
        sup = super()
        def annotate(f):
            self = wraps(f)(sup.__new__(cls))
            # __init__ is called manually because Python only invokes it
            # automatically when __new__ returns an instance of `cls`
            # (here __new__ returns this decorator function instead).
            self.__init__(f, *args, **kws)
            return self
        return annotate
    def __init__(self, definition):
        super().__init__(name=definition.__name__)
        self.definition = definition
    def call(self, *args, **kws):
        # Invoke the wrapped definition directly.
        return self.definition(*args, **kws)
    @classmethod
    def iter(cls, instance, owner=None):
        # Yield every descriptor of this class found on the type.
        typ = owner or type(instance)
        for name in dir(typ):
            desc = getattr(typ, name)
            if isinstance(desc, cls):
                yield desc
class Conotate(Annotate):
    """
    The Conotate mixin for Annotate wraps its definition with asyncio.coroutine
    """
    def __init__(self, definition, *args, **kws):
        # NOTE(review): asyncio.coroutine was removed in Python 3.11;
        # this mixin only works on older interpreters.
        super().__init__(coroutine(definition), *args, **kws)
class Descriptor:
    """
    Base class defining how descriptor values are stored; subclasses
    override ``lookup`` to choose the backing mapping and key.
    """

    def lookup(self, instance):
        # Default: a fresh throwaway mapping and a None key.
        return {}, None

    def has_entry(self, instance):
        mapping, key = self.lookup(instance)
        return key in mapping

    def get_entry(self, instance):
        mapping, key = self.lookup(instance)
        return mapping[key]

    def set_entry(self, instance, val):
        mapping, key = self.lookup(instance)
        mapping[key] = val

    def del_entry(self, instance):
        mapping, key = self.lookup(instance)
        del mapping[key]
class ObjDescriptor(Descriptor, Named):
    """
    Stores descriptor values in the instance's ``__dict__`` under a
    ``'_<name>'`` key.
    """

    def __init__(self, *args, **kws):
        super().__init__(*args, **kws)
        self.entry = '_' + self.name

    def lookup(self, instance):
        return instance.__dict__, self.entry
class RefDescriptor(Descriptor):
    """
    Associates values with instances through a WeakKeyDictionary, so
    entries vanish when the instance is garbage collected.
    """

    def __init__(self, *args, **kws):
        super().__init__(*args, **kws)
        self.refs = weakref.WeakKeyDictionary()

    def lookup(self, instance):
        return self.refs, instance
class Get(Descriptor):
    """
    Mixin implementing ``__get__`` with a ``__default__`` fallback.
    """

    def __get__(self, instance, owner=None):
        if instance is None:
            # Accessed on the class: hand back the descriptor itself.
            return self
        if self.has_entry(instance):
            return self.get_entry(instance)
        return self.__default__(instance)

    def __default__(self, instance):
        raise NameError("Descriptor {} of {} object has no associated value"
                        .format(self, type(instance).__name__))
class Property(Get, Annotate):
    """
    Get mixin whose default value is produced by calling the wrapped
    definition on the instance.
    """

    def __default__(self, instance):
        return self.call(instance)
class Set(Descriptor):
    """
    Mixin implementing ``__set__`` and ``__delete__`` via the storage
    helpers.
    """

    def __set__(self, instance, value):
        self.set_entry(instance, value)

    def __delete__(self, instance):
        self.del_entry(instance)
class Update(Get, Set, Annotate):
    """
    Set mixin that filters assigned values through the wrapped
    definition before storing them.
    """

    def __set__(self, instance, val):
        transformed = self.call(instance, val)
        super().__set__(instance, transformed)
class _Update(Update, ObjDescriptor):
    # Concrete @update descriptor backed by the instance __dict__.
    pass
# Decorator alias used by callers.
update = _Update
class Cache(Get, Set):
    """
    Get mixin that computes the default once, stores it, and serves the
    stored value afterwards.
    """

    def __default__(self, instance):
        value = super().__default__(instance)
        self.set_entry(instance, value)
        return value
class Attr(Property, ObjDescriptor, Set):
    """
    @attr descriptors allow setting a value but call definition by default
    """
    pass
# Decorator alias used by callers.
attr = Attr
class Delay(Cache, Attr):
    """
    @delay descriptors call definition when needed
    and store the returned value
    """
    pass
# Decorator alias used by callers.
delay = Delay
class Refer(Cache, Property, RefDescriptor, Get, Set):
    """
    @refer descriptors call definition when needed and
    refer to there value reference by the object
    """
    pass
# Decorator alias used by callers.
refer = Refer
| {
"repo_name": "gameduell/pysupplies",
"path": "supplies/annotate.py",
"copies": "1",
"size": "4575",
"license": "mit",
"hash": 5911677979912173000,
"line_mean": 22.2233502538,
"line_max": 79,
"alpha_frac": 0.5916939891,
"autogenerated": false,
"ratio": 4.166666666666667,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5258360655766667,
"avg_score": null,
"num_lines": null
} |
from functools import wraps
import weakref
class WeakableDict(dict):
    """dict subclass that can accept weakrefs."""
    # Plain dicts cannot be weak-referenced; declaring only a
    # __weakref__ slot enables it without adding an instance __dict__.
    __slots__ = ('__weakref__',)
class WeakKeyNonHashingDict:
    """
    A mapping that uses object IDs instead of hashes to store values.

    Like `weakref.WeakKeyDictionary`, this mapping does not keep its
    keys alive: entries are removed automatically when the key object is
    garbage collected. Unlike `weakref.WeakKeyDictionary`, keys need not
    be hashable (e.g. dicts can be keys) with limited overhead.
    """

    __slots__ = ['_data']

    def __init__(self):
        # id(key) -> (weakref-to-key, value)
        self._data = {}

    def __getitem__(self, obj):
        return self._data[id(obj)][1]

    def __setitem__(self, obj, value):
        key = id(obj)
        if key in self._data:
            # Reuse the existing weakref so its cleanup callback stays
            # registered exactly once.
            ref = self._data[key][0]
        else:
            def on_destroy(_):
                del self._data[key]
            ref = weakref.ref(obj, on_destroy)
        self._data[key] = ref, value

    def __delitem__(self, obj):
        del self._data[id(obj)]

    def __len__(self):
        return len(self._data)
def weak_cache(func):
    """Memoize a single-argument function, holding each result only as
    long as the argument object itself stays alive."""
    memo = WeakKeyNonHashingDict()

    @wraps(func)
    def wrapper(data):
        try:
            return memo[data]
        except KeyError:
            value = func(data)
            memo[data] = value
            return value

    # Exposed for introspection/testing.
    wrapper._cache = memo
    return wrapper
| {
"repo_name": "felipeochoa/fforms",
"path": "fforms/cache.py",
"copies": "1",
"size": "1553",
"license": "mit",
"hash": -4638858030060469000,
"line_mean": 24.4590163934,
"line_max": 77,
"alpha_frac": 0.603348358,
"autogenerated": false,
"ratio": 4.141333333333334,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0,
"num_lines": 61
} |
from functools import wraps
__all__ = ('filter_powered',)
def filter_powered(filterform_cls, queryset_name='object_list', pass_params=False,
                   add_count=False, aggregate_args=None, values_spec=None, deferred=None):
    """Decorate a view so its queryset is filtered through
    ``filterform_cls`` (built from ``request.GET``).

    The view may return a dict context, a ``(template, context)`` tuple,
    or a response object exposing ``context_data``; any other return
    value is passed through untouched.

    :param filterform_cls: form class constructed as
        ``filterform_cls(request.GET, runtime_context=view_kwargs)``.
    :param queryset_name: context key holding the queryset to filter.
    :param pass_params: accepted for backward compatibility; currently
        unused.
    :param add_count: also store ``<queryset_name>_count`` in context.
    :param aggregate_args: optional kwargs for ``queryset.aggregate``;
        the default is now None instead of a shared mutable ``{}``
        (behavior unchanged -- both are falsy).
    :param values_spec: optional args for ``queryset.values``.
    :param deferred: optional ``(queryset, context) -> (queryset,
        context)`` post-processing hook.
    """
    def decorator(view):
        @wraps(view)
        def filter_powered_view(request, *args, **kwargs):
            output = view(request, *args, **kwargs)
            # SimpleTemplateResponse objects have context in `context_data`
            if hasattr(output, 'context_data'):
                context = output.context_data
            # Otherwise, work only with views that return a dict context
            elif isinstance(output, dict):
                context = output
            elif isinstance(output, tuple):
                context = output[1]
            else:
                return output
            queryset = context.get(queryset_name)
            filterform = filterform_cls(request.GET,
                                        runtime_context=kwargs)
            # Perform actual filtering
            queryset = filterform.filter(queryset).distinct()
            if add_count:
                context[queryset_name + '_count'] = queryset.count()
            if aggregate_args:
                aggregated = queryset.aggregate(**aggregate_args)
                context.update(aggregated)
            if values_spec:
                queryset = queryset.values(*values_spec)
            if deferred is not None:
                queryset, context = deferred(queryset, context)
            context[queryset_name] = queryset
            context['filterform'] = filterform
            # Hand the (possibly new) context back in the same shape the
            # view produced it.
            if isinstance(output, dict):
                output = context
            elif isinstance(output, tuple):
                output = (output[0], context)
            elif hasattr(output, 'context'):
                output.context = context
            return output
        return filter_powered_view
    return decorator
"repo_name": "freevoid/django-datafilters",
"path": "datafilters/decorators.py",
"copies": "2",
"size": "1948",
"license": "mit",
"hash": -6595863497984341000,
"line_mean": 34.4181818182,
"line_max": 82,
"alpha_frac": 0.5590349076,
"autogenerated": false,
"ratio": 4.994871794871795,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.6553906702471795,
"avg_score": null,
"num_lines": null
} |
from functools import wraps
__all__ = ['memoize', 'singleton', 'memoize_attr_check']
def _make_key(args, kwargs):
return args, frozenset(kwargs.items())
def memoize(func):
    """Save results of function calls to avoid repeated calculation"""
    memo = {}

    @wraps(func)
    def wrapper(*args, **kwargs):
        key = _make_key(args, kwargs)
        try:
            return memo[key]
        except KeyError:
            memo[key] = value = func(*args, **kwargs)
            return value
        except TypeError:
            # Unhashable input: skip caching entirely.
            return func(*args, **kwargs)

    return wrapper
def memoize_attr_check(attr):
    """Memoize a method call, keyed both on its arguments and on the
    named attribute of the first argument (presumably self).

    Results are recalculated whenever that attribute changes.
    """
    def decorator(func):
        @memoize
        def keyed(*args, **kwargs):
            # args[0] is the attribute snapshot; drop it before calling.
            return func(*args[1:], **kwargs)

        @wraps(func)
        def result(*args, **kwargs):
            # Prepend the current attribute value so it becomes part of
            # the memoization key.
            snapshot = getattr(args[0], attr)
            return keyed(snapshot, *args, **kwargs)

        return result
    return decorator
def singleton(cls):
    """Turn a class into a singleton: every construction returns the
    same shared instance."""
    instances = {}

    @wraps(cls)
    def getinstance():
        try:
            return instances[cls]
        except KeyError:
            obj = instances[cls] = cls()
            return obj
    return getinstance
| {
"repo_name": "glue-viz/glue-qt",
"path": "glue/core/decorators.py",
"copies": "2",
"size": "1549",
"license": "bsd-3-clause",
"hash": -1513483216212461000,
"line_mean": 23.5873015873,
"line_max": 76,
"alpha_frac": 0.5978050355,
"autogenerated": false,
"ratio": 4.2554945054945055,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.00037792894935752074,
"num_lines": 63
} |
from functools import wraps
__all__ = [
'singleton',
'memoize',
'cached_property',
]
def singleton(func):
    """
    Decorator that creates a singleton instance.

    The first call evaluates ``func``; every later call returns the
    cached result (arguments of later calls are ignored).
    """
    cache = {}

    @wraps(func)
    def getinstance(*args, **kwargs):
        try:
            return cache[func]
        except KeyError:
            instance = cache[func] = func(*args, **kwargs)
            return instance

    return getinstance
def memoize(func):
    """
    Decorator that caches a function's return value each time it is called.
    If called later with the same arguments, the cached value is returned,
    and the method is not reevaluated.

    The cache dict is exposed as ``func.cache``. All arguments must be
    hashable.
    """
    cache = func.cache = {}

    @wraps(func)
    def memoizer(*args, **kwargs):
        # Key on keyword arguments too: the old key used only `args`, so
        # f(1, b=2) and f(1, b=3) wrongly shared one cache entry.
        key = (args, frozenset(kwargs.items()))
        if key not in cache:
            cache[key] = func(*args, **kwargs)
        return cache[key]
    return memoizer
class cached_property(object):
    """
    Decorator that caches the property on the instance.
    Computed only once per instance.
    """

    def __init__(self, func, name=None):
        self.func = func
        self.__doc__ = getattr(func, '__doc__')
        self.name = name or func.__name__

    def __get__(self, instance, cls=None):
        if instance is None:
            # Class-level access returns the descriptor itself.
            return self
        value = self.func(instance)
        # Shadow the (non-data) descriptor: subsequent lookups hit the
        # instance __dict__ directly.
        instance.__dict__[self.name] = value
        return value
| {
"repo_name": "joeljames/py-shared-utils",
"path": "pysharedutils/decorators.py",
"copies": "1",
"size": "1378",
"license": "mit",
"hash": 4409329922655300600,
"line_mean": 23.6071428571,
"line_max": 78,
"alpha_frac": 0.5957910015,
"autogenerated": false,
"ratio": 4.089020771513353,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5184811773013352,
"avg_score": null,
"num_lines": null
} |
from functools import wraps
class ActFormatter():
    """Formats result dictionaries (shape: ``{'result': [row_dict, ...]}``)
    for display.

    ``decorate`` wraps a result-producing function so its output is routed
    through the formatter currently stored in ``_formatter``.

    Fixes over the original: the duplicate ``default_format`` definition is
    removed (it was defined twice, identically), the wrapper typo
    ``wprapper_func`` is corrected, and table rendering joins parts instead
    of quadratic ``+=`` string concatenation.
    """

    @classmethod
    def default_format(cls, dict_input):
        """Identity formatter: return the input unchanged."""
        return dict_input

    # Formatter applied by decorate(); the identity formatter by default.
    _formatter = default_format

    @classmethod
    def decorate(cls, formatter_func, **kwargs):
        """Bind ``**kwargs`` to ``formatter_func`` and pass its result
        through the active formatter. Returns a zero-argument callable."""
        @wraps(formatter_func)
        def wrapper_func():
            return ActFormatter._formatter(formatter_func(**kwargs))
        return wrapper_func

    @classmethod
    def fixedwidth_format(cls, values):
        """Render ``values['result']`` (a list of dicts sharing the keys of
        the first row) as a tab-separated fixed-width table with a dashed
        separator line under the header."""
        rows = values['result']
        # Column order follows the key order of the first row.
        headers = list(rows[0])
        columns = {h: [] for h in headers}
        widths = {h: 0 for h in headers}
        for row in rows:
            for h in headers:
                cell = row[h]
                columns[h].append(cell)
                # Track the widest cell per column (header width is NOT
                # considered, matching the original behaviour).
                widths[h] = max(widths[h], len(cell))
        parts = []
        for h in headers:
            parts.append(str(h).ljust(widths[h] + 5) + '\t')
        parts.append('\n')
        for h in headers:
            parts.append('-' * (widths[h] + 5) + '\t')
        parts.append('\n')
        for i in range(len(rows)):
            for h in headers:
                parts.append(str(columns[h][i]).ljust(widths[h] + 5) + '\t')
            parts.append('\n')
        return ''.join(parts)
| {
"repo_name": "Actifio/actifio-python-package",
"path": "Actifio/formatting.py",
"copies": "1",
"size": "1606",
"license": "mit",
"hash": 6354897483139592000,
"line_mean": 26.6896551724,
"line_max": 86,
"alpha_frac": 0.5666251557,
"autogenerated": false,
"ratio": 3.4837310195227764,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9382149311247248,
"avg_score": 0.033641372795105824,
"num_lines": 58
} |
from functools import wraps
class DebugWrapper():
    """
    Wraps callables so their invocations can be observed.

    Scaffolding meant to help distribute unit testing (nose) across
    multiple cores.
    """

    def listen(self, func):
        """
        Wrap ``func`` so a call reports its arguments WITHOUT actually
        executing the function.
        """
        @wraps(func)
        def debug_function(*pos, **named):
            return {
                'kwargs': named,
                'kargs': pos,
                'function': func,
            }
        return debug_function

    def inspect(self, func):
        """
        Wrap ``func`` so a call reports its arguments AND the real
        result of executing the function.
        """
        @wraps(func)
        def debug_function(*pos, **named):
            report = {
                'kwargs': named,
                'kargs': pos,
                'function': func,
            }
            report['result'] = func(*pos, **named)
            return report
        return debug_function
class DebugLink(DebugWrapper):
    """
    Links you or your nose wrappers: a small factory handing out
    DebugWrapper instances.
    """

    def links(self):
        # Each request gets a fresh wrapper.
        return DebugWrapper()

    def __call__(self):
        # Calling the link is shorthand for links().
        return self.links()
| {
"repo_name": "uhjish/link",
"path": "link/debuglink.py",
"copies": "2",
"size": "1165",
"license": "apache-2.0",
"hash": 5113924740833815000,
"line_mean": 22.7755102041,
"line_max": 71,
"alpha_frac": 0.5167381974,
"autogenerated": false,
"ratio": 4.604743083003952,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.022768399791622873,
"num_lines": 49
} |
from functools import wraps
class exception_guard(object):
    """Guard against the given exception and raise a different exception.

    Usable both as a context manager and as a function decorator.

    ``catchable`` is the exception type (or tuple of types) to intercept.
    ``throwable`` is the replacement exception — a class or instance — or
    None, which means "suppress the caught exception instead of re-raising".
    """

    def __init__(self, catchable, throwable=RuntimeError):
        if is_exception_class(catchable):
            self._catchable = catchable
        else:
            raise TypeError('catchable must be one or more exception types')
        # throwable may be an exception class, an exception instance,
        # or None (suppress instead of re-raise).
        if throwable is None or is_exception(throwable):
            self._throwable = throwable
        else:
            raise TypeError('throwable must be None or an exception')

    def throw(self, cause):
        """Throw an exception from the given cause."""
        throwable = self._throwable
        assert throwable is not None
        self._raisefrom(throwable, cause)

    def _raisefrom(self, exception, cause):
        # "raise ... from ..." syntax only supported in Python 3.
        assert cause is not None  # "raise ... from None" is not supported.
        if isinstance(exception, BaseException):
            # We're given an exception instance, so just use it as-is.
            pass
        else:
            # We're given an exception class, so instantiate it with a
            # helpful error message.
            assert issubclass(exception, BaseException)
            name = type(cause).__name__
            message = 'guard triggered by %s exception' % name
            exception = exception(message)
        try:
            # exec defers compilation to runtime, so this module stays
            # importable on interpreters where "raise ... from ..." is a
            # SyntaxError at compile time.
            exec("raise exception from cause", globals(), locals())
        except SyntaxError:
            # Python too old. Fall back to a simple raise, without cause.
            raise exception

    # === Context manager special methods ===

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_value, traceback):
        # Only intercept the configured exception type(s); anything else
        # propagates unchanged (implicit None return).
        if exc_type is not None and issubclass(exc_type, self._catchable):
            if self._throwable is None:
                # Suppress the exception.
                return True
            else:
                self.throw(exc_value)

    # === Use exception_guard as a decorator ===

    def __call__(self, function):
        """Wrap ``function`` so the guard applies to every call."""
        catchable = self._catchable
        suppress_exception = (self._throwable is None)

        @wraps(function)
        def inner(*args, **kwargs):
            try:
                result = function(*args, **kwargs)
            except catchable as error:
                if suppress_exception:
                    # Swallow the exception; implicitly return None.
                    return
                else:
                    self.throw(error)
            else:
                return result
        return inner
# Two helper functions.
def is_exception(obj):
"""Return whether obj is an exception.
>>> is_exception(ValueError) # An exception class.
True
>>> is_exception(ValueError()) # An exception instance.
True
>>> is_exception(float)
False
"""
try:
return issubclass(obj, BaseException)
except TypeError:
return isinstance(obj, BaseException)
def is_exception_class(obj):
    """Return whether obj is an exception class, or a nonempty tuple of
    exception classes.

    >>> is_exception_class(ValueError)
    True
    >>> is_exception_class(float)
    False
    >>> is_exception_class(ValueError())  # An instance, not a class.
    False
    >>> is_exception_class((ValueError, KeyError))
    True
    >>> is_exception_class(())
    False
    """
    try:
        if isinstance(obj, tuple):
            # Fix: the original ``return obj and all(...)`` returned the
            # empty tuple itself (falsey, but not False) for (); normalise
            # to a real bool.
            return bool(obj) and all(issubclass(X, BaseException) for X in obj)
        return issubclass(obj, BaseException)
    except TypeError:
        # issubclass raised: obj (or a tuple member) is not a class.
        return False
| {
"repo_name": "ActiveState/code",
"path": "recipes/Python/580808_Guard_against_exceptiwrong/recipe-580808.py",
"copies": "1",
"size": "3490",
"license": "mit",
"hash": -7291792093708387000,
"line_mean": 31.0183486239,
"line_max": 76,
"alpha_frac": 0.5865329513,
"autogenerated": false,
"ratio": 4.697173620457605,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5783706571757605,
"avg_score": null,
"num_lines": null
} |
from functools import wraps
def authorized_method(o):
    """Mark ``o`` as requiring both authentication and authorization,
    keyed on its ``slug`` attribute. Returns ``o`` for decorator use."""
    slug = o.slug
    o.authentication_required = slug
    o.authorization_required = slug
    return o
def authenticated_method(o):
    """Mark ``o`` as requiring authentication only, keyed on its ``slug``
    attribute. Returns ``o`` for decorator use."""
    o.authentication_required = o.slug
    return o
def anonymous_method(o):
    """Mark ``o`` as freely accessible: neither authentication nor
    authorization is required. Returns ``o`` for decorator use."""
    o.authorization_required = False
    o.authentication_required = False
    return o
class authorization_required(object):
    """
    Class decorator for documents, collections, applications that require
    authorization to access.

    At a minimum, sets ``authentication_required`` and
    ``authorization_required`` on the decorated class. Optionally a
    ``filter_function`` may restrict access per document via rethinkdb's
    filter API (it must use rethinkdb predicates)::

        def example_filter_function(auth_info, method):
            username = auth_info.username
            permission = 'can_' + method
            return lambda doc: doc[permission].contains(username)

    Args:
        *protected (str): Items should be 'read', 'write', or the name of
            a method.
        filter_function (function): Accepts a decoded auth token and an
            access method and returns a second function; that function
            accepts a document instance and returns whether the user may
            access it.
    """

    def __init__(self, *protected, filter_function=None):
        self.protected = protected
        self.filter_function = filter_function

    def __call__(self, cls):
        cls.authentication_required = self.protected
        cls.authorization_required = self.protected
        has_filter = bool(self.filter_function)
        cls.document_level_authorization = has_filter
        if has_filter:
            cls.authorization_filter = self.filter_function
        return cls
class authentication_required(object):
    """Class decorator recording which operations on the decorated class
    require authentication (but not authorization)."""

    def __init__(self, *protected):
        # Tuple of 'read', 'write', or method names.
        self.protected = protected

    def __call__(self, cls):
        cls.authentication_required = self.protected
        return cls
| {
"repo_name": "JeffHeard/sondra",
"path": "sondra/auth/decorators.py",
"copies": "1",
"size": "2213",
"license": "apache-2.0",
"hash": 8579606281507432000,
"line_mean": 34.126984127,
"line_max": 120,
"alpha_frac": 0.6836873023,
"autogenerated": false,
"ratio": 4.79004329004329,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.597373059234329,
"avg_score": null,
"num_lines": null
} |
from functools import wraps
def countup(function):
    """Decorator: return the wrapped function's result incremented by one."""
    @wraps(function)
    def wrapper(*args, **kwargs):
        return function(*args, **kwargs) + 1
    return wrapper
def countdown(function):
    """Decorator: return the wrapped function's result decremented by one."""
    @wraps(function)
    def wrapper(*args, **kwargs):
        return function(*args, **kwargs) - 1
    return wrapper
def add(decorator_arg):
    """Decorator factory: add ``decorator_arg`` to the wrapped function's
    return value.

    Used as ``@add(n)``; the decorated function's result is shifted by n.
    """
    def decorate(function):
        # Receives the function being decorated.
        @wraps(function)
        def wrapped(*args, **kwargs):
            # Run the real function, then shift its result.
            return function(*args, **kwargs) + decorator_arg
        return wrapped
    return decorate
def calculate(*decorator_args, **decorator_kwargs):
    """Decorator factory: add the sum of ``decorator_args`` to the wrapped
    function's result, or subtract it when the keyword argument
    ``is_decrement`` is truthy."""
    def decorate(function):
        @wraps(function)
        def wrapped(*args, **kwargs):
            result = function(*args, **kwargs)
            delta = sum(decorator_args)
            if decorator_kwargs.get('is_decrement'):
                return result - delta
            return result + delta
        return wrapped
    return decorate
| {
"repo_name": "thinkAmi-sandbox/python_mock-sample",
"path": "e.g._mocking_decorator/deco/my_decorator.py",
"copies": "1",
"size": "1831",
"license": "unlicense",
"hash": -1749343161272080400,
"line_mean": 25.8333333333,
"line_max": 53,
"alpha_frac": 0.5997239476,
"autogenerated": false,
"ratio": 2.601436265709156,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.3701160213309156,
"avg_score": null,
"num_lines": null
} |
from functools import wraps
def decorate_with(*decorators):
    """decorate_with is a decorator that takes other decorators as input,
    including their arguments (if any), and applies them to the function
    being decorated.

    Each entry is either a bare decorator, or a list
    ``[decorator, arg1, ..., maybe_kwargs_dict]`` whose trailing dict (if
    any) supplies keyword arguments for the decorator.

    NOTE(review): decoration is re-applied on every call of the wrapped
    function, mirroring the original implementation.
    """
    def wrapper(func):
        @wraps(func)
        def func_wrapper(*args, **kwargs):
            wrapped = func
            for entry in decorators:
                if not isinstance(entry, list):
                    # Bare decorator.
                    wrapped = entry(wrapped)
                    continue
                # In case the user isn't careful enough, a one-element
                # list behaves like a bare decorator.
                dec, spec = entry[0], entry[1:]
                if not spec:
                    wrapped = dec(wrapped)
                    continue
                if isinstance(spec[-1], dict):
                    dec_args, dec_kwargs = spec[:-1], spec[-1]
                else:
                    dec_args, dec_kwargs = spec, {}
                wrapped = dec(*dec_args, **dec_kwargs)(wrapped)
            return wrapped(*args, **kwargs)
        return func_wrapper
    return wrapper
| {
"repo_name": "fredgj/mom",
"path": "mom/meta.py",
"copies": "1",
"size": "1171",
"license": "mit",
"hash": 7714672378214631000,
"line_mean": 39.3793103448,
"line_max": 79,
"alpha_frac": 0.4918872758,
"autogenerated": false,
"ratio": 5.34703196347032,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.6338919239270321,
"avg_score": null,
"num_lines": null
} |
from functools import wraps
def ensure_self(func):
    """
    Decorator that can be used to ensure 'self' is the first argument on a
    task method.

    Only needed with task methods used as a callback to a chord or in
    link_error; it is a hack to work around
    https://github.com/celery/celery/issues/2137 — ``self`` is smuggled in
    via a ``this`` keyword argument and re-inserted as the first positional
    argument::

        f = Foo()
        (f.first.s() | f.last.s(this=f)).apply_async()

    If the first positional argument already equals ``this`` (i.e. self was
    also passed positionally), it is not duplicated.

    Fix over the original: the call to ``func`` is no longer inside the
    ``try`` block, so a ``KeyError`` raised *by the task body itself*
    propagates instead of being swallowed and triggering a second call
    without ``self``.
    """
    @wraps(func)
    def inner(*args, **kwargs):
        if 'this' not in kwargs:
            # 'this' wasn't passed; all we can do is assume a normal
            # invocation.
            return func(*args, **kwargs)
        self = kwargs.pop('this')
        if args and self == args[0]:
            # Assume the first argument hasn't been passed in twice.
            return func(*args, **kwargs)
        return func(self, *args, **kwargs)
    return inner
| {
"repo_name": "alexhayes/django-toolkit",
"path": "django_toolkit/celery/decorators.py",
"copies": "1",
"size": "1500",
"license": "mit",
"hash": -2459174211351932400,
"line_mean": 27.3018867925,
"line_max": 113,
"alpha_frac": 0.5673333333,
"autogenerated": false,
"ratio": 4.2979942693409745,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5365327602640975,
"avg_score": null,
"num_lines": null
} |
from functools import wraps
def InstanceDecorator(outerFunction):
    '''
    The decorated class method itself becomes a decorator
    that is aware of the class instance's methods and attributes.
    The second parameter after self must be the function you are
    ultimately decorating.
    Usage:

        @InstanceDecorator
        def StatusCheck(self, function, desiredStatus, ErrorMsg='Error'):
            if desiredStatus is self.status():
                function()
            else:
                print (ErrorMsg)

        @StatusCheck(False, ErrorMsg='The server is already running!')
        def start(self):
            self.server.start()

    It is also possible to "finish" giving arguments to the decorated
    function from within the self aware decorator.
    '''
    @wraps(outerFunction)
    def decorator(*decArgs, **decKwargs):
        # decArgs/decKwargs are the arguments given at the
        # @StatusCheck(...) call site.
        def wrapper(innerFunction):
            # innerFunction is the method ultimately being decorated.
            @wraps(innerFunction)
            def inner(self, *funcArgs, **funcKwargs):
                @wraps(innerFunction)
                def f(*innerArgs, **innerKwargs):
                    #===========================================================
                    # We wrap the innerFunction one last time so that we can
                    # pass supplemental arguments while in the decorating method.
                    # It also allows us to make reference to the innerFunction's
                    # attributes (i.e. func_name, doc string, etc.) while in the
                    # decorator (these things are lost when using functools.partial).
                    # Plus it is much more helpful to see the proper traceback if you do
                    # something bad with the innerFunction while in the decorator.
                    #===========================================================
                    finalArgs = innerArgs + funcArgs
                    innerKwargs.update(funcKwargs)
                    return innerFunction(self, *finalArgs, **innerKwargs)
                # Hand control to the self-aware outer method, giving it the
                # pre-bound callable plus the decoration-time arguments.
                return outerFunction(self, f, *decArgs, **decKwargs)
            return inner
        return wrapper
    return decorator
"repo_name": "christopher-henderson/Experiments",
"path": "misc/instanceDecorator.py",
"copies": "1",
"size": "2085",
"license": "mit",
"hash": 8887916795642899000,
"line_mean": 42.4583333333,
"line_max": 88,
"alpha_frac": 0.5697841727,
"autogenerated": false,
"ratio": 5.238693467336684,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.6308477640036684,
"avg_score": null,
"num_lines": null
} |
from functools import wraps
def iterit(*args, **kwargs):
    """
    This takes some input (int, string, list, iterable, whatever) and
    makes sure it is an iterable, making it a single item list if not.
    Importantly, it does rational things with strings.

    You can pass it more than one item. Cast is optional.

        def foo(offsets=10):
            offsets = iterit(offsets, cast=int)
            for f in offsets:
                print("Value %s" % (10 + f))

        >>> foo()
        Value 20
        >>> foo(3)
        Value 13
        >>> foo([1, 2])
        Value 11
        Value 12
        >>> foo('3')
        Value 13
        >>> foo(('3', 4))
        Value 13
        Value 14

    Also useful this way:

        foo, bar = iterit(foo, bar)

    Fixes over the original (which was Python-2-only): ``map(None, ...)``
    is no longer used as an identity map (it raises TypeError on Python 3),
    and strings are explicitly treated as single items (Python 3 ``str``
    has ``__iter__``, so the old ``hasattr`` test would char-split them).
    Always returns a list, as the Python 2 ``map`` did.
    """
    if len(args) > 1:
        # Multiple inputs: process each one independently.
        return [iterit(arg, **kwargs) for arg in args]
    value = args[0]
    if isinstance(value, str) or not hasattr(value, '__iter__'):
        items = [value]
    else:
        items = value
    cast = kwargs.get('cast')
    if cast is None:
        return list(items)
    return [cast(x) for x in items]
def start_gen(f):
    """Decorator: call the generator function and advance the resulting
    generator to its first ``yield``, so it is immediately ready to
    ``send`` into.

    Fix: uses the builtin ``next()`` instead of the Python-2-only
    ``gen.next()`` method.
    """
    @wraps(f)
    def wrapper(*args, **kwargs):
        gen = f(*args, **kwargs)
        next(gen)
        return gen
    return wrapper
| {
"repo_name": "PaulMcMillan/eagleeye",
"path": "eagleeye/utils.py",
"copies": "2",
"size": "1051",
"license": "bsd-2-clause",
"hash": -6204193804945946000,
"line_mean": 23.4418604651,
"line_max": 72,
"alpha_frac": 0.5547098002,
"autogenerated": false,
"ratio": 3.5268456375838926,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.002093914283168493,
"num_lines": 43
} |
from functools import wraps
def keymemo(key):
    """memoize decorator that applies the function key to the arguments
    in order to retrieve the key to use in the cache dictionary"""
    def memo(fn):
        """the memoize decorator itself"""
        cache = {}

        @wraps(fn)
        def _fn(*args, **kwargs):
            K = key(*args, **kwargs)
            try:
                ret = cache[K]
            except KeyError:
                # Fix: catch only KeyError — the original bare ``except``
                # swallowed every exception (even KeyboardInterrupt).
                ret = cache[K] = fn(*args, **kwargs)
            return ret
        # Expose the cache for inspection/clearing.
        _fn._cache = cache
        return _fn
    return memo


# the classical memoize decorator (uses the identity function as key function)
memo = keymemo(key=lambda x: x)
def classkeymemo(key):
    """memoize decorator that applies the function key to the arguments.

    This decorator can be used for class methods, and each instance
    keeps its own cache (stored as ``_cache_<method name>`` on the
    instance)."""
    def classmemo(fn):
        """the classmemoize decorator itself"""
        # Hoisted: the cache attribute name is fixed per method, no need
        # to rebuild it on every call as the original did.
        cache_name = '_cache_' + fn.__name__

        def _get_cache(self):
            """cache is stored in the self namespace, retrieved at runtime"""
            try:
                return getattr(self, cache_name)
            except AttributeError:
                # Fix: catch only AttributeError — the original bare
                # ``except`` hid every failure. Lazily create the cache.
                setattr(self, cache_name, {})
                return getattr(self, cache_name)

        @wraps(fn)
        def _fn(self, *args, **kwargs):
            cache = _get_cache(self)
            K = key(self, *args, **kwargs)
            try:
                ret = cache[K]
            except KeyError:
                # Fix: catch only KeyError instead of a bare ``except``.
                ret = cache[K] = fn(self, *args, **kwargs)
            return ret
        return _fn
    return classmemo


# the classmemo decorator (uses the identity function as key function)
classmemo = classkeymemo(key=lambda self, x: x)
| {
"repo_name": "enricobacis/pg-distopt",
"path": "distopt/memo.py",
"copies": "1",
"size": "1687",
"license": "mit",
"hash": 5107233561582119000,
"line_mean": 28.5964912281,
"line_max": 78,
"alpha_frac": 0.5696502667,
"autogenerated": false,
"ratio": 4.134803921568627,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5204454188268627,
"avg_score": null,
"num_lines": null
} |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.