text stringlengths 0 1.05M | meta dict |
|---|---|
from functools import wraps
import atexit
import requests
from bs4 import BeautifulSoup
HEADERS = {'User-Agent': 'Pinote-Better.py'}
class PinoteError(Exception):
    """Raised when any Pinote operation fails.

    Wraps the original exception together with the name of the failing
    function and an optional extra message.
    """

    @staticmethod
    def error_handler(msg=''):
        """Decorator factory converting any exception raised by the wrapped
        function into a :class:`PinoteError` carrying context.

        :param msg: extra text appended to the error message.
        """
        def error_handler_decorator(func):
            @wraps(func)
            def func_wrapper(*args, **kwargs):
                try:
                    return func(*args, **kwargs)
                except Exception as e:
                    # BUG FIX: ``func.func_name`` only exists on Python 2;
                    # ``func.__name__`` (preserved by @wraps) works on both
                    # Python 2 and 3.
                    raise PinoteError("Error while executing '{}' -- {}\n{}"
                                      .format(func.__name__, repr(e), msg))
            return func_wrapper
        return error_handler_decorator
class Pinote(object):
    """Client for Pinboard notes.

    Reads go through the JSON API (HTTP basic auth); writes (add/edit/delete)
    go through a scraped, logged-in web session, since the public API exposes
    no note-mutation endpoints.
    """

    _cached_session = None  # lazily-created logged-in requests.Session
    _delete_token = None    # CSRF token scraped from the notes page

    def __init__(self, username, password):
        self.username = username
        self.password = password
        self.basic_auth = requests.auth.HTTPBasicAuth(username, password)
        self.post_auth = {'username': username, 'password': password}
        # BUG FIX: the original applied ``@atexit.register`` to ``__del__``
        # inside the class body, registering the *unbound* function; at
        # interpreter exit it would be called without ``self`` and raise
        # TypeError. Register the bound method per instance instead (note:
        # this keeps the instance alive until interpreter exit).
        atexit.register(self.__del__)

    @property
    def __session(self):
        """Logged-in web session, created on first use."""
        if not self._cached_session:
            self.__login()
        return self._cached_session

    def __login(self):
        """Log in to pinboard.in, caching the session on success."""
        self._cached_session = requests.Session()
        r = self._cached_session.post(
            'https://pinboard.in/auth/', data=self.post_auth,
            headers=HEADERS, allow_redirects=False)
        r.raise_for_status()
        # BUG FIX: guard against a missing Location header -- ``'error' in
        # None`` raises TypeError and would mask the real login problem.
        if 'error' in (r.headers.get('location') or ''):
            self._cached_session = None
            raise Exception('Invalid login')

    def __del__(self):
        # Best-effort logout; never raise during interpreter teardown
        # (also covers the case where no session was ever created).
        try:
            self._cached_session.get('https://pinboard.in/logout/', allow_redirects=False)
        except Exception:
            pass

    @PinoteError.error_handler()
    def add_note(self, title, note, tags, use_markdown=False, public=False):
        """Create a note via the scraped web form."""
        visibility = 'public' if public else 'private'
        data = {
            'title': title,
            'note': note,
            'tags': tags,
            'use_markdown': '1' if use_markdown else '0',
            'submit': 'save ' + visibility,
            'action': 'save_' + visibility
        }
        r = self.__session.post('https://pinboard.in/note/add/',
                                data=data, headers=HEADERS,
                                allow_redirects=False)
        r.raise_for_status()

    @PinoteError.error_handler()
    def get_all_notes(self):
        """Return the note list from the JSON API (basic auth, no session)."""
        r = requests.get('https://api.pinboard.in/v1/notes/list?format=json',
                         auth=self.basic_auth)
        r.raise_for_status()
        return r.json()

    @PinoteError.error_handler()
    def get_note_details(self, note_id):
        """Return a single note's metadata/body from the JSON API."""
        r = requests.get('https://api.pinboard.in/v1/notes/{}?format=json'.format(note_id),
                         auth=self.basic_auth)
        r.raise_for_status()
        return r.json()

    @PinoteError.error_handler()
    def get_note_html(self, note_id):
        """Return the rendered note HTML (a <blockquote class="note"> tag)."""
        r = self.__session.get(
            'https://notes.pinboard.in/u:{}/notes/{}'.format(self.username, note_id))
        r.raise_for_status()
        html = r.text
        soup = BeautifulSoup(html, "lxml")
        note_html = soup.find('blockquote', {'class': 'note'})
        return note_html

    @PinoteError.error_handler()
    def edit_note(self, title, note, note_id, use_markdown=False):
        """Update an existing note via the scraped edit form."""
        data = {
            'slug': note_id,
            'action': 'update',
            'title': title,
            'note': note,
            # NOTE: the edit form uses 'on'/'off' while the add form uses
            # '1'/'0' -- both are what the respective forms submit.
            'use_markdown': 'on' if use_markdown else 'off'
        }
        r = self.__session.post('https://notes.pinboard.in/u:{}/notes/{}/edit/'
                                .format(self.username, note_id), data=data, headers=HEADERS)
        r.raise_for_status()

    @PinoteError.error_handler()
    def delete_note(self, note_id):
        """Delete a note, scraping the CSRF token once and caching it."""
        if not self._delete_token:
            r = self.__session.get(
                'https://notes.pinboard.in', headers=HEADERS)
            r.raise_for_status()
            html = r.text
            soup = BeautifulSoup(html, "lxml")
            self._delete_token = soup.find(
                'input', {'name': 'token'}).get('value')
        data = {
            'token': self._delete_token,
            'action': 'delete_note',
            'id': note_id
        }
        r = self.__session.post('https://notes.pinboard.in/',
                                data=data, headers=HEADERS)
        r.raise_for_status()
| {
"repo_name": "HyShai/PinoteBetter",
"path": "pinote.py",
"copies": "1",
"size": "4423",
"license": "mit",
"hash": -5297122452093000000,
"line_mean": 33.2868217054,
"line_max": 92,
"alpha_frac": 0.5369658603,
"autogenerated": false,
"ratio": 3.9632616487455197,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9998512812481365,
"avg_score": 0.0003429393128309263,
"num_lines": 129
} |
from functools import wraps
import base64
import json
from django.core.urlresolvers import reverse
from django.http import HttpResponseRedirect
from simpleoncall.api import APIResponse
from simpleoncall.models import TeamMember, APIKey
from simpleoncall.internal import InternalResponse
def require_authentication(require_team=True, internal=False):
    """Decorator factory: require a logged-in user (and, optionally, team
    membership) before running the view.

    Unauthenticated users are sent to the login page; authenticated users
    without a team are sent to team creation. With ``internal=True`` the
    redirect is wrapped in an InternalResponse instead of an HTTP redirect.
    """
    def wrapped(func):
        @wraps(func)
        def _wrapped(request, *args, **kwargs):
            target = None
            if not request.user.is_authenticated():
                target = reverse('login')
            elif require_team and not TeamMember.objects.filter(user=request.user):
                target = reverse('create-team')
            if target is None:
                return func(request, *args, **kwargs)
            if internal:
                return InternalResponse(redirect=target)
            return HttpResponseRedirect(target)
        return _wrapped
    return wrapped
def require_selected_team(internal=False):
    """Decorator factory: ensure a team is selected in the session and that
    the user is still a member of it; attach it as ``request.team``.

    Falls back to the user's first team when none is selected; otherwise
    redirects to team selection (or returns an InternalResponse when
    ``internal=True``).
    """
    def wrapped(func):
        @wraps(func)
        def _wrapped(request, *args, **kwargs):
            if not request.session.get('team'):
                teams = TeamMember.objects.filter(user=request.user)
                if teams:
                    request.session['team'] = {
                        'id': teams[0].team.id,
                        'name': teams[0].team.name,
                    }
                elif internal:
                    return InternalResponse(redirect=reverse('select-team'))
                else:
                    return HttpResponseRedirect(reverse('select-team'))
            # BUG FIX: the original unpacked ``request.session['team'].values()``
            # into (team_id, team_name) -- dict value order is arbitrary on
            # Python 2 / pre-3.7, so the id and name could be swapped.
            # Read the keys explicitly instead.
            team_id = request.session['team']['id']
            teams = TeamMember.objects.filter(user=request.user, team_id=team_id)
            if not teams:
                if internal:
                    return InternalResponse(redirect=reverse('select-team'))
                else:
                    return HttpResponseRedirect(reverse('select-team'))
            setattr(request, 'team', teams[0].team)
            return func(request, *args, **kwargs)
        return _wrapped
    return wrapped
def parse_body():
    """Decorator factory: parse a POST request's JSON body and attach it to
    the request as ``request.data`` (None for non-POST or empty bodies)."""
    def wrapped(func):
        @wraps(func)
        def _wrapped(request, *args, **kwargs):
            payload = None
            if request.method == 'POST':
                raw = request.read()
                payload = json.loads(raw) if raw else raw
            request.data = payload
            return func(request, *args, **kwargs)
        return _wrapped
    return wrapped
def requires_api_key():
    """Decorator factory: authenticate the request via HTTP Basic credentials
    matched against an APIKey row; attach it as ``request.api_key``.

    Returns a 401 APIResponse when the header is missing, malformed, or does
    not match a key.
    """
    def wrapped(func):
        @wraps(func)
        def _wrapped(request, *args, **kwargs):
            auth = request.META.get('HTTP_AUTHORIZATION')
            if not auth:
                return APIResponse(error='Not Authorized', status_code=401)
            # BUG FIX: malformed headers previously raised (500) instead of
            # returning 401.
            try:
                _, auth = auth.split(' ', 1)
                username, password = base64.b64decode(auth).split(':', 1)
            except (ValueError, TypeError):
                return APIResponse(error='Not Authorized', status_code=401)
            # BUG FIX: ``.get()`` raises DoesNotExist for unknown keys, and
            # the original ``if not api_key: pass`` silently did nothing --
            # use ``.first()`` and reject missing keys explicitly.
            api_key = APIKey.objects.filter(
                username=username, password=password).first()
            if not api_key:
                return APIResponse(error='Not Authorized', status_code=401)
            setattr(request, 'api_key', api_key)
            return func(request, *args, **kwargs)
        return _wrapped
    return wrapped
| {
"repo_name": "simpleoncall/simpleoncall",
"path": "simpleoncall/decorators.py",
"copies": "1",
"size": "3260",
"license": "mit",
"hash": 543894152811791800,
"line_mean": 31.2772277228,
"line_max": 81,
"alpha_frac": 0.5564417178,
"autogenerated": false,
"ratio": 4.690647482014389,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5747089199814389,
"avg_score": null,
"num_lines": null
} |
from functools import wraps
import base64
from django.http import HttpResponse
from django.contrib.auth import authenticate, login
from django.conf import settings
from ping.defaults import PING_BASIC_AUTH
def http_basic_auth(func):
    """
    Attempts to login user with u/p provided in HTTP_AUTHORIZATION header.
    If successful, returns the view, otherwise returns a 401.
    If PING_BASIC_AUTH is False, then just return the view function
    Modified code by:
    http://djangosnippets.org/users/bthomas/
    from
    http://djangosnippets.org/snippets/1304/
    """
    @wraps(func)
    def _decorator(request, *args, **kwargs):
        if getattr(settings, 'PING_BASIC_AUTH', PING_BASIC_AUTH):
            # FIX: ``request.META.has_key(...)`` is Python-2-only; ``in``
            # works on both. The redundant inner re-import of
            # authenticate/login (already imported at module level) is
            # removed.
            if 'HTTP_AUTHORIZATION' in request.META:
                authmeth, auth = request.META['HTTP_AUTHORIZATION'].split(' ', 1)
                if authmeth.lower() == 'basic':
                    # FIX: str.decode('base64') is Python-2-only; use the
                    # base64 module (already imported by this file).
                    auth = base64.b64decode(auth.strip())
                    username, password = auth.split(':', 1)
                    user = authenticate(username=username, password=password)
                    if user:
                        login(request, user)
                        return func(request, *args, **kwargs)
                    else:
                        return HttpResponse("Invalid Credentials", status=401)
                # NOTE: a non-"basic" auth method falls through and returns
                # None, matching the original behavior.
            else:
                return HttpResponse("No Credentials Provided", status=401)
        else:
            return func(request, *args, **kwargs)
    return _decorator
"repo_name": "gpennington/django-ping",
"path": "ping/decorators.py",
"copies": "2",
"size": "1580",
"license": "mit",
"hash": -1756463748971201500,
"line_mean": 38.525,
"line_max": 81,
"alpha_frac": 0.6050632911,
"autogenerated": false,
"ratio": 4.463276836158192,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.008207327819321635,
"num_lines": 40
} |
from functools import wraps
import base
import counters
def count(name=None, auto_add_counter=counters.EventCounter):
    """
    A shortcut decorator to count the number times a function is called. Uses the :obj:`counters.EventCounter` counter by default.
    If the parameter name is not supplied events are reported under the name of the wrapped function.
    """
    # Delegates to the shared decorator/context-manager helper; counter
    # registration happens there (at decoration time when name is omitted).
    return _reporting_decorator_context_manager(name, auto_add_counter=auto_add_counter)
def value(name, value, auto_add_counter=counters.AverageWindowCounter):
    """Report a single measured value under *name*.

    Registers an ``auto_add_counter`` for *name* on first use (unless it is
    falsy), then dispatches a "value" event to the thread dispatcher.
    """
    if auto_add_counter:
        # auto-register the counter the first time this name is reported
        if not base.GLOBAL_REGISTRY.get_counter(name, throw=False):
            base.GLOBAL_REGISTRY.add_counter(auto_add_counter(name), throw=False)
    base.THREAD_DISPATCHER.dispatch_event(name, "value", value)
def occurrence(name, auto_add_counter=counters.FrequencyCounter):
    """Report a single occurrence of the event *name*.

    Registers an ``auto_add_counter`` for *name* on first use (unless it is
    falsy), then dispatches an "end" event to the thread dispatcher.
    """
    if auto_add_counter:
        # auto-register the counter the first time this name is reported
        if not base.GLOBAL_REGISTRY.get_counter(name, throw=False):
            base.GLOBAL_REGISTRY.add_counter(auto_add_counter(name), throw=False)
    base.THREAD_DISPATCHER.dispatch_event(name, "end", None)
def frequency(name=None, auto_add_counter=counters.FrequencyCounter):
    """
    A shortcut decorator to count the frequency in which a function is called. Uses the :obj:`counters.FrequencyCounter` counter by default.
    If the parameter name is not supplied events are reported under the name of the wrapped function.
    """
    # Same mechanism as count()/time(); only the default counter type differs.
    return _reporting_decorator_context_manager(name, auto_add_counter=auto_add_counter)
def time(name=None, auto_add_counter=counters.AverageTimeCounter):
    """
    A shortcut decorator to count the average execution time of a function. Uses the :obj:`counters.AverageTimeCounter` counter by default.
    If the parameter name is not supplied events are reported under the name of the wrapped function.
    """
    # NOTE: this shadows the stdlib ``time`` module name within this module.
    return _reporting_decorator_context_manager(name, auto_add_counter=auto_add_counter)
class _reporting_decorator_context_manager(object):
    """Dual-purpose helper behind count()/frequency()/time(): usable both as
    a decorator and as a ``with`` context manager. Emits "start"/"end"
    events around the wrapped call or the managed block.
    """

    def __init__(self, name, auto_add_counter=None):
        self.name = name
        self.auto_add_counter = auto_add_counter
        if auto_add_counter and name:
            # we have a name, we can register things now. O.w. this must be used as a decorator.
            # name will be registered then and there.
            cntr = base.GLOBAL_REGISTRY.get_counter(name, throw=False)
            if not cntr:
                base.GLOBAL_REGISTRY.add_counter(auto_add_counter(name), throw=True)

    def __call__(self, f):
        """Decorator form: wrap *f* with start/end event dispatch."""
        event_name = self.name
        if not self.name:
            # Fall back to the wrapped function's name as the event name.
            event_name = f.__name__
            # we don't have stored name... counter needs to be registered.
            if self.auto_add_counter:
                cntr = base.GLOBAL_REGISTRY.get_counter(event_name, throw=False)
                if not cntr:
                    base.GLOBAL_REGISTRY.add_counter(self.auto_add_counter(event_name), throw=True)

        @wraps(f)
        def wrapper(*args, **kwargs):
            base.THREAD_DISPATCHER.dispatch_event(event_name, "start", None)
            try:
                r = f(*args, **kwargs)
            finally:
                ## make sure calls are balanced
                base.THREAD_DISPATCHER.dispatch_event(event_name, "end", None)
            return r
        return wrapper

    def __enter__(self):
        # Context-manager form requires an explicit name (there is no
        # function to borrow one from).
        if not self.name:
            raise Exception("PyCounters context manager used without defining a name.")
        base.THREAD_DISPATCHER.dispatch_event(self.name, "start", None)

    def __exit__(self, *args, **kwargs):
        base.THREAD_DISPATCHER.dispatch_event(self.name, "end", None)
| {
"repo_name": "prismskylabs/pycounters",
"path": "src/pycounters/shortcuts.py",
"copies": "1",
"size": "3961",
"license": "apache-2.0",
"hash": 5936524676374575000,
"line_mean": 40.6947368421,
"line_max": 144,
"alpha_frac": 0.6632163595,
"autogenerated": false,
"ratio": 4.0091093117408905,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.0021094112067333603,
"num_lines": 95
} |
from functools import wraps
import bsddb3 as bsddb
import json
import pdb
from sqlalchemy import create_engine, engine
from db import *
def get_cache():
    """Return (engine, connection) for the local query-result cache database,
    creating the ``cache`` table on first use."""
    eng = db_connect('cache')
    db = eng.connect()
    try:
        db.execute("create table cache(key varchar, val text)")
    except Exception:
        # BUG FIX: was a bare ``except:`` which also swallows
        # KeyboardInterrupt/SystemExit. The table most likely already
        # exists; the cache is best-effort, so ignore.
        pass
    return eng, db
def make_cache(f):
@wraps(f)
def _f(self, *args, **kwargs):
try:
key = str(map(str, (f.__name__, self.engine, self.tablename, self.where, self.nbuckets, map(str, args))))
#print key
vals = self._cache.execute('select val from cache where key = %s', key).fetchall()
if len(vals):
return json.loads(vals[0][0])
except Exception as e:
print e
pdb.set_trace()
with self._cache.begin() as txn:
res = f(self, *args, **kwargs)
if key:
self._cache.execute('insert into cache values(%s, %s)', key, json.dumps(res, default=json_handler))
return res
return None
return _f
def json_handler(o):
    """``json.dumps`` ``default`` hook: serialize date/datetime-like objects
    through their ``isoformat()`` method; anything else yields None."""
    isoformat = getattr(o, 'isoformat', None)
    if isoformat is not None:
        return isoformat()
class Summary(object):
def __init__(self, dbname, tablename, nbuckets=50, where=''):
    """Build a column summarizer for *tablename*.

    ``dbname`` may be a MonetDB connection (detected by 'monetdb' in its
    repr), a SQLAlchemy Engine or Connection, or a database name passed to
    ``db_connect``. ``where`` is an optional SQL predicate (without the
    WHERE keyword); ``nbuckets`` bounds histogram/group sizes.
    """
    self.dbtype = 'pg'
    if 'monetdb' in str(dbname):
        # raw DB-API connection; no SQLAlchemy engine available
        self.engine = None
        self.db = dbname
        self.dbtype = 'monet'
    elif isinstance(dbname, engine.base.Engine):
        self.engine = dbname
        self.db = dbname.connect()
    elif isinstance(dbname, engine.base.Connection):
        self.engine = dbname.engine
        self.db = dbname
    else:
        self.engine = db_connect(dbname)
        self.db = self.engine.connect()
    self.tablename = tablename
    self.nbuckets = nbuckets
    self.where = ''
    where = where.strip()
    if where:
        # stored with the WHERE keyword so it can be spliced into queries
        self.where = 'WHERE %s' % where
    # separate engine/connection pair for the query-result cache
    self._engine, self._cache = get_cache()
    self.nrows = self.get_num_rows()
    self.col_types = self.get_columns_and_types()
def __call__(self):
    """Summarize every column: return a list of (column, type, stats)
    triples, skipping columns for which no stats could be computed."""
    collected = []
    for column, col_type in self.col_types:
        per_col = self.get_col_stats(column, col_type)
        if per_col is not None:
            collected.append((column, col_type, per_col))
    return collected
def close(self):
    """Best-effort teardown: close the data connection/engine and the cache
    connection/engine independently, so one failure does not leave the
    others open. (Py2 ``print`` statements, like the rest of this module.)
    """
    try:
        self.db.close()
    except Exception as e:
        print e
        pass
    try:
        self.engine.dispose()
    except Exception as e:
        # engine is None for monetdb connections; AttributeError lands here
        print e
        pass
    try:
        self._cache.close()
    except Exception as e:
        print e
        pass
    try:
        self._engine.dispose()
    except Exception as e:
        print e
        pass
    #print self._cache.closed, self.db.closed
def reset_cache(self):
    """Delete this engine+table's entries from the cache table."""
    # NOTE(review): after %-formatting, '%%%%' collapses to '%%' around the
    # engine/table substrings; whether the driver further reduces that to a
    # single SQL LIKE wildcard '%' depends on the DB-API paramstyle --
    # verify against the cache backend.
    q = """delete from cache where key like '%%%%%s%%%%%s%%%%'""" % (str(self.engine), self.tablename)
    self._cache.execute(q)
def query(self, q, *args):
    """
    Summaries using other engines only need to override this method
    """
    # SQLAlchemy path (postgres): execute on the connection directly.
    if self.dbtype == 'pg':
        print q
        return self.db.execute(q, *args).fetchall()
    else:
        # Raw DB-API path (monetdb): manual cursor, rollback on failure.
        cur = self.db.cursor()
        try:
            print q
            print args
            if args:
                cur.execute(q, args)
            else:
                cur.execute(q)
            ret = cur.fetchall()
            return ret
        except:
            # bare except is acceptable here: it re-raises after rollback
            self.db.rollback()
            raise
        finally:
            cur.close()
@make_cache
def get_num_rows(self):
q = "SELECT count(*) from %s" % self.tablename
return self.query(q)[0][0]
@make_cache
def get_distinct_count(self, col):
    """Number of distinct values in *col*, honoring the WHERE filter (cached)."""
    distinct_query = "SELECT count(distinct %s) FROM %s %s" % (col, self.tablename, self.where)
    return self.query(distinct_query)[0][0]
@make_cache
def get_column_counts(self, cols):
    """Distinct-value count for each column in *cols*, as {col: count} (cached)."""
    projection = ", ".join("count(distinct %s)" % c for c in cols)
    q = """SELECT %s FROM %s""" % (projection, self.tablename)
    row = tuple(self.query(q)[0])
    return dict(zip(cols, row))
@make_cache
def get_columns_and_types(self):
    """Return [(column_name, type_name)] for the table (cached)."""
    if self.dbtype == 'pg':
        # postgres catalog lookup: attnum > 0 skips system columns,
        # attisdropped filters dropped ones.
        q = """
        SELECT attname, pg_type.typname
        FROM pg_class, pg_attribute, pg_type
        WHERE relname = %s and
        pg_attribute.attrelid = pg_class.oid and
        pg_type.oid = atttypid and
        attnum > 0 and
        attisdropped = false;
        """
    else:
        # monetdb system-table equivalent
        q = """
        SELECT columns.name , columns.type
        FROM columns, tables
        WHERE tables.name = %s and
        tables.id = columns.table_id;
        """
    ret = []
    for (col, typ) in self.query(q, self.tablename):
        # force plain str -- drivers may return unicode/driver types
        ret.append((str(col), str(typ)))
    return ret
@make_cache
def get_columns(self):
    """Column names of the summarized table, via the engine's catalog (cached)."""
    if self.dbtype == 'pg':
        q = "select attname from pg_class, pg_attribute where relname = %s and attrelid = pg_class.oid and attnum > 0 and attisdropped = false;"
    else:
        q = "select columns.name from columns, tables where tables.name = %s and tables.id = columns.table_id;"
    return [str(attr) for (attr,) in self.query(q, self.tablename)]
@make_cache
def get_type(self, col_name):
    """Return the engine-reported type name of *col_name*, or None on any
    lookup failure (cached)."""
    if self.dbtype == 'pg':
        q = """SELECT pg_type.typname FROM pg_attribute, pg_class, pg_type where
        relname = %s and pg_class.oid = pg_attribute.attrelid and attname = %s and
        pg_type.oid = atttypid"""
    else:
        q = """SELECT columns.type from columns, tables
        WHERE tables.name = %s and tables.id = columns.table_id and columns.name = %s;
        """
    try:
        rows = self.query(q, self.tablename, col_name)
        row = rows[0]
        return row[0]
    except Exception as e:
        # unknown column / empty result: report and treat as untyped
        import traceback
        traceback.print_exc()
        return None
def get_col_groupby(self, col_name, col_type):
    """Pick a SQL GROUP BY expression for temporal columns.

    Returns an hour-truncation expression for 'time' columns, a
    date-granularity expression for date/timestamp columns, and None for
    anything else (including an unknown type).
    """
    if col_type is None:
        return None
    expr = None
    if col_type == 'time':
        expr = self.get_time_stats(col_name)
    if 'date' in col_type or 'timestamp' in col_type:
        expr = self.get_date_stats(col_name)
    return expr
@make_cache
def get_col_stats(self, col_name, col_type=None):
    """Dispatch to the type-appropriate stats routine for one column.

    Returns a list of {'val', 'count', 'range'} dicts, or None when the
    column cannot be summarized (cached).
    """
    if col_type is None:
        col_type = self.get_type(col_name)
    if col_type.startswith('_'):
        # leading underscore marks array types in the pg catalog -- skip
        return None
    numerics = ['int', 'float', 'double', 'numeric']
    is_numeric = any([s in col_type for s in numerics])
    if self.dbtype == 'pg' and is_numeric:
        # bucketed histogram (postgres only)
        stats = self.get_numeric_stats(col_name)
        return stats
    if any([s in col_type for s in ['char', 'text']]):
        # top-K most frequent values for text columns
        return self.get_char_stats(col_name)
    # temporal columns: group by a truncated time/date expression
    groupby = self.get_col_groupby(col_name, col_type)
    if groupby:
        stats = self.get_group_stats(col_name, groupby)
        return stats
    return None
def get_group_stats(self, col_name, groupby):
    """Group *col_name* by the SQL expression *groupby* and return per-group
    {'val', 'count', 'range'} dicts (at most nbuckets groups)."""
    template = """select %s as GRP, min(%s), max(%s), count(*)
    from %s %s group by GRP
    order by GRP limit %d"""
    sql = template % (groupby, col_name, col_name, self.tablename, self.where, self.nbuckets)
    results = []
    for (grp, minv, maxv, count) in self.query(sql):
        results.append({'val': grp, 'count': count, 'range': [minv, maxv]})
    return results
def get_numeric_stats(self, c):
    """Histogram stats for a numeric column *c*.

    Returns [] for an all-NULL/empty column, a single bucket when the
    column has exactly one distinct value, otherwise ~nbuckets buckets of
    {'val': midpoint, 'count', 'range': [min, max]} keyed on a
    stddev-scaled bucket width.
    """
    ndistinct = self.get_distinct_count(c)
    if ndistinct == 0:
        return []
    if ndistinct == 1:
        # single distinct value: one degenerate bucket covering all rows
        if self.where:
            q = "SELECT %s from %s %s AND %s is not null"
            args = (c, self.tablename, self.where, c)
        else:
            q = "SELECT %s from %s WHERE %s is not null"
            args = (c, self.tablename, c)
        val = self.query(q % args)[0][0]
        return [{'val': val, 'count': self.nrows, 'range': [val, val]}]
    # BUG FIX: removed a width_bucket-based histogram query that was built
    # here and then immediately overwritten by the query below -- it was
    # dead code.
    # Bucket width is 2.5 stddevs spread over nbuckets buckets.
    q = """
    with TMP as (
    SELECT 2.5 * stddev(%s) / %d as block FROM %s %s
    )
    SELECT (%s/block)::int*block as bucket,
    min(%s) as min,
    max(%s) as max,
    count(*) as count
    FROM %s, TMP
    %s
    GROUP BY bucket
    ORDER BY bucket
    """
    q = q % (c, self.nbuckets, self.tablename, self.where, c, c, c, self.tablename, self.where)
    stats = []
    for (val, minv, maxv, count) in self.query(q):
        if val is None:
            # NULL bucket: keep it, but with no midpoint
            stats.append({
                'val': None,
                'count': count,
                'range': [minv, maxv]
            })
        else:
            stats.append({
                'val': (maxv + minv) / 2.,
                'count': count,
                'range': [minv, maxv]
            })
    return stats
def get_char_stats(self, col_name):
    """Top-nbuckets most frequent values of a text column, as
    {'val', 'count', 'range'} dicts ordered by descending frequency.

    BUG FIX: removed three unreachable statements that followed the
    ``return`` in the original (dead code left over from an edit).
    """
    q = """select %s as GRP, min(%s), max(%s), count(*)
    FROM %s
    %s
    GROUP BY GRP
    ORDER BY count(*) desc
    LIMIT %d"""
    q = q % (col_name, col_name, col_name, self.tablename, self.where, self.nbuckets)
    rows = [{'val': x, 'count': count, 'range': [minv, maxv]} for (x, minv, maxv, count) in self.query(q)]
    return rows
def get_time_stats(self, col_name):
    """SQL expression truncating a time column to the hour, per backend."""
    if self.dbtype != 'pg':
        # monetdb: subtract the sub-hour components manually
        return """cast((%s
        - cast(extract(second from %s) as interval second)
        - cast(extract(minute from %s) as interval minute))
        as time)""" % (col_name, col_name, col_name)
    return "date_trunc('hour', %s)::time" % col_name
def get_date_stats(self, col_name):
    """Pick a date/timestamp GROUP BY expression whose granularity (hour,
    day, week, month, year) matches the column's observed span.

    Returns None when the column has no non-NULL values.
    """
    # First compute the span (in minutes) between min and max values.
    if self.dbtype == "pg":
        q = "select max(%s)::date, min(%s)::date, EXTRACT(EPOCH FROM (max(%s::timestamp) - min(%s::timestamp)))/60 as minutes from %s"
    else:
        q = """select cast(max(%s) as date), cast(min(%s) as date),
        cast(max(cast(%s as timestamp)) - min(cast(%s as timestamp)) as bigint)/1000/60 as minutes
        from %s"""
    q = q % (col_name, col_name, col_name, col_name, self.tablename)
    (maxv, minv, nminutes) = self.query(q)[0]
    if maxv is None or minv is None or nminutes is None:
        return None
    ndays = nminutes / 60 / 24
    # backend-specific cast of the column to timestamp
    if self.dbtype == 'pg':
        var = "%s::timestamp" % col_name
    else:
        var = "cast(%s as timestamp)" % col_name
    if self.dbtype == 'pg':
        # granularity thresholds: <1 day -> hour, <=30 days -> day,
        # <=~1yr of weeks -> week, <=12yrs -> month, else year
        if ndays == 0:
            groupby = "date_trunc('hour', %s)" % var
        elif ndays <= 30:
            groupby = "date_trunc('day', %s)" % var
        elif ndays <= 50 * 7:
            groupby = "date_trunc('week', %s)" % var
        elif ndays <= 365 * 12:
            groupby = "date_trunc('month', %s)" % var
        else:
            groupby = "date_trunc('year', %s)" % var
    else:
        # monetdb lacks date_trunc; emulate by subtracting components
        if ndays == 0:
            groupby = """(%s
            - cast(extract(second from %s) as interval second)
            - cast(extract(minute from %s) as interval minute))
            """ % (var, var, var)
        elif ndays <= 30:
            groupby = "cast(cast(%s as date) as timestamp)" % var
        elif ndays <= 365 * 12:
            groupby = "cast(%s as date) - cast(extract(day from %s) as interval day)" % (var, var)
        else:
            groupby = """(cast(%s as date)
            - cast(extract(day from %s) as interval day)
            - cast(extract(month from %s) as interval month)
            """ % (var, var, var)
    return groupby
| {
"repo_name": "sirrice/dbwipes",
"path": "dbwipes/summary.py",
"copies": "1",
"size": "11293",
"license": "mit",
"hash": 7347302531560040000,
"line_mean": 26.3438256659,
"line_max": 142,
"alpha_frac": 0.5644204374,
"autogenerated": false,
"ratio": 3.308819220627014,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.926397438571928,
"avg_score": 0.021853054461546687,
"num_lines": 413
} |
from functools import wraps
import certifi
from flask import session
from grano.core import oauth, app
from grano.lib.serialisation import jsonify
class Stub():
    """Placeholder OAuth handler standing in for a provider that has no
    credentials configured. Mimics the small remote-app surface the auth
    views use, always answering 501."""

    def __init__(self, name):
        self.name = name

    def authorize(self, **kwargs):
        """Return a 501 JSON response explaining the provider is unconfigured."""
        payload = {
            'status': 501,
            'name': 'Provider not configured: %s' % self.name,
            'message': 'There are no credentials given for %s' % self.name,
        }
        return jsonify(payload, status=501)

    def authorized_handler(self, f):
        """Wrap the callback so it always yields the 501 response."""
        @wraps(f)
        def inner(*args, **kwargs):
            return self.authorize()
        return inner
# Default every provider to a Stub so lookups never fail; entries are
# replaced below when the matching credentials are configured.
PROVIDERS = {
    'github': Stub('github'),
    'twitter': Stub('twitter'),
    'facebook': Stub('facebook')
}

if app.config.get('GITHUB_CLIENT_ID') is not None:
    # GitHub is OAuth2: no request-token step.
    PROVIDERS['github'] = oauth.remote_app('github',
        base_url='https://github.com/login/oauth/',
        authorize_url='https://github.com/login/oauth/authorize',
        request_token_url=None,
        access_token_url='https://github.com/login/oauth/access_token',
        consumer_key=app.config.get('GITHUB_CLIENT_ID'),
        consumer_secret=app.config.get('GITHUB_CLIENT_SECRET'))
    # point the underlying HTTP client at certifi's CA bundle for TLS
    PROVIDERS['github']._client.ca_certs = certifi.where()

if app.config.get('TWITTER_API_KEY') is not None:
    # Twitter is OAuth1: needs a request-token URL plus a token getter.
    twitter = oauth.remote_app('twitter',
        base_url='https://api.twitter.com/1.1/',
        request_token_url='https://api.twitter.com/oauth/request_token',
        access_token_url='https://api.twitter.com/oauth/access_token',
        authorize_url='https://api.twitter.com/oauth/authenticate',
        consumer_key=app.config.get('TWITTER_API_KEY'),
        consumer_secret=app.config.get('TWITTER_API_SECRET'))

    @twitter.tokengetter
    def get_twitter_token(token=None):
        # token is expected in the Flask session (set by the auth callback)
        return session.get('twitter_token')

    PROVIDERS['twitter'] = twitter

if app.config.get('FACEBOOK_APP_ID') is not None:
    facebook = oauth.remote_app('facebook',
        base_url='https://graph.facebook.com/',
        request_token_url=None,
        access_token_url='/oauth/access_token',
        authorize_url='https://www.facebook.com/dialog/oauth',
        consumer_key=app.config.get('FACEBOOK_APP_ID'),
        consumer_secret=app.config.get('FACEBOOK_APP_SECRET'),
        request_token_params={'scope': 'email'})

    @facebook.tokengetter
    def get_facebook_token(token=None):
        return session.get('facebook_token')

    PROVIDERS['facebook'] = facebook
| {
"repo_name": "4bic/grano",
"path": "grano/providers.py",
"copies": "4",
"size": "2563",
"license": "mit",
"hash": 3720114015045215000,
"line_mean": 32.2857142857,
"line_max": 75,
"alpha_frac": 0.6394849785,
"autogenerated": false,
"ratio": 3.6354609929078014,
"config_test": true,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.6274945971407802,
"avg_score": null,
"num_lines": null
} |
from functools import wraps
import click
from botocore.exceptions import ClientError
from commands import command
import listobj
def handle_not_subscribed(user, address, list_address):
    """Print a not-subscribed error to stderr, phrased in second person when
    the address belongs to the invoking user."""
    if user == address:
        message = 'You are not subscribed to {}.'.format(list_address)
    else:
        message = '{} is not subscribed to {}.'.format(address, list_address)
    click.echo(message, err=True)
def handle_insufficient_permissions(action):
    """Print a permissions error for *action* to stderr."""
    message = 'You do not have sufficient permissions to {}.'.format(action)
    click.echo(message, err=True)
def handle_invalid_list_address(list_address):
    """Print an invalid-list-address error to stderr."""
    message = '{} is not a valid list address.'.format(list_address)
    click.echo(message, err=True)
def require_list(f):
    """Decorator: resolve ``ctx.obj.list_address`` into ``ctx.obj.listobj``
    before running the command; report and bail out when invalid."""
    @wraps(f)
    @click.pass_context
    def wrapper(ctx, *args, **kwargs):
        try:
            ctx.obj.listobj = listobj.List(ctx.obj.list_address)
        except (ValueError, ClientError):
            handle_invalid_list_address(ctx.obj.list_address)
            ctx.obj.listobj = None
        if ctx.obj.listobj is not None:
            return f(ctx, *args, **kwargs)
        return None
    return wrapper
@command.group(name='list')
@click.argument('list_address')
@click.pass_context
def list_command(ctx, list_address):
    # Group entry point: stash the target list address for subcommands.
    # (No docstring on purpose -- click would surface it as help text.)
    ctx.obj.list_address = list_address
@list_command.command()
@click.argument('address', required=False)
@require_list
def subscribe(ctx, address=None):
    # Subscribe ADDRESS (default: the invoking user) to the list.
    if address is None:
        address = ctx.obj.user
    try:
        ctx.obj.listobj.user_subscribe_user(ctx.obj.user, address)
        click.echo('{} has been subscribed to {}.'.format(address, ctx.obj.list_address))
    except listobj.InsufficientPermissions:
        handle_insufficient_permissions('subscribe {} to {}.'.format(address, ctx.obj.list_address))
    except listobj.AlreadySubscribed:
        click.echo('{} is already subscribed to {}.'.format(address, ctx.obj.list_address), err=True)
    except listobj.ClosedSubscription:
        # closed lists are reported as if the address were invalid
        handle_invalid_list_address(ctx.obj.list_address)
@list_command.command()
@click.argument('address', required=False)
@require_list
def unsubscribe(ctx, address=None):
    # Unsubscribe ADDRESS (default: the invoking user) from the list.
    if address is None:
        address = ctx.obj.user
    try:
        ctx.obj.listobj.user_unsubscribe_user(ctx.obj.user, address)
        click.echo('{} has been unsubscribed from {}.'.format(address, ctx.obj.list_address))
    except listobj.InsufficientPermissions:
        handle_insufficient_permissions('unsubscribe {} from {}.'.format(address, ctx.obj.list_address))
    except listobj.NotSubscribed:
        handle_not_subscribed(ctx.obj.user, address, ctx.obj.list_address)
    except listobj.ClosedUnsubscription:
        click.echo('{} does not allow members to unsubscribe themselves. Please contact the list administrator to be removed from the list.'.format(ctx.obj.list_address), err=True)
def accept_invitation(ctx, token, action, success_msg):
    """Validate a signed (un)subscription invitation *token* and apply
    *action* (one of the listobj ``accept_*`` callables) for the invoking
    user; all failures are reported to stderr rather than raised."""
    # imported lazily: control imports this module, so a top-level import
    # would be circular
    from control import ExpiredSignatureException, InvalidSignatureException
    try:
        action(ctx.obj.user, token)
        click.echo(success_msg)
    except ExpiredSignatureException:
        click.echo('The invitation has expired.', err=True)
    except InvalidSignatureException:
        click.echo('The invitation is not valid for {}.'.format(ctx.obj.user), err=True)
    except listobj.AlreadySubscribed:
        click.echo('You are already subscribed to {}.'.format(ctx.obj.list_address), err=True)
    except listobj.NotSubscribed:
        click.echo('You are not subscribed to {}.'.format(ctx.obj.list_address), err=True)
@list_command.command()
@click.argument('token')
@require_list
def accept_subscription_invitation(ctx, token):
    # Redeem a signed subscription invitation TOKEN.
    accept_invitation(
        ctx,
        token,
        ctx.obj.listobj.accept_subscription_invitation,
        'You are now subscribed to {}.'.format(ctx.obj.list_address),
    )
@list_command.command()
@click.argument('token')
@require_list
def accept_unsubscription_invitation(ctx, token):
    # Redeem a signed unsubscription invitation TOKEN.
    accept_invitation(
        ctx,
        token,
        ctx.obj.listobj.accept_unsubscription_invitation,
        'You are no longer subscribed to {}.'.format(ctx.obj.list_address),
    )
def ctx_set_member_flag_value(ctx, address, flag, value):
    """Shared body of setflag/unsetflag: set or unset a member *flag* on
    *address* (default: the invoking user); with no flag given, list the
    caller's own flags instead."""
    if flag is None:
        # No flag given: dump the caller's own flags and exit.
        try:
            click.echo('Available flags:')
            for flag, value in ctx.obj.listobj.user_own_flags(ctx.obj.user):
                click.echo('{}: {}'.format(flag.name, value))
        except listobj.NotSubscribed:
            handle_not_subscribed(ctx.obj.user, ctx.obj.user, ctx.obj.list_address)
        return
    if address is None:
        address = ctx.obj.user
    try:
        ctx.obj.listobj.user_set_member_flag_value(ctx.obj.user, address, flag, value)
        click.echo('{} flag {} on {}.'.format('Set' if value else 'Unset', flag, address))
    except listobj.NotSubscribed:
        handle_not_subscribed(ctx.obj.user, address, ctx.obj.list_address)
    except listobj.InsufficientPermissions:
        handle_insufficient_permissions('change the {} flag on {}.'.format(flag, address))
    except listobj.UnknownFlag:
        click.echo('{} is not a valid flag.'.format(flag), err=True)
@list_command.command()
@click.argument('flag', required=False)
@click.argument('address', required=False)
@require_list
def setflag(ctx, flag=None, address=None):
    # Set FLAG on ADDRESS (or list available flags when FLAG is omitted).
    ctx_set_member_flag_value(ctx, address, flag, True)
@list_command.command()
@click.argument('flag', required=False)
@click.argument('address', required=False)
@require_list
def unsetflag(ctx, flag=None, address=None):
    # Unset FLAG on ADDRESS (or list available flags when FLAG is omitted).
    ctx_set_member_flag_value(ctx, address, flag, False)
@list_command.command(name='set')
@click.argument('option', required=False)
@click.argument('value', required=False)
@click.option('--true', 'boolean', flag_value=True)
@click.option('--false', 'boolean', flag_value=False)
@click.option('--int', 'integer', default=None, type=int)
@require_list
def set_config(ctx, option=None, value=None, boolean=None, integer=None):
    # Set OPTION to VALUE; with no OPTION, dump the current configuration.
    if option is None:
        try:
            click.echo('Configuration for {}:'.format(ctx.obj.list_address))
            for option, value in ctx.obj.listobj.user_config_values(ctx.obj.user):
                click.echo('{}: {}'.format(option, value))
        except listobj.InsufficientPermissions:
            handle_insufficient_permissions('view options on {}.'.format(ctx.obj.list_address))
        return
    # --true/--false/--int override the positional (string) value so typed
    # options can be set from the command line.
    if boolean is not None:
        value = boolean
    elif integer is not None:
        value = integer
    try:
        ctx.obj.listobj.user_set_config_value(ctx.obj.user, option, value)
        click.echo('Set {} to {} on {}.'.format(option, value, ctx.obj.list_address))
    except listobj.InsufficientPermissions:
        handle_insufficient_permissions('change {} on {}.'.format(option, ctx.obj.list_address))
    except listobj.UnknownOption:
        click.echo('{} is not a valid configuration option.'.format(option), err=True)
@list_command.command()
@require_list
def members(ctx):
    # Print the list's membership, if the caller is allowed to see it.
    try:
        click.echo('Members of {}:'.format(ctx.obj.list_address))
        for m in ctx.obj.listobj.user_get_members(ctx.obj.user):
            click.echo(m)
    except listobj.InsufficientPermissions:
        handle_insufficient_permissions('view the members of {}.'.format(ctx.obj.list_address))
@list_command.group(name='mod')
@require_list
def moderate(ctx):
    # Moderation subcommand group; the @require_list guard does all the work.
    pass
@moderate.command()
@click.argument('message_id')
@require_list
def approve(ctx, message_id):
    # Approve the held message MESSAGE_ID.
    try:
        ctx.obj.listobj.user_mod_approve(ctx.obj.user, message_id)
        click.echo('Post approved.')
    except listobj.InsufficientPermissions:
        handle_insufficient_permissions('moderate messages on {}.'.format(ctx.obj.list_address))
    except listobj.ModeratedMessageNotFound:
        click.echo('Message not found. It may already have been acted on.', err=True)
@moderate.command()
@click.argument('message_id')
@require_list
def reject(ctx, message_id):
    # Reject the held message MESSAGE_ID.
    try:
        ctx.obj.listobj.user_mod_reject(ctx.obj.user, message_id)
        click.echo('Post rejected.')
    except listobj.InsufficientPermissions:
        handle_insufficient_permissions('moderate messages on {}.'.format(ctx.obj.list_address))
    except listobj.ModeratedMessageNotFound:
        click.echo('Message not found. It may already have been acted on.', err=True)
| {
"repo_name": "ilg/LambdaMLM",
"path": "lambda/control/list_commands.py",
"copies": "1",
"size": "8395",
"license": "mit",
"hash": -6882759350887754000,
"line_mean": 38.0465116279,
"line_max": 181,
"alpha_frac": 0.6719475878,
"autogenerated": false,
"ratio": 3.6723534558180226,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9795203249898352,
"avg_score": 0.009819558743933941,
"num_lines": 215
} |
from functools import wraps
import collections
import crontab
import django
from django.core.management import get_commands, load_command_class
try:
from importlib import import_module
except ImportError:
from django.utils.importlib import import_module
from kronos.settings import PROJECT_MODULE, KRONOS_PYTHON, KRONOS_MANAGE, \
KRONOS_PYTHONPATH, KRONOS_POSTFIX, KRONOS_PREFIX, KRONOS_BREADCRUMB
from django.conf import settings
from kronos.version import __version__
import six
from django.utils.module_loading import autodiscover_modules
# A registered cron entry: display name, cron schedule string, rendered shell
# command, and the wrapped Python callable (None for Django management commands).
Task = collections.namedtuple('Task', ['name', 'schedule', 'command', 'function'])
# Global set of Task objects collected by the @register decorator.
registry = set()
def load():
    """
    Load ``cron`` modules for applications listed in ``INSTALLED_APPS``.

    Importing those modules triggers their @register decorators, which
    populate the module-level ``registry``.
    """
    autodiscover_modules('cron')
    if PROJECT_MODULE:
        if '.' in PROJECT_MODULE.__name__:
            # settings module lives inside a package: import <package>.cron too
            try:
                import_module('%s.cron' % '.'.join(
                    PROJECT_MODULE.__name__.split('.')[0:-1]))
            except ImportError as e:
                # a missing cron module is fine; anything else is reported
                if 'No module named' not in str(e):
                    print(e)
    # load django tasks
    for cmd, app in get_commands().items():
        try:
            load_command_class(app, cmd)
        except django.core.exceptions.ImproperlyConfigured:
            # skip commands that cannot be loaded without full configuration
            pass
# Shell-command templates rendered into crontab lines.
# KRONOS_TEMPLATE runs a kronos-registered function via `manage.py runtask`;
# DJANGO_TEMPLATE runs a plain Django management command directly.
KRONOS_TEMPLATE = \
    '%(prefix)s %(python)s %(manage)s ' \
    'runtask %(name)s %(passed_args)s --settings=%(settings_module)s ' \
    '%(postfix)s'
DJANGO_TEMPLATE = \
    '%(prefix)s %(python)s %(manage)s ' \
    '%(name)s %(passed_args)s --settings=%(settings_module)s ' \
    '%(postfix)s'
def process_args(args):
    """Serialize a dict of task arguments into ``key=value`` CLI tokens.

    Rules:
      * list values   -> ``key=a,b,c``
      * None values   -> ``key`` (bare flag)
      * string values -> ``key="value"`` (quoted)
      * other values  -> ``key=value``
      * dict values   -> TypeError (not supported)

    Uses ``dict.items()`` directly, which works on both Python 2 and 3, so
    the ``six.iteritems`` shim is unnecessary here.
    """
    res = []
    for key, value in args.items():
        if isinstance(value, dict):
            raise TypeError('Parse for dict arguments not yet implemented.')
        if isinstance(value, list):
            temp_args = ",".join(map(str, value))
            res.append("{}={}".format(key, temp_args))
        else:
            if value is None:
                arg_text = "{}"
            elif isinstance(value, str):
                arg_text = '{}="{}"'
            else:
                arg_text = '{}={}'
            res.append(arg_text.format(key, value))
    return res
def register(schedule, args=None):
    """Decorator registering a function (or Django command module function)
    as a cron task.

    *schedule* is a cron schedule string; *args* is an optional dict rendered
    into CLI arguments via process_args(). The decorated callable itself is
    returned unchanged in behavior (a transparent wrapper).

    Fix: removed the dead ``global registry_kronos, registry_django``
    statement -- those names are never defined or assigned anywhere; tasks
    are collected in the module-level ``registry`` set.
    """
    def decorator(function):
        passed_args = process_args(args) if args is not None else []
        # values interpolated into the crontab command templates
        ctx = {
            'prefix': KRONOS_PREFIX,
            'python': KRONOS_PYTHON,
            'manage': KRONOS_MANAGE,
            'passed_args': ' '.join(passed_args),
            'settings_module': settings.SETTINGS_MODULE,
            'postfix': KRONOS_POSTFIX
        }
        # Django management commands expose `handle`; plain functions do not.
        if hasattr(function, 'handle'):
            func = None
            tmpl = DJANGO_TEMPLATE
            name = function.__module__.split('.')[-1]
        else:
            func = function
            tmpl = KRONOS_TEMPLATE
            name = function.__name__
        command = tmpl % dict(ctx, name=name)
        if KRONOS_PYTHONPATH is not None:
            command += ' --pythonpath=%s' % KRONOS_PYTHONPATH
        registry.add(Task(name, schedule, command, func))
        @wraps(function)
        def wrapper(*args, **kwargs):
            return function(*args, **kwargs)
        return wrapper
    return decorator
def install():
    """
    Register tasks with cron.

    Loads every task module, appends one crontab entry per registered task
    (tagged with KRONOS_BREADCRUMB), writes the user's crontab, and returns
    the number of tasks installed.
    """
    load()
    cron = crontab.CronTab(user=True)
    for entry in registry:
        job = cron.new(entry.command, KRONOS_BREADCRUMB)
        job.setall(entry.schedule)
    cron.write()
    return len(registry)
def printtasks():
    """
    Print the tasks that would be installed in the
    crontab, for debugging purposes.

    Builds an in-memory crontab (never written) and renders it to stdout.
    """
    load()
    preview = crontab.CronTab('')
    for entry in registry:
        job = preview.new(entry.command, KRONOS_BREADCRUMB)
        job.setall(entry.schedule)
    print(preview.render())
def uninstall():
    """
    Uninstall tasks from cron.

    Removes every crontab entry tagged with KRONOS_BREADCRUMB and returns
    how many entries were removed.
    """
    cron = crontab.CronTab(user=True)
    removed = len(list(cron.find_comment(KRONOS_BREADCRUMB)))
    cron.remove_all(comment=KRONOS_BREADCRUMB)
    cron.write()
    return removed
def reinstall():
    """Uninstall then reinstall all tasks; returns (removed, installed)."""
    removed = uninstall()
    installed = install()
    return removed, installed
| {
"repo_name": "joshblum/django-kronos",
"path": "kronos/__init__.py",
"copies": "1",
"size": "4153",
"license": "mit",
"hash": 172252014169007330,
"line_mean": 25.9675324675,
"line_max": 82,
"alpha_frac": 0.5870455093,
"autogenerated": false,
"ratio": 3.8704566635601116,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9957110997770141,
"avg_score": 0.00007823501799405415,
"num_lines": 154
} |
from functools import wraps
import copy
import os
import inspect
import math
import maya.cmds as cmds
import pymel.core as pm
import pymel.core.datatypes as dt
import maya.mel as mm
# Tool version, assembled from its parts (e.g. "v.0.2.2017.10.1").
versionList=['v',0.2,2017,10,1]
version= ".".join([str(o) for o in versionList])
#################### Global Variable
# Scene linear unit at load time; restored after solvers force 'cm'.
sceneUnit= pm.currentUnit(l=True,q=True)
timeRange=1          # 1 = use active playback range, 0 = user-specified range
pickMethod=1         # 1 = hierarchy pick, 0 = selection pick
springMethod=1       # 1 = hair-dynamics solver, 0 = classic spring solver
startFrame = int(pm.playbackOptions(q=True,minTime=True))
endFrame = int(pm.playbackOptions(q=True,maxTime=True))
detailValue = 1      # hair detail: 0=Low, 1=Medium, 2=High
falloffValue = 0     # hair stiffness falloff: 0=Normal, 1=Quick, 2=Slow
dampValue=0.1        # hair damping
stiffValue=0.25      # hair stiffness
springValue=0.3      # classic spring amount
twistValue=0.3       # classic twist amount
loopValue=False      # loop the classic solve
# Directory containing this script, normalized to forward slashes.
scriptName = inspect.getframeinfo(inspect.currentframe()).filename
scriptPath = os.path.dirname(os.path.abspath(scriptName))
scriptPath = "/".join(scriptPath.split('\\'))
#mm.eval('source "%s/driveJointsWithHair.mel";' % scriptPath)
#################### UI Function Assign
#################### UI definition
#################### Function Definition
######## Misc Function
def getTimeRange(*arg):
    """Return the (startFrame, endFrame) tuple the tool should operate on.

    When the global ``timeRange`` flag is truthy the range is refreshed from
    the current playback options; otherwise the user-specified global values
    are kept as-is.

    Fix: removed the dead ``else`` branch that reassigned the globals to
    themselves (a no-op).
    """
    global startFrame
    global endFrame
    if timeRange:
        startFrame = int(pm.playbackOptions(q=True,minTime=True))
        endFrame = int(pm.playbackOptions(q=True,maxTime=True))
    return (startFrame, endFrame)
def alignOb(alignOb,ob):
    """Snap *ob* onto *alignOb* by copying its world-space transform matrix."""
    alignObMatrix = pm.xform(alignOb,ws=True,q=True,m=True)
    pm.xform(ob,m=alignObMatrix)
def constraintOb(ob,tar):
    """Point- and orient-constrain *tar* to *ob*."""
    pm.pointConstraint(ob,tar)
    pm.orientConstraint(ob,tar)
def getTranslate(ob):
    """Return *ob*'s world-space rotate-pivot position as a tuple."""
    tr = tuple(pm.xform(ob,ws=True,q=True,rp=True))
    return tr
def clearKeys(sfef):
    """Cut all keys on the current selection within the (start, end) tuple *sfef*."""
    pm.cutKey(time=sfef)
######## Classic Spring Magic
def springPasteBonePose():
    # NOTE(review): springUpAxis_comboBox is not defined anywhere in this
    # module -- calling this raises NameError. Looks like an unfinished UI stub.
    print type(springUpAxis_comboBox.getSelect())
def springBindPose():
    """Return the selected skeleton to its bind pose."""
    pm.runtime.GoToBindPose()
def springStraightBonePose(bone):
    """Straighten *bone*'s chain: zero rotation, rotateAxis and jointOrient on
    every joint except the chain's last one."""
    boneChain = getBoneChain(bone)
    if boneChain:
        for bone in boneChain[:-1]:
            bone.setRotation([0,0,0])
            bone.setAttr('rotateAxis', [0,0,0])
            bone.setAttr('jointOrient', [0,0,0])
def createEndJoint(bone):
    """Create and return a tip joint extending beyond *bone*.

    The tip is placed at twice the parent->bone offset past *bone*, then
    *bone* is reoriented (zso, xyz) so it aims at the new tip. Returns None
    implicitly when *bone* has no parent.
    """
    print bone
    if bone.getParent():
        jointRoot=bone.getParent()
        print jointRoot
        poList=[]
        # world positions: poList[0] = bone, poList[1] = its parent
        for j in [bone,jointRoot]:
            print j
            poList.append(dt.Vector(pm.xform(j,q=True,ws=True,t=True)))
        # push the tip out along the parent->bone direction
        endJointPos=(poList[0]-poList[1])*2+poList[0]
        print dt.length(poList[1]-poList[0])
        pm.select(bone,r=True)
        endJoint=pm.joint(p=endJointPos)
        pm.joint(bone,e=True,zso=True,oj='xyz')
        return endJoint
def createBoneFromSelection():
    """Build a joint chain through the currently selected transforms and
    constrain each source transform to its new joint.

    Returns a list of (joint, sourceTransform) tuples; the appended tip joint
    is paired with None. Returns None when nothing is selected.
    """
    bonObs =[]
    selection = pm.selected()
    if not selection:
        print "No Selection"
        return
    obRoot = selection[0].getParent()
    pm.select(obRoot)
    index = 0
    while index<len(selection):
        print index
        ob=selection[index]
        # each pm.joint call parents the new joint under the previous one
        bone = pm.joint(p=getTranslate(ob))
        bonObs.append((bone,ob))
        #constraintOb(bonObs[index][0],bonObs[index][1])
        index+=1
    #obRoot = pm.listRelatives(bonObs[0][1],p=True)
    endJoint=createEndJoint(bonObs[len(bonObs)-1][0])
    for obs in bonObs:
        pm.orientConstraint(obs[0],obs[1],mo=True)
        pm.pointConstraint(obs[0],obs[1],mo=True)
    bonObs.append((endJoint,None))
    print bonObs
    return bonObs
def createBone(Ob):
    """Build a joint chain mirroring *Ob*'s transform chain and orient-constrain
    the originals to the new joints.

    Returns [(joint, sourceTransform), ..., (tipJoint, None)].
    """
    pm.select(Ob,r=True)
    bonObs =[]
    obRoot = pm.listRelatives(Ob,p=True)
    obChain = getBoneChain(Ob)
    for s in obChain:
        index = obChain.index(s)
        bone =pm.joint(p=getTranslate(s))
        bonObs.append((pm.ls(bone)[0],s))
        if index==0:
            # re-parent the chain root next to the source hierarchy
            pm.parent(bone,obRoot)
        else:
            # orient the previous joint toward this new child
            pm.joint(str(bonObs[index-1][0]),e=True,zso=True,oj='xyz')
        #pm.orientConstraint(bone,s,mo=True)
    endJoint=createEndJoint(bonObs[len(bonObs)-1][0])
    for obs in bonObs:
        pm.orientConstraint(obs[0],obs[1],mo=True)
    bonObs.append((endJoint,None))
    return bonObs
def getBoneChain(bone):
    """Return [bone] + all descendant transforms as one flat stream, or False
    when *bone* is a root (overlap needs a moving parent)."""
    print bone
    # only apply on child bone, because we need a parent bone's motion to cause the overlapping
    if not bone.getParent():
        return False
    # get bone chain, only get one stream, will not process branches
    boneChain = []
    boneChain.append( bone )
    childList = pm.listRelatives(bone,ad=1,typ='transform')
    childList.reverse()
    boneChain.extend(childList)
    return boneChain
def springApply(pickedBone, pickedBones,springLoop=False,springRotateRate=0.3,springTwist=0.3):
    '''
    Get idea from max scripts 'SpringMagic' made by Sichuang Yuan, but make it more friendly with Maya

    Keys a frame-by-frame "spring" overlap onto pickedBone's chain between the
    global startFrame/endFrame. Non-joint inputs get a temporary joint chain
    built, solved, baked back and deleted.
    '''
    # since maya 2016, there is a new animation evaluation mode called "Parallel" which supose to improve
    # the runtime performance of maya, but the new function seems cause some problem when calculate spring magic.
    # so I'll revert evaluation mode back to old way during calculation and set it back after
    # store evaluationManager mode for 2016 or newer
    if pm.versions.current() > 201600:
        currentMode = pm.evaluationManager( q=1, mode = 1 )[0]
        pm.evaluationManager( mode = 'off' )
    # Check Unit
    if sceneUnit!='cm':
        pm.currentUnit(l='cm')
    # get pickedBone chain, only get one stream, will not process branches
    if pm.nodeType(pickedBone)=='joint':
        boneChain = getBoneChain(pickedBone)
    else:
        boneObs = createBone(pickedBone)
        boneChain=[b[0] for b in boneObs]
        pm.hide(boneChain)
    if not boneChain:
        return
    print pickedBone
    boneRoot = boneChain[0].getParent()
    # get frame range
    pm.currentTime( startFrame, edit=True )
    # get pickedBone chain start pose and key it
    boneStartRotation = {}
    # boneStartMatirx = {}
    for bone in boneChain:
        # reset bone rotate axis as 0
        # NOTE(review): this assignment REPLACES the bound setRotation method
        # with an attribute value -- probably meant
        # bone.setRotation(bone.getAttr('rotateAxis')). Confirm before changing.
        bone.setRotation = bone.getAttr('rotateAxis')
        pm.xform(bone, ra = [0,0,0])
        # get bone start rotation pose
        boneStartRotation[bone] = bone.getRotation()
        # boneStartMatirx[bone] = bone.getAttr('matrix')
        if not springLoop:
            pm.setKeyframe(bone, attribute = 'rotate')
            # delete key not at start frame
            pm.cutKey( bone, time=(startFrame + 1,endFrame) )
    # get bone start world translation
    boneWorldTranlation = {}
    previousBoneWorldTranlation = {}
    previousBoneWorldRotation = {}
    previousBoneRotateAxis = None
    loopCount = float(springLoop)
    pickedBoneCount = float(len(pickedBones))
    boneChainCount = float(len(boneChain))
    frameCount = float(endFrame-startFrame)
    # caculate part
    for loop in range( int(loopCount+1) ):
        for frame in range( startFrame, endFrame+1 ):
            pm.currentTime( frame, edit=True )
            for bone in boneChain:
                # get bone world translation & matrix
                boneWorldTranlation[bone] = dt.Vector( pm.xform(bone, worldSpace = 1, translation = 1, query = 1) )
                # skip caculate at first frame
                if previousBoneWorldTranlation:
                    # skip end bone
                    if not bone == boneChain[-1]:
                        # get child bone name for caculate
                        childBone = boneChain[boneChain.index(bone) + 1]
                        # get the vector from current position to previous child position
                        boneAimVector = (boneWorldTranlation[bone] - previousBoneWorldTranlation[childBone]).normal()
                        # restore current bone rotation
                        boneCurrentRotation = bone.getRotation()
                        # reset bone to start pose
                        bone.setAttr('rotate', boneStartRotation[bone])
                        childBoneHight = pm.xform( childBone, worldSpace = 1, translation = 1, query = 1 )
                        # get the vector of stance pose
                        stanceTranslation = ( childBone.getAttr('matrix') * childBone.getAttr('parentMatrix') ).translate
                        boneStanceVetor = (boneWorldTranlation[bone] - stanceTranslation).normal()
                        # get rotation axis and degrees bewteen two vectors
                        boneRotateDegrees = math.degrees( boneStanceVetor.angle( boneAimVector ) )
                        boneRotateAxis = boneStanceVetor.axis( boneAimVector )
                        # if the rotate axis fliped
                        if previousBoneRotateAxis:
                            if abs(math.degrees( previousBoneRotateAxis.angle( boneRotateAxis ))) > 90:
                                # then flip it back
                                boneRotateAxis = -boneRotateAxis
                                boneRotateDegrees = -boneRotateDegrees
                        previousBoneRotateAxis = boneRotateAxis
                        # set rotate rate
                        rotateRate = 1-float(springRotateRate)
                        upVector = dt.Vector(0,1,0)
                        rotateValue = upVector * (boneRotateDegrees*rotateRate)
                        # skip rotate bone if very close to start pose
                        if abs(boneRotateDegrees) > 0.001:
                            # match up bone and stance vector with aim constraint
                            lct = pm.spaceLocator()
                            lct.setTranslation( stanceTranslation )
                            cns = pm.aimConstraint( lct, bone, aimVector = [1,0,0], upVector = upVector, worldUpVector = boneRotateAxis )
                            # keep aim result before remove constraint
                            pm.setKeyframe(bone, attribute = 'rotate')
                            pm.delete( cns, lct )
                            # do rotate bone
                            pm.rotate(bone, rotateValue, objectSpace = 1, relative = 1)
                        else:
                            # use previous frame rotation
                            bone.setAttr('rotate', boneCurrentRotation)
                        pm.setKeyframe(bone, attribute = 'rotate')
                        # apply twist
                        twist = float(springTwist)
                        if 1 > twist:
                            # reset rotat x
                            bone.setAttr('rotateX', boneStartRotation[bone][0] )
                            # creat locator with stance rotation, slow step setMatrix, need optmaize
                            lct = pm.spaceLocator()
                            lct.setRotation(pm.xform(bone,q=1,worldSpace=1,rotation=1))
                            lct_p = pm.spaceLocator()
                            lct_p.setRotation(previousBoneWorldRotation[bone])
                            # get twist delay value
                            # creat orient constraint
                            cns_X = pm.orientConstraint( lct, lct_p, bone, skip = ['y','z'] )
                            # apply twist value to constraint
                            cns_X.setAttr(lct+'W0', twist)
                            cns_X.setAttr(lct_p+'W1', 1-twist)
                            # set rotate interp as shortest
                            cns_X.setAttr('interpType', 2)
                            # get caculated x
                            boneRotateX = bone.getAttr('rotateX')
                            # apply new rotate x
                            bone.setAttr('rotateX', boneRotateX)
                            pm.setKeyframe(bone, attribute = 'rotate')
                            pm.delete( cns_X, lct, lct_p )
                # save for next frame use
                previousBoneWorldRotation[bone] = pm.xform(bone,q=1,worldSpace=1,rotation=1)
                # print previousBoneWorldRotation[bone]
                #runProgressBar( main_progressBar, 1/(loopCount+1)*(1/pickedBoneCount)*(1/boneChainCount)*(1/(frameCount+1))*100 )
            # save for next frame use
            previousBoneWorldTranlation = copy.copy(boneWorldTranlation)
    print pickedBone,boneChain
    if pm.nodeType(pickedBone)!='joint':
        # bake the solve back onto the original transforms, then drop helpers
        for o in boneObs:
            if o[1]:
                pm.bakeResults(o[1],at=['rotate'], t=(startFrame,endFrame))
        pm.delete(boneChain)
    #return Unit
    pm.currentUnit(l=sceneUnit)
    # revert evaluationManager mode for 2016 or newer
    if pm.versions.current() > 201600:
        pm.evaluationManager( mode = currentMode )
####### Hair Base Spring
def bakeAnimFromOb(targetOb,bakeOb,startFrame,endFrame):
    """Per frame in [startFrame, endFrame]: snap *bakeOb* onto *targetOb* and
    key its translate/rotate channels."""
    f=startFrame
    while f<=endFrame:
        pm.setCurrentTime(f)
        alignOb(targetOb,bakeOb)
        pm.setKeyframe(bakeOb,at='translate')
        pm.setKeyframe(bakeOb,at='rotate')
        f+=1
def bakeAnimTuple(tupleOb,startFrame,endFrame):
    """Batch version of bakeAnimFromOb: *tupleOb* is a sequence of
    (source, target) pairs, keyed once per frame."""
    f=startFrame
    while f<=endFrame:
        pm.setCurrentTime(f)
        for o in tupleOb:
            alignOb(o[0],o[1])
            pm.setKeyframe(o[1],at='translate')
            pm.setKeyframe(o[1],at='rotate')
        f+=1
def makeDynamic(pickedBone):
    """Drive *pickedBone*'s chain with Maya hair dynamics, bake the result,
    then delete the temporary dynamics rig.

    Non-joint inputs get a helper joint chain built first (createBone) and
    the result is baked back onto the original transforms.
    """
    if sceneUnit!='cm':
        pm.currentUnit(l='cm')
    if pm.nodeType(pickedBone)=='joint':
        boneChain = getBoneChain(pickedBone)
    else:
        boneObs = createBone(pickedBone)
        boneChain=getBoneChain(boneObs[0][0])
        print pickedBone,boneObs,boneChain
    if not boneChain:
        return
    # the MEL proc expects the chain's root and tip selected
    pm.select([boneChain[0],boneChain[len(boneChain)-1]],r=True)
    driveJointsWithHair(detailValue,falloffValue)
    # NOTE(review): assumes the proc created exactly 'hairHandle1' -- may
    # collide with pre-existing hair nodes in the scene.
    hairHandle=pm.ls('hairHandle1')[0]
    hairHandle.setAttr("hairDamping",dampValue)
    hairHandle.setAttr("hairStiffness",stiffValue)
    if pm.nodeType(pickedBone)=='joint':
        pm.bakeResults(pickedBone,at=['rotate'],hi='below',sm=True,t=getTimeRange())
        pm.delete('dynJoint*','nucleus*','follicle*')
    else:
        for o in boneObs:
            if o[1]:
                pm.bakeResults(o[1],at=['rotate'],t=getTimeRange(),sm=True)
        pm.delete(boneChain,'dynJoint*','nucleus*','follicle*',hi='below')
    pm.currentUnit(l=sceneUnit)
    #bakeIt(boneObs)
def checkPlaySpeed():
    """If playback speed is not 'play every frame' (ps != 0), ask the user to
    switch -- the solvers key one frame at a time."""
    playSpeed = pm.playbackOptions(q=True,ps=True)
    if playSpeed:
        cD=pm.confirmDialog(title="PlaySpeedCheck",message="For best results please set playback speed to: play every frame\n\n",messageAlign='Center',button=["Yes","No"],defaultButton="Yes",cancelButton="No",dismissString="No")
        if cD == "Yes":
            pm.playbackOptions(ps=0)
def driveJointsWithHair(detail,falloff):
    """Invoke the driveJointsWithHair MEL proc and hide the hair rig it creates.

    NOTE(review): the `source` line at module top is commented out, so the
    MEL proc must already be available in the Maya session.
    """
    mm.eval('driveJointsWithHair %s %s;' % (detail,falloff))
    pm.hide('hairHandle*','hairSystem*')
############ Main Function
def springIt(method):
    """Main entry point: run the spring solve on the current selection.

    *method*: truthy -> hair-dynamics solver (makeDynamic); falsy -> classic
    spring solver (springApply). Returns False when the selection is unusable.
    """
    if pickMethod:
        # hierarchy mode: prefer joints, fall back to any selection
        if pm.ls(sl=1, type='joint'):
            pickedBones = pm.ls(sl=1, type='joint')
        elif pm.ls(sl=1):
            pickedBones = pm.ls(sl=1)
        else:
            return False
    else:
        # selection mode: build a helper joint chain through the selection
        if pm.ls(sl=1) and len(pm.ls(sl=1))>1:
            boneLink=createBoneFromSelection()
            pickedBones=[boneLink[0][0]]
        else:
            return False
    ### Execution
    #pm.textField(progressControlID,e=True,tx="..Running..")
    playOp = pm.playbackOptions(q=True,loop=True)
    pm.playbackOptions(loop='once')
    pm.currentTime(getTimeRange()[0],e=True)
    # hide viewport panes while solving to speed up playback
    mm.eval("paneLayout -e -m false $gMainPane")
    for bone in pickedBones:
        if method:
            makeDynamic(bone)
            #pm.play()
        else:
            mm.eval("paneLayout -e -m true $gMainPane")
            springApply(bone,pickedBones,springLoop=loopValue,springRotateRate=springValue,springTwist=twistValue)
    if not pickMethod:
        # NOTE(review): boneLink holds (joint, source) tuples ending with
        # (tipJoint, None); passing the raw tuple to bakeResults relies on
        # pymel flattening it -- confirm.
        for o in boneLink:
            pm.bakeResults(o,at=['translate','rotate'],t=getTimeRange(),sm=True)
        pm.delete(pickedBones,hi=True)
    mm.eval("paneLayout -e -m true $gMainPane")
    pm.playbackOptions(loop=playOp)
    #pm.evalDeferred('pm.textField(progressControlID,e=True,tx="...Finish...")')
############ UI Function
def nulldef():
    # NOTE(review): tempJoints is not defined in this module; this debug stub
    # raises NameError if called.
    print(tempJoints)
def removeUI():
    """Close the Spring Magic window."""
    pm.deleteUI('makeSpringWin')
def changeDVal(val):
    """UI callback: store hair detail level (0=Low, 1=Medium, 2=High)."""
    global detailValue
    detailValue=val
def changeFVal(val):
    """UI callback: store stiffness falloff (0=Normal, 1=Quick, 2=Slow)."""
    global falloffValue
    falloffValue=val
def changeDaVal(val):
    """UI callback: store the hair damping value."""
    global dampValue
    dampValue=val
def changeStiffVal(val):
    """UI callback: store the hair stiffness value."""
    global stiffValue
    stiffValue=val
def changeSprVal(val):
    """UI callback: store the classic spring amount."""
    global springValue
    springValue=val
def changeTwsVal(val):
    """UI callback: store the twist amount in the module-level ``twistValue``.

    Bug fix: the original declared ``global twistValueValue`` (a typo), so
    the assignment below created a function-local and the real global
    ``twistValue`` read by springIt() was never updated.
    """
    global twistValue
    twistValue=val
    #print twistValue
def changeLoopVal(val):
    """UI callback: store the 'Loop' checkbox state."""
    global loopValue
    loopValue=val
    #print loopValue
def changeTRangeVal(val):
    """UI callback: 1 = use active playback range, 0 = user-specified range."""
    global timeRange
    timeRange=val
    #print timeRange
def changeSFVal(val):
    """UI callback: store the user-specified start frame."""
    global startFrame
    startFrame=val
    print startFrame
def changeEFVal(val):
    """UI callback: store the user-specified end frame."""
    global endFrame
    endFrame=val
    #print endFrame
def changeSpringMethodVal(val):
    """UI callback: switch solver (1 = Hair Magic, 0 = Spring Magic) and show
    the matching settings panel."""
    global springMethod
    springMethod=val
    if not val:
        pm.frameLayout(dynSpringMagicFrameID,e=True,vis=True)
        pm.frameLayout(dynHairMagicFrameID,e=True,vis=False)
    else:
        pm.frameLayout(dynHairMagicFrameID,e=True,vis=True)
        pm.frameLayout(dynSpringMagicFrameID,e=True,vis=False)
def changeMethodVal(val):
    """UI callback: set pick method (1=Hierachy, 0=Selection).

    Selection mode only supports the hair solver, so the Spring Magic radio
    is forced off and disabled.
    """
    global pickMethod
    pickMethod=val
    global springMethod
    if val:
        pm.radioButton(dynSpringMagicRadioID,e=True,ed=True)
    else:
        springMethod=1
        pm.radioButton(dynHairMagicRadioID,e=True,select=True)
        pm.radioButton(dynSpringMagicRadioID,e=True,ed=False,select=False)
    #print pickMethod
def InteractivePlayback():
    """Rewind to playback start, run Maya's InteractivePlayback, rewind again."""
    pm.setCurrentTime(pm.playbackOptions(q=True,minTime=True))
    mm.eval('InteractivePlayback;')
    pm.setCurrentTime(pm.playbackOptions(q=True,minTime=True))
def clearAnim():
    """Delete keys in the current global frame range and rewind to startFrame."""
    clearKeys((startFrame,endFrame))
    pm.currentTime(startFrame,edit=True)
def makeSpringUI():
    """Build and show the Spring Magic window ('makeSpringWin').

    Stores the widget IDs needed by the callbacks in module globals.
    """
    global springButtonID
    global dynHairMagicFrameID
    global dynSpringMagicFrameID
    global dynHairMagicRadioID
    global dynSpringMagicRadioID
    #global progressControlID
    # recreate the window from scratch if one already exists
    if pm.window('makeSpringWin',ex=True):
        pm.deleteUI('makeSpringWin',window=True)
        pm.windowPref('makeSpringWin',remove=True)
    pm.window('makeSpringWin',menuBar=True,t="Spring Magic Maya %s" % version)
    pm.menu(tearOff=False,l="Edit")
    pm.menuItem(l="Reset Settings",ann="Reset all",c=lambda *arg:makeSpringUI())
    pm.scrollLayout('scrollLayout')
    pm.frameLayout(lv=False)
    pm.columnLayout(adjustableColumn=1)
    # pick-method / solver-method radio rows
    pm.rowColumnLayout(numberOfColumns=3,columnWidth=[(1,90),(2,90),(3,90)])
    pm.text(label="Pick Method: ")
    dynPickMethodID = pm.radioCollection()
    pm.radioButton(label="Hierachy",select=True,onc=lambda *arg:changeMethodVal(1))
    pm.radioButton(label="Selection",onc=lambda *arg:changeMethodVal(0))
    pm.text(label="Spring Method: ")
    dynSpringMethodID = pm.radioCollection()
    dynHairMagicRadioID= pm.radioButton(label="Hair Magic",select=True,onc=lambda *arg:changeSpringMethodVal(1))
    dynSpringMagicRadioID= pm.radioButton(label="Spring Magic",onc=lambda *arg:changeSpringMethodVal(0))
    pm.setParent('..')
    pm.separator(style='in')
    # frame-range controls
    pm.rowColumnLayout(numberOfColumns=6,columnWidth=[(1,90),(2,60),(3,55),(4,45),(5,30),(6,45)],bgc=(0.5,0.5,0.5))
    pm.text(label="Key Range: ")
    dynkeyRange = pm.radioCollection()
    pm.radioButton(label="Active",select=True,onc=lambda *arg:changeTRangeVal(1))
    pm.radioButton(label="From: ",onc=lambda *arg:changeTRangeVal(0))
    pm.intField(value=startFrame,cc=changeSFVal)
    pm.text(label="To: ")
    pm.intField(value=endFrame,cc=changeEFVal)
    pm.setParent('..')
    pm.separator(style='out')
    pm.setParent('..')
    # Hair Magic settings panel
    dynHairMagicFrameID=pm.frameLayout(label='Hair Magic',borderStyle='in')
    pm.rowColumnLayout(numberOfColumns=2,columnWidth=[(1,90),(2,180)])
    pm.text(label="Hair Damping: ",align='right')
    pm.floatField(min=0.0, max=1, value=dampValue, step=0.1, cc=changeDaVal)
    pm.text(label="Hair Stiffness: ",align='right')
    pm.floatField(min=0.0, max=1, value=stiffValue, step=0.1, cc=changeStiffVal)
    pm.setParent('..')
    pm.rowColumnLayout(numberOfColumns=4,columnWidth=[(1,90),(2,60),(3,60),(4,85)])
    pm.text(label="Hair Stiffness : ",align='right')
    dynJointFalloffID = pm.radioCollection()
    pm.radioButton(label="Normal",select=True,onc=lambda *arg:changeFVal(0))
    pm.radioButton(label="Quick",onc=lambda *arg:changeFVal(1))
    pm.radioButton(label="Slow",onc=lambda *arg:changeFVal(2))
    pm.text(label="Hair Detail : ",align='right')
    dynJointDetailID = pm.radioCollection()
    pm.radioButton(label="Low",onc=lambda *arg:changeDVal(0))
    pm.radioButton(label="Medium",select=True,onc=lambda *arg:changeDVal(1))
    pm.radioButton(label="High",onc=lambda *arg:changeDVal(2))
    pm.setParent('..')
    pm.setParent('..')
    # Spring Magic settings panel (hidden until selected)
    dynSpringMagicFrameID=pm.frameLayout(label='Spring Magic',borderStyle='in',vis=False)
    pm.rowColumnLayout(numberOfColumns=3,columnWidth=[(1,120),(2,140),(3,60)])
    pm.rowColumnLayout(numberOfColumns=2,columnWidth=[(1,60),(2,60)])
    pm.text(label="Spring : ",align='right')
    pm.floatField(minValue=0, maxValue=1, value=0.3,editable=True,cc=changeSprVal)
    pm.setParent('..')
    pm.rowColumnLayout(numberOfColumns=2,columnWidth=[(1,60),(2,60)])
    pm.text(label="Twist : ",align='right')
    pm.floatField(minValue=0, maxValue=1, value=0.3,editable=True,cc=changeTwsVal)
    pm.setParent('..')
    pm.checkBox(label="Loop",cc=changeLoopVal)
    pm.setParent('..')
    pm.setParent('..')
    pm.separator(style='in')
    # action buttons
    pm.rowColumnLayout(numberOfColumns=3,columnWidth=[(1,112),(2,112),(3,112)])
    springButtonID= pm.button(label="Do",c="springIt(springMethod); pm.deleteUI('makeSpringWin',window=True)")
    pm.button(label= "Apply",c='springIt(springMethod)')
    pm.button(label= "Clear",c='clearAnim()')
    pm.setParent('..')
    #progressControlID=pm.textField(tx="...",ed=False,bgc=(0,.5,0.15),fn='boldLabelFont',h=20)
    pm.showWindow()
# Script job
# NOTE(review): deleteSpringOptionVars is not defined in this module; this
# protected scriptJob registration raises NameError at import time unless it
# is defined elsewhere -- confirm.
sJob_main_updateUI = pm.scriptJob( event= ["SceneOpened", deleteSpringOptionVars], protected = True )
# --- scraped GitHub page footer (non-code residue; commented out so the file parses) ---
# © 2017 GitHub, Inc.
# Terms · Privacy · Security · Status · Help
# Contact GitHub · API · Training · Shop · Blog · About
| {
"repo_name": "josephkirk/PipelineTools",
"path": "core/anim_utils.py",
"copies": "1",
"size": "22245",
"license": "bsd-2-clause",
"hash": 1846385856873144800,
"line_mean": 37.0890410959,
"line_max": 228,
"alpha_frac": 0.6223251214,
"autogenerated": false,
"ratio": 3.482154038822793,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9501043604471706,
"avg_score": 0.02068711115021745,
"num_lines": 584
} |
from functools import wraps
import core
import pickle
import os
# (old, new) substitutions applied when mapping an AAF class name to its Ax
# wrapper class name; the leading ("","") pair is a no-op placeholder.
AX_NAME_REPLACERS = (("",""),("Definition","Def"),
                     ('EdgeCode','Edgecode'))
def Ax(sp):
    """
    converts a smartpointer (objects prefixed with SP) object to a AxObject.
    if sp is not a smartpointer the object is returned as is.

    Python 2 code (dict.has_key). IEnumAAF* iterators are wrapped in
    AxIterWraper; everything else is mapped via get_AxClass.
    """
    # if not a smartpointer return sp
    if not isinstance(sp, core.smartpointers.AxSmartPointer):
        return sp
    sp = sp.ResolveSP()
    #if sp is a IAAFObjectSP After resolving, return a AxObject
    if isinstance(sp, core.smartpointers.IAAFObjectSP):
        return core.AxObject(sp)
    if not hasattr(sp, "GetClassName"):
        return sp
    class_name = sp.GetClassName()
    #iterators are wrap in a AxIterWraper
    if class_name.count("IEnumAAF"):
        iterator_name = class_name.replace('IEnumAAF', '')
        d = core.AxIter.__dict__
        if d.has_key(iterator_name):
            class_object = d[iterator_name]
            return AxIterWraper(class_object(sp))
        # retry with trailing plural 's' stripped
        iterator_name = iterator_name.rstrip("s")
        if d.has_key(iterator_name):
            class_object = d[iterator_name]
            return AxIterWraper(class_object(sp))
        # retry with 'ies' -> 'y' (e.g. Properties -> Property)
        iterator_name = class_name.replace('IEnumAAF', '').replace('ies', 'y')
        if d.has_key(iterator_name):
            class_object = d[iterator_name]
            return AxIterWraper(class_object(sp))
        else:
            raise ValueError("unknown Ax iterator for %s %s" % (class_name, str(sp)) )
    else:
        class_object = get_AxClass(sp)
        if not class_object:
            raise ValueError("no AxObject found for %s %s" % (class_name,str(sp)))
        return class_object(sp)
def get_AxClass(sp):
    """find the Ax Class for a given smartpointer

    Returns the Ax* class from core matching sp's AAF class name (after
    AX_NAME_REPLACERS substitutions), or None. Python 2 (dict.has_key).
    """
    if not isinstance(sp, core.smartpointers.AxSmartPointer):
        return None
    class_name = sp.GetClassName()
    d = core.__dict__
    for old, new in AX_NAME_REPLACERS:
        class_name = class_name.replace(old,new)
    ax_name = 'Ax%s' % class_name
    if d.has_key(ax_name):
        return d[ax_name]
    return None
class AxIterWraper(object):
    """Iterator adapter converting each yielded item through Ax().

    Python 2 iterator protocol (``next`` rather than ``__next__``).
    """
    def __init__(self, ax_iter):
        self.ax_iter = ax_iter
    def __iter__(self):
        return self
    def next(self):
        return Ax(self.ax_iter.next())
def __get_docs():
    """Load the pickled method-docstring table shipped next to this module.

    Fixes: returns an empty dict when docs.pkl is absent (the original
    returned None, which crashed callers doing ``docs.get``); the pickle is
    opened in binary mode, as pickle requires; the file handle is closed
    even if unpickling fails.
    """
    doc_file = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'docs.pkl')
    if not os.path.exists(doc_file):
        return {}
    f = open(doc_file, 'rb')
    try:
        return pickle.load(f)
    finally:
        f.close()
def __AxWrap(d):
    """Wrap every Ax* class in namespace dict *d* so method results pass
    through Ax(), attaching any pickled docstrings.

    Fixes: ``skip`` was the bare string ``("AxInit")``, so ``name in skip``
    did a substring test instead of a membership test -- now a real tuple;
    ``__get_docs()`` may return None when docs.pkl is missing, so it is
    coalesced to an empty dict.
    """
    skip = ("AxInit",)
    docs = __get_docs() or {}
    for name, obj in d.items():
        if name in skip:
            pass
        elif name.startswith("Ax"):
            __AxWrapClass(obj, docs.get(name) or {})
def __AxWrapClass(obj, docs=None):
    """Monkey-patch *obj*'s public methods so their results pass through Ax().

    Names starting with '_', 'to', 'Initialize' or 'CreateInstance' are left
    alone, except CreateInstance* factories, which get the staticmethod
    wrapper. *docs* maps method name -> extra docstring text.

    Fix: the default ``docs=None`` crashed on ``docs.get`` below; coalesce
    to an empty dict first.
    """
    docs = docs or {}
    startswiths = ('_','to','Initialize','CreateInstance')
    for name in dir(obj):
        if not any([name.startswith(i) for i in startswiths]):
            setattr(obj,name, __AxDecorator(getattr(obj,name), docs.get(name)))
        if name.startswith('CreateInstance'):
            setattr(obj,name, __AxDecoratorStatic(getattr(obj,name)))
def __AxDecoratorStatic(f,docs =None):
    """Wrap *f* as a staticmethod whose return value is converted via Ax().

    *docs* is accepted for signature parity with __AxDecorator but unused.
    """
    @staticmethod
    @wraps(f)
    def _decorator(*args, **kwargs):
        return Ax(f(*args, **kwargs))
    return _decorator
def __AxDecorator(f, docs=None):
    """Wrap callable *f* so its return value is converted via Ax();
    optionally append *docs* to the wrapper's docstring.

    Fix: when *f* has no docstring, ``__doc__`` is None and the original
    ``+=`` raised ``TypeError: unsupported operand``; coalesce to '' first.
    """
    @wraps(f)
    def _decorator(*args, **kwargs):
        return Ax(f(*args, **kwargs))
    if docs:
        _decorator.__doc__ = (_decorator.__doc__ or '') + '\n\n' + docs
    return _decorator
"repo_name": "markreidvfx/pyaaf_old",
"path": "pyaaf/util.py",
"copies": "1",
"size": "3774",
"license": "mit",
"hash": 5096783859494318000,
"line_mean": 25.7730496454,
"line_max": 85,
"alpha_frac": 0.5582935877,
"autogenerated": false,
"ratio": 3.6080305927342256,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9428308712459179,
"avg_score": 0.047603093595009166,
"num_lines": 141
} |
from functools import wraps
import core
# (old, new) substitutions mapping AAF class names to Ax wrapper class names.
AX_NAME_REPLACERS = (("",""),("Definition","Def"),('EdgeCode','Edgecode'))
def Ax(sp):
    """
    converts smartpointer(SP) object to corresponding AxObject.
    if sp does not have the a method named GetClassName the object is returned as is

    Python 2 code (dict.has_key). IEnumAAF* iterators are wrapped in
    AxIterWraper; other objects are converted via their to_<Name>SP method.
    """
    if not hasattr(sp,"GetClassName"):
        return sp
    class_name = sp.GetClassName()
    #iterators are wrap in a AxIterWraper
    if class_name.count("IEnumAAF"):
        iterator_name = class_name.replace('IEnumAAF', '')
        d = core.AxIter.__dict__
        if d.has_key(iterator_name):
            class_object = d[iterator_name]
            return AxIterWraper(class_object(sp))
        # retry with trailing plural 's' stripped
        iterator_name = iterator_name.rstrip("s")
        if d.has_key(iterator_name):
            class_object = d[iterator_name]
            return AxIterWraper(class_object(sp))
        # retry with 'ies' -> 'y' (e.g. Properties -> Property)
        iterator_name = class_name.replace('IEnumAAF', '').replace('ies', 'y')
        if d.has_key(iterator_name):
            class_object = d[iterator_name]
            return AxIterWraper(class_object(sp))
        else:
            raise ValueError(class_name)
    else:
        class_object = get_AxClass(sp)
        if not class_object:
            raise ValueError(class_name)
        #if sp is already a AxObject simple return it
        if isinstance(sp, class_object):
            return sp
        methodToCall = None
        name = class_name
        for old, new in AX_NAME_REPLACERS:
            name = name.replace(old,new)
        method = 'to_%sSP' % name
        # NOTE(review): if the to_<Name>SP method is missing, methodToCall
        # stays None and the call below raises TypeError -- confirm intended.
        if hasattr(sp,method):
            methodToCall = getattr(sp, method)
        return class_object(methodToCall())
def get_AxClass(sp):
    """Return the Ax* class from core matching sp's AAF class name (after
    AX_NAME_REPLACERS substitutions), or None. Python 2 (dict.has_key)."""
    if not hasattr(sp,"GetClassName"):
        return None
    class_name = sp.GetClassName()
    d = core.__dict__
    for old, new in AX_NAME_REPLACERS:
        class_name = class_name.replace(old,new)
    ax_name = 'Ax%s' % class_name
    if d.has_key(ax_name):
        return d[ax_name]
    return None
class AxIterWraper(object):
    """Iterator adapter converting each yielded item through Ax().

    Python 2 iterator protocol (``next`` rather than ``__next__``).
    """
    def __init__(self, ax_iter):
        self.ax_iter = ax_iter
    def __iter__(self):
        return self
    def next(self):
        return Ax(self.ax_iter.next())
def __AxWrap(d):
    """Wrap every Ax* class in namespace dict *d* so method results pass
    through Ax().

    Fix: ``skip`` was the bare string ``("AxInit")`` -- parentheses without a
    comma do not make a tuple -- so ``name in skip`` performed a substring
    test (e.g. a class named "Ax" would match). Now a real one-element tuple.
    """
    skip = ("AxInit",)
    for name, obj in d.items():
        if name in skip:
            pass
        elif name.startswith("Ax"):
            __AxWrapClass(obj)
def __AxWrapClass(obj):
    """Monkey-patch *obj*'s public methods so their results pass through Ax().

    Names starting with '_', 'to', 'Initialize' or 'CreateInstance' are left
    alone, except CreateInstance* factories, which get the staticmethod wrapper.
    """
    startswiths = ('_','to','Initialize','CreateInstance')
    for name in dir(obj):
        if not any([name.startswith(i) for i in startswiths]):
            setattr(obj,name, __AxDecorator(getattr(obj,name)))
        if name.startswith('CreateInstance'):
            setattr(obj,name, __AxDecoratorStatic(getattr(obj,name)))
def __AxDecoratorStatic(f):
    """Wrap *f* as a staticmethod whose return value is converted via Ax()."""
    @staticmethod
    @wraps(f)
    def _decorator(*args, **kwargs):
        return Ax(f(*args, **kwargs))
    return _decorator
def __AxDecorator(f):
    """Wrap *f* so its return value is converted via Ax()."""
    @wraps(f)
    def _decorator(*args, **kwargs):
        return Ax(f(*args, **kwargs))
    return _decorator
"repo_name": "DIT-Tools/pyaaf",
"path": "pyaaf/util.py",
"copies": "1",
"size": "3265",
"license": "mit",
"hash": 3081121906868445000,
"line_mean": 26.2166666667,
"line_max": 84,
"alpha_frac": 0.5473200613,
"autogenerated": false,
"ratio": 3.7702078521939955,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4817527913493995,
"avg_score": null,
"num_lines": null
} |
from functools import wraps
import datetime
from requests import cookies
import json
import re
import requests
from requests import structures
import sys
try:
import urlparse
except ImportError:
import urllib.parse as urlparse
if sys.version_info >= (3, 0, 0):
basestring = str
class Headers(object):
    """Adapter exposing the ``get_all``/``getheaders`` interface that
    cookielib's MockResponse expects, backed by a response's headers."""
    def __init__(self, res):
        self.headers = res.headers
    def getheaders(self, name):
        # single-valued lookup; missing names yield [None]
        value = self.headers.get(name)
        return [value]
    def get_all(self, name, failobj=None):
        # failobj kept for email.message.Message signature parity (unused)
        return self.getheaders(name)
def response(status_code=200, content='', headers=None, reason=None, elapsed=0,
             request=None):
    """Build a ``requests.Response`` from plain values, for use as a mock.

    dict *content* is serialized to JSON (bytes on Python 3); any
    ``set-cookie`` header is parsed into the response's cookie jar.
    """
    res = requests.Response()
    res.status_code = status_code
    if isinstance(content, dict):
        if sys.version_info[0] == 3:
            content = bytes(json.dumps(content), 'utf-8')
        else:
            content = json.dumps(content)
    res._content = content
    res._content_consumed = content
    res.headers = structures.CaseInsensitiveDict(headers or {})
    res.reason = reason
    res.elapsed = datetime.timedelta(elapsed)
    res.request = request
    if hasattr(request, 'url'):
        res.url = request.url
        if isinstance(request.url, bytes):
            res.url = request.url.decode('utf-8')
    if 'set-cookie' in res.headers:
        res.cookies.extract_cookies(cookies.MockResponse(Headers(res)),
                                    cookies.MockRequest(request))
    # normally this closes the underlying connection,
    # but we have nothing to free.
    res.close = lambda *args, **kwargs: None
    return res
def all_requests(func):
    """Mark *func* as a catch-all request handler (transparent pass-through)."""
    @wraps(func)
    def passthrough(*args, **kwargs):
        return func(*args, **kwargs)
    return passthrough
def urlmatch(scheme=None, netloc=None, path=None, method=None, params=None):
    """Decorator restricting a handler to URLs/requests matching the criteria.

    *scheme* is compared exactly; *netloc* and *path* are regex patterns
    matched with ``re.match``; *method* is compared case-insensitively; and
    *params* must equal the parsed query string. A non-match returns None so
    the next handler can be tried.

    Fixes: removed a leftover Python 2 ``print`` statement (it was both
    debug noise and a SyntaxError on Python 3, which this module otherwise
    supports), and a duplicated method check.
    """
    def decorator(func):
        @wraps(func)
        def inner(self_or_url, url_or_request, *args, **kwargs):
            # Support both bound-method and plain-function handler signatures.
            if isinstance(self_or_url, urlparse.SplitResult):
                url = self_or_url
                request = url_or_request
            else:
                url = url_or_request
                request = args[0]
            if scheme is not None and scheme != url.scheme:
                return
            if netloc is not None and not re.match(netloc, url.netloc):
                return
            if path is not None and not re.match(path, url.path):
                return
            if method is not None and method.upper() != request.method:
                return
            if params is not None:
                if dict(urlparse.parse_qsl(url.query)) != params:
                    return
            return func(self_or_url, url_or_request, *args, **kwargs)
        return inner
    return decorator
def first_of(handlers, *args, **kwargs):
    """Call each handler in order with the given arguments and return the
    first result that is not None (None if all decline)."""
    for candidate in handlers:
        outcome = candidate(*args, **kwargs)
        if outcome is not None:
            return outcome
    return None
class HTTMock(object):
    """
    Acts as a context manager to allow mocking

    While entered, ``requests.Session.send`` is monkey-patched so every
    outgoing request is offered to this mock's handlers; unmatched requests
    fall through to the real network.
    """
    # NOTE(review): STATUS_CODE appears unused within this class -- confirm
    # against the rest of the file before removing.
    STATUS_CODE = 200
    def __init__(self, *handlers):
        self.handlers = handlers
    def __enter__(self):
        # keep the real send so unmatched requests (and __exit__) can use it
        self._real_session_send = requests.Session.send
        def _fake_send(session, request, **kwargs):
            response = self.intercept(request)
            if isinstance(response, requests.Response):
                # this is pasted from requests to handle redirects properly:
                kwargs.setdefault('stream', session.stream)
                kwargs.setdefault('verify', session.verify)
                kwargs.setdefault('cert', session.cert)
                kwargs.setdefault('proxies', session.proxies)
                allow_redirects = kwargs.pop('allow_redirects', True)
                stream = kwargs.get('stream')
                timeout = kwargs.get('timeout')
                verify = kwargs.get('verify')
                cert = kwargs.get('cert')
                proxies = kwargs.get('proxies')
                gen = session.resolve_redirects(
                    response,
                    request,
                    stream=stream,
                    timeout=timeout,
                    verify=verify,
                    cert=cert,
                    proxies=proxies)
                history = [resp for resp in gen] if allow_redirects else []
                if history:
                    history.insert(0, response)
                    response = history.pop()
                    response.history = tuple(history)
                return response
            # no handler matched: perform the real request
            return self._real_session_send(session, request, **kwargs)
        requests.Session.send = _fake_send
        return self
    def __exit__(self, exc_type, exc_val, exc_tb):
        requests.Session.send = self._real_session_send
    def intercept(self, request):
        """Offer *request* to the handlers; normalize their return value.

        Handlers may return a Response (used as-is), a dict (keyword args for
        response()), a string (response body), or None (decline).
        """
        url = urlparse.urlsplit(request.url)
        res = first_of(self.handlers, url, request)
        if isinstance(res, requests.Response):
            return res
        elif isinstance(res, dict):
            return response(res.get('status_code'),
                            res.get('content'),
                            res.get('headers'),
                            res.get('reason'),
                            res.get('elapsed', 0),
                            request)
        elif isinstance(res, basestring):
            return response(content=res)
        elif res is None:
            return None
        else:
            raise TypeError(
                "Dont know how to handle response of type {}".format(type(res)))
def with_httmock(*handlers):
    """Decorator form of :class:`HTTMock`.

    Wraps the decorated callable so it always executes inside an
    ``HTTMock`` context built from *handlers*.
    """
    context = HTTMock(*handlers)

    def decorator(func):
        @wraps(func)
        def wrapped(*args, **kwargs):
            with context:
                return func(*args, **kwargs)
        return wrapped
    return decorator
| {
"repo_name": "mbylstra/django-wham",
"path": "wham/httmock.py",
"copies": "1",
"size": "5941",
"license": "mit",
"hash": 703179664096848000,
"line_mean": 31.2880434783,
"line_max": 80,
"alpha_frac": 0.5556303653,
"autogenerated": false,
"ratio": 4.497350492051476,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5552980857351476,
"avg_score": null,
"num_lines": null
} |
from functools import wraps
import datetime
import json
import os
import re
import urlparse
from flask import Flask, flash, g, redirect, request, render_template, Response
from flask.ext.babel import Babel
import postmark
import pymongo
import requests
# Languages the locale selector may serve.
LANGUAGES = ('en', 'es')
# Placeholder markup shown when a page has no editable content yet.
EMPTY_BLOCK = """<br><br>"""
# Third-party credentials come from the environment (empty when unset).
POSTMARK_KEY = os.environ.get('POSTMARK_KEY', '')
RECAPTCHA_PUBLIC = os.environ.get('RECAPTCHA_PUBLIC', '')
RECAPTCHA_PRIVATE = os.environ.get('RECAPTCHA_PRIVATE', '')
app = Flask(__name__)
# NOTE(review): the hard-coded fallback secret key is weak -- make sure
# FLASK_SECRETKEY is set in every real deployment.
app.secret_key = os.environ.get('FLASK_SECRETKEY', '1234567890')
babel = Babel(app)
class CrappyCache(dict):
    """Naive in-memory TTL cache backed by a plain dict.

    Values are stored as ``(inserted_at, value)`` tuples. ``__getitem__``
    returns ``None`` (rather than raising ``KeyError``) for missing or
    expired keys, deleting expired entries on access. An ``expiration`` of
    ``None`` means entries never expire.
    """

    # Convenience intervals, in seconds.
    MINUTE = 60
    HOUR = 60 * 60
    DAY = 60 * 60 * 24

    def __init__(self, *args, **kwargs):
        # TTL in seconds, or None for no expiration.
        self.expiration = kwargs.pop("expiration", None)
        super(CrappyCache, self).__init__(*args, **kwargs)

    def __setitem__(self, key, value):
        # Timestamp every write so reads can judge staleness.
        now = datetime.datetime.utcnow()
        super(CrappyCache, self).__setitem__(key, (now, value))

    def __getitem__(self, key):
        if key in self:
            (then, val) = super(CrappyCache, self).__getitem__(key)
            if self.expiration is None:
                return val
            now = datetime.datetime.utcnow()
            delta = now - then
            # BUG FIX: previously compared ``delta.seconds``, which ignores
            # whole days (it wraps back to 0 every 24h), so entries older
            # than a day could be served as fresh. ``total_seconds()`` is
            # the actual elapsed time.
            if delta.total_seconds() < self.expiration:
                return val
            del self[key]
# Five-minute cache; used by the /rss proxy handler below.
SCARY_CACHE = CrappyCache(expiration=CrappyCache.MINUTE * 5)
#
# authentication stuff
#
def check_auth(username, password):
    """Validate HTTP basic-auth credentials against the admin account.

    The expected password comes from the ``ADMIN_PASSWORD`` environment
    variable. Returns ``True`` only for the ``admin`` user with a matching,
    non-empty password.
    """
    import hmac
    expected = os.environ.get('ADMIN_PASSWORD', '')
    if not expected:
        # BUG FIX: previously an unset/empty ADMIN_PASSWORD made the empty
        # password valid, silently opening the admin endpoints.
        return False
    # compare_digest avoids leaking the match length via timing.
    return username == 'admin' and hmac.compare_digest(password, expected)
def authenticate():
    """Build the 401 response that triggers the browser's basic-auth prompt."""
    msg = "This site is not yet available to the public. Please login."
    challenge = {'WWW-Authenticate': 'Basic realm="Login Required"'}
    return Response(msg, 401, challenge)
def requires_auth(f):
    """View decorator enforcing HTTP basic auth via ``check_auth``.

    Unauthenticated requests receive the 401 challenge from
    ``authenticate()``; valid ones fall through to the wrapped view.
    """
    @wraps(f)
    def decorated(*args, **kwargs):
        credentials = request.authorization
        if credentials and check_auth(credentials.username, credentials.password):
            return f(*args, **kwargs)
        return authenticate()
    return decorated
#
# locale and babel goodness
#
@babel.localeselector
def get_locale():
    """Pick the response language.

    An explicit ``?lang=`` query parameter wins when it names a supported
    language; otherwise fall back to Accept-Language negotiation.
    """
    requested = request.args.get('lang')
    if requested in LANGUAGES:
        return requested
    return request.accept_languages.best_match(LANGUAGES)
#
# template filters
#
@app.template_filter()
def slugify(value):
    """Template filter: convert *value* to a URL-friendly slug.

    Strips everything but word characters, whitespace and hyphens, lowers
    the case, then collapses runs of whitespace/hyphens to single hyphens.
    """
    # FIX: regex literals are now raw strings -- '[^\\w\\s-]' relied on the
    # interpreter passing unknown escapes through, which is deprecated.
    value = re.sub(r'[^\w\s-]', '', value).strip().lower()
    return re.sub(r'[-\s]+', '-', value)
#
# request lifecycle
#
@app.before_request
def before_request():
    """Attach a MongoDB database handle to ``flask.g`` for this request.

    Uses the MONGOHQ_URL/MONGOHQ_DB environment settings when present,
    otherwise a local connection to the ``openingparliament`` database.
    """
    mongo_uri = os.environ.get('MONGOHQ_URL')
    if mongo_uri:
        g.db = pymongo.Connection(mongo_uri)[os.environ.get('MONGOHQ_DB')]
    else:
        g.db = pymongo.Connection()['openingparliament']
@app.teardown_request
def teardown_request(exception):
    """Close the per-request MongoDB connection, if one was opened."""
    db = getattr(g, 'db', None)
    if db is not None:
        db.connection.disconnect()
@app.context_processor
def inject_content():
    """Expose the current page's editable content block to templates.

    Falls back to ``EMPTY_BLOCK`` when no document exists for this path or
    when the stored content is empty/falsy.
    """
    doc = g.db.blocks.find_one({'path': request.path})
    if doc:
        return {'content': doc.get('content') or EMPTY_BLOCK}
    return {'content': EMPTY_BLOCK}
@app.context_processor
def inject_admin():
    """Tell templates whether the request carries basic-auth credentials."""
    return {'admin': bool(request.authorization)}
#
# the good, meaty url handlers
#
@app.route('/')
def index():
    """Render the public landing page."""
    return render_template('index.html')
@app.route('/contact', methods=['GET', 'POST'])
def contact():
    """Contact form.

    GET renders the form. POST verifies the reCAPTCHA answer, then relays
    the message via Postmark and redirects back to the form with a flash
    message; a failed captcha re-renders the form with the submitted data.
    """
    if request.method == 'POST':
        # verify captcha against Google's (legacy) verification endpoint
        captcha_url = "http://www.google.com/recaptcha/api/verify"
        params = {
            "privatekey": RECAPTCHA_PRIVATE,
            "remoteip": "",
            "challenge": request.form['recaptcha_challenge_field'],
            "response": request.form['recaptcha_response_field'],
        }
        captcha_response = requests.post(captcha_url, params=params)
        # BUG FIX: removed a stray Python-2 debug ``print`` of the captcha
        # API response, which leaked verification data to stdout.
        if not captcha_response.text.startswith("true"):
            context = {"form": request.form}
            flash('Sorry, your captcha was incorrect. Please try again.')
            return render_template('contact.html', **context)
        # send email
        msg = "%s <%s>\n" % (request.form['name'], request.form['email'])
        if request.form['organization']:
            msg += "%s\n" % request.form['organization']
        msg += "\n%s\n" % request.form['message']
        kwargs = {
            'api_key': POSTMARK_KEY,
            'sender': 'contact@sunlightfoundation.com',
            'reply_to': '%s' % request.form['email'],
            'to': 'johnwonderlich@gmail.com, amandelbaum@ndi.org, dswislow@ndi.org, melissa@fundar.org.mx, gbrown@ndi.org, jkeseru@sunlightfoundation.com',
            'bcc': 'jcarbaugh@sunlightfoundation.com',
            'subject': '[OpeningParliament.org] contact: %s <%s>' % (request.form['name'], request.form['email']),
            'text_body': msg,
        }
        postmark.PMMail(**kwargs).send()
        flash('Your message has been sent. Thank you for contacting us!')
        return redirect('/contact')
    return render_template('contact.html')
# Simple static-template routes.
@app.route('/about')
def about():
    return render_template('about.html')
@app.route('/casestudies')
def casestudies():
    return render_template('casestudies.html')
@app.route('/declaration')
def declaration():
    return render_template('declaration.html')
@app.route('/glow')
def glow():
    # Template rendering disabled; temporarily redirecting to the
    # external event site instead.
    # return render_template('glow.html')
    return redirect('http://openparl2014.org/')
@app.route('/networking')
def networking():
    return render_template('networking.html')
@app.route('/organizations')
def organizations():
    return render_template('organizations.html')
@app.route('/press')
def press():
    # Press page disabled; falls back to the front page.
    # return render_template('press.html')
    return redirect('/')
@app.route('/export')
def export():
    """Dump every editable content block as a JSON document."""
    pages = [{'path': d['path'], 'content': d['content']}
             for d in g.db.blocks.find()]
    payload = json.dumps({'pages': pages})
    return Response(payload, content_type='application/json')
@app.route('/login')
@requires_auth
def login():
    # Visiting this URL forces the basic-auth challenge; once credentials
    # are accepted, the user is simply bounced back to the front page.
    return redirect('/')
@app.route('/save', methods=['POST'])
@requires_auth
def save():
    """Persist edited page content (admin only).

    The target path comes from the form; when absent, it is derived from
    the referring page's URL. Upserts into the ``blocks`` collection and
    echoes the saved content back.
    """
    content = request.form.get('content', '').strip()
    path = request.form.get('path')
    if not path:
        # Fall back to the path of the page the edit was made from.
        referrer = request.environ.get('HTTP_REFERER')
        path = urlparse.urlparse(referrer).path
    record = {
        'path': path,
        'content': content,
    }
    g.db.blocks.update({'path': path}, {"$set": record}, upsert=True)
    return content
#
# scary RSS proxy method
#
@app.route('/rss')
def rss():
    """Proxy the blog's RSS feed, caching the body for five minutes."""
    url = "http://blog.openingparliament.org/rss"
    # BUG FIX: the old ``if url in SCARY_CACHE`` check was True even for an
    # expired entry, but CrappyCache.__getitem__ then returned None -- so an
    # expired cache served an empty response instead of refetching. The
    # lookup itself already returns None for missing *or* expired keys.
    doc = SCARY_CACHE[url]
    if doc is None:
        resp = requests.get(url)
        doc = resp.text
        SCARY_CACHE[url] = doc
    return Response(doc, content_type="text/xml")
#
# the "magic" as they call it
#
if __name__ == '__main__':
    # Development server only -- never run with debug enabled in production.
    DEBUG = True
    app.run(debug=DEBUG, port=8000)
| {
"repo_name": "sunlightlabs/openingparliament",
"path": "web.py",
"copies": "2",
"size": "6872",
"license": "bsd-3-clause",
"hash": 3251991926618207000,
"line_mean": 22.2162162162,
"line_max": 155,
"alpha_frac": 0.6143771828,
"autogenerated": false,
"ratio": 3.525910723447922,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5140287906247922,
"avg_score": null,
"num_lines": null
} |
#from functools import wraps
#import datetime
#import logging
from flask import Flask, current_app, request, jsonify, g
from flask_restful import Resource, Api, reqparse, HTTPException
from flask_mysqldb import MySQL
import jwt
import bcrypt
from api.controllers.base import BaseController
from api.controllers.user import UserController
#from utilities import get_log_level
app = Flask(__name__, instance_relative_config=True)
app.config.from_pyfile('config.py')
# Loads the configuration from /instance/config.py
# NOTE: This file is not checked into git - it must
# be defined for every installation
app.api = Api(app)
app.mysql = MySQL(app)
# define routes
app.api.add_resource(UserController, '/user/<int:user_id>')
@app.before_request
def before_request():
    """Copy whitelisted request-body fields onto ``flask.g``.

    Each listed parameter becomes an attribute of ``g`` (``None`` when the
    field is absent) for every non-GET request.
    """
    # Add anything you want to parse from the POST body
    # to this array and it will be available in flask.g
    post_parameters = ['name',
                       'password',
                       'image_source']
    if not request.method == 'GET':
        # BUG FIX: request.json is None when the body is not JSON, and
        # ``param in None`` raised TypeError; treat a missing/non-JSON
        # body as an empty payload instead.
        payload = request.json or {}
        for param in post_parameters:
            setattr(g, param, payload.get(param))
if __name__ == '__main__':
    # Development entry point; serve through a real WSGI server in production.
    app.debug = True
    app.run()
| {
"repo_name": "rwolande/wapi",
"path": "app.py",
"copies": "1",
"size": "1176",
"license": "mit",
"hash": -2616772197639104500,
"line_mean": 24.0212765957,
"line_max": 65,
"alpha_frac": 0.7270408163,
"autogenerated": false,
"ratio": 3.458823529411765,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9432158693819166,
"avg_score": 0.050741130378519704,
"num_lines": 47
} |
from functools import wraps
import errno
import os
import signal
import getpass
import unittest
from sshaolin.client import SSHClient
class BaseTestCase(unittest.TestCase):
    """Shared fixture: opens one SSH connection to localhost for all tests."""
    # Current OS user; assumes key-based SSH auth to localhost is set up.
    username = getpass.getuser()
    @classmethod
    def setUpClass(cls):
        super(BaseTestCase, cls).setUpClass()
        # NOTE(review): performs real network I/O (SSH to localhost:22,
        # discovering local keys) at class-setup time.
        cls.client = SSHClient(
            "localhost", 22, cls.username, look_for_keys=True)
class TimeoutError(Exception):
    """Raised when a decorated callable exceeds its allotted time."""
    pass
def timeout(seconds=10, error_message=os.strerror(errno.ETIME)):
    """Decorator: abort the wrapped call with ``TimeoutError`` after
    *seconds* of wall-clock time, using ``SIGALRM`` (POSIX only).

    The alarm is always cancelled when the call finishes, whether it
    returned normally or raised.
    """
    def decorator(func):
        def _on_alarm(signum, frame):
            raise TimeoutError(error_message)

        @wraps(func)
        def guarded(*args, **kwargs):
            signal.signal(signal.SIGALRM, _on_alarm)
            signal.alarm(seconds)
            try:
                return func(*args, **kwargs)
            finally:
                signal.alarm(0)
        return guarded
    return decorator
# special test case for running ssh commands at module level
@timeout()
def import_ssh_test():
    # Importing this module runs its module-level ssh command; the timeout
    # decorator guards against the import hanging indefinitely.
    from tests import ssh_run_ls_on_import
    ssh_run_ls_on_import
# Record whether the guarded import completed; consumed by the test suite.
try:
    import_ssh_test()
    test_pass = True
except Exception:
    test_pass = False
| {
"repo_name": "bucknerns/sshaolin",
"path": "tests/base_test.py",
"copies": "1",
"size": "1206",
"license": "apache-2.0",
"hash": 7614402637451429000,
"line_mean": 20.9272727273,
"line_max": 64,
"alpha_frac": 0.644278607,
"autogenerated": false,
"ratio": 4.1020408163265305,
"config_test": true,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.524631942332653,
"avg_score": null,
"num_lines": null
} |
from functools import wraps
import errno
import os
import signal
import time
class TimeoutError(Exception):
    """Raised when a call exceeds (or has already exhausted) its time budget."""
    pass
def timeout(seconds, error_message=os.strerror(errno.ETIME)):
    """Decorator enforcing a *shared* wall-clock budget across calls.

    :param seconds: two-item list -- ``seconds[0]`` is the total budget
        and ``seconds[1]`` accumulates time already consumed (updated in
        place after each successful call).
    :param error_message: message carried by the raised ``TimeoutError``.
    :raises TimeoutError: immediately when the budget is already exhausted,
        or via ``SIGALRM`` when a call overruns the remaining budget.
    """
    def decorator(func):
        def _handle_timeout(signum, frame):
            raise TimeoutError(error_message)

        def wrapper(*args, **kwargs):
            signal.signal(signal.SIGALRM, _handle_timeout)
            # Remaining whole seconds of budget.
            alarmtime = int(seconds[0]) - int(seconds[1])
            if alarmtime < 0:
                raise TimeoutError
            # BUG FIX: the alarm was computed but never armed, so a call
            # could run forever; arm it for the remaining budget. (When
            # alarmtime == 0, alarm(0) disables the alarm -- under one
            # second of budget left is effectively unlimited, matching the
            # original integer-truncation behavior.)
            signal.alarm(alarmtime)
            try:
                t = time.time()
                result = func(*args, **kwargs)
                seconds[1] = seconds[1] + (time.time() - t)
            finally:
                signal.alarm(0)
            return result
        return wraps(func)(wrapper)
    return decorator
def timer(l):
    """Decorator accumulating the wrapped call's wall-clock time into l[0].

    Only successful calls are counted; exceptions propagate without
    updating the accumulator.
    """
    def decorator(func):
        @wraps(func)
        def timed(*args, **kwargs):
            started = time.time()
            outcome = func(*args, **kwargs)
            l[0] = l[0] + (time.time() - started)
            return outcome
        return timed
    return decorator
| {
"repo_name": "skjindal93/Ultimate-Tic-Tac-Toe",
"path": "timeout.py",
"copies": "1",
"size": "1096",
"license": "apache-2.0",
"hash": 4557387805126981600,
"line_mean": 22.8260869565,
"line_max": 61,
"alpha_frac": 0.5428832117,
"autogenerated": false,
"ratio": 4.248062015503876,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5290945227203876,
"avg_score": null,
"num_lines": null
} |
from functools import wraps
import errno
import os
import signal
#==============================================================================
# Variables
#==============================================================================
# Some descriptive variables
#name = "virtdc"
#version = "0.1.0"
#long_description = """virtdc is a set of API's/tools written to create virtual machines for cloud users efficiently."""
#url = "https://github.com/dineshappavoo/virtdc"
#license = ""
#==============================================================================
class TimeoutError(Exception):
    """Raised when a decorated callable exceeds its allotted time."""
    pass
def timeout(seconds=10, error_message=os.strerror(errno.ETIME)):
    """Limit the wrapped callable to *seconds* of wall-clock time.

    Relies on ``SIGALRM`` (POSIX only); raises ``TimeoutError`` carrying
    *error_message* when the deadline passes. The alarm is cleared in a
    ``finally`` so it never outlives the call.
    """
    def decorator(func):
        def _expired(signum, frame):
            raise TimeoutError(error_message)

        def limited(*args, **kwargs):
            signal.signal(signal.SIGALRM, _expired)
            signal.alarm(seconds)
            try:
                return func(*args, **kwargs)
            finally:
                signal.alarm(0)
        return wraps(func)(limited)
    return decorator
| {
"repo_name": "dineshappavoo/virtdc",
"path": "simulation/timeout.py",
"copies": "2",
"size": "1183",
"license": "mit",
"hash": -1858173311888884700,
"line_mean": 30.1315789474,
"line_max": 123,
"alpha_frac": 0.4759087067,
"autogenerated": false,
"ratio": 5.07725321888412,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.005753313961081363,
"num_lines": 38
} |
from functools import wraps
import errno
import os
import sys
import signal
#Signal module doesn't work
if sys.platform=='win32':
import wtimeout
class TimeoutError(Exception):
    """Raised when a decorated callable exceeds its allotted time."""
    pass
def timeout(seconds=2147483647, error_message='Timer Expired'):
    """Cross-platform timeout decorator.

    On win32 (where the signal module's SIGALRM is unavailable) a
    ``wtimeout.Ticker`` watchdog thread is used; elsewhere, ``SIGALRM``
    aborts the call. Raises ``TimeoutError`` with *error_message* on expiry.
    The huge default effectively means "no timeout".
    """
    def decorator(func):
        if sys.platform=='win32':
            def wrapper(*args, **kwargs):
                # Watchdog thread counts down while the call runs.
                timer=wtimeout.Ticker(seconds)
                timer.start()
                was_set=False
                try:
                    result=func(*args, **kwargs)
                except Exception as e:
                    # Call failed: tear the watchdog down, then re-raise.
                    # NOTE(review): ``raise e`` loses the original traceback
                    # on Python 2.
                    timer.stop()
                    timer.join()
                    raise e
                # consume() reports whether the timer fired during the call.
                was_set=timer.consume()
                timer.stop()
                timer.join()
                if (was_set):
                    raise TimeoutError(error_message)
                return result
        else:
            def wrapper(*args, **kwargs):
                signal.signal(signal.SIGALRM, _handle_timeout)
                signal.alarm(seconds)
                try:
                    result = func(*args, **kwargs)
                finally:
                    # Always disarm the alarm, even when the call raised.
                    signal.alarm(0)
                return result
            # Resolved at call time, so defining it after wrapper is fine.
            def _handle_timeout(signum, frame):
                raise TimeoutError(error_message)
        return wraps(func)(wrapper)
    return decorator
| {
"repo_name": "fugue/emulambda",
"path": "emulambda/timeout.py",
"copies": "1",
"size": "1359",
"license": "apache-2.0",
"hash": -3821085345849352000,
"line_mean": 26.18,
"line_max": 63,
"alpha_frac": 0.4981604121,
"autogenerated": false,
"ratio": 4.978021978021978,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5976182390121979,
"avg_score": null,
"num_lines": null
} |
from functools import wraps
import glob
import os
import sys
import threading
import uuid
try:
from redis import Redis
from redis.client import pairs_to_dict
from redis.client import zset_score_pairs
from redis.exceptions import ConnectionError
from redis.exceptions import TimeoutError
except ImportError:
Redis = object
zset_score_pairs = None
ConnectionError = TimeoutError = Exception
from walrus.autocomplete import Autocomplete
from walrus.cache import Cache
from walrus.containers import Array
from walrus.containers import BitField
from walrus.containers import BloomFilter
from walrus.containers import ConsumerGroup
from walrus.containers import Hash
from walrus.containers import HyperLogLog
from walrus.containers import List
from walrus.containers import Set
from walrus.containers import Stream
from walrus.containers import ZSet
from walrus.counter import Counter
from walrus.fts import Index
from walrus.graph import Graph
from walrus.lock import Lock
from walrus.rate_limit import RateLimit
from walrus.streams import TimeSeries
class TransactionLocal(threading.local):
    """Thread-local stack of pipelines backing nested transactions."""

    def __init__(self, **kwargs):
        super(TransactionLocal, self).__init__(**kwargs)
        self.pipes = []

    @property
    def pipe(self):
        """Innermost active pipeline, or ``None`` when no transaction is open."""
        return self.pipes[-1] if self.pipes else None

    def commit(self):
        """Pop the innermost pipeline and execute its queued commands."""
        return self.pipes.pop().execute()

    def abort(self):
        """Pop the innermost pipeline and discard its queued commands."""
        self.pipes.pop().reset()
class Database(Redis):
    """
    Redis-py client with some extras: thread-local transactions, Lua script
    loading, and factory methods for walrus's rich container types.
    """
    def __init__(self, *args, **kwargs):
        """
        :param args: Arbitrary positional arguments to pass to the
            base ``Redis`` instance.
        :param kwargs: Arbitrary keyword arguments to pass to the
            base ``Redis`` instance.
        :param str script_dir: Path to directory containing walrus
            scripts. Use "script_dir=False" to disable loading any scripts.
        """
        script_dir = kwargs.pop('script_dir', None)
        super(Database, self).__init__(*args, **kwargs)
        # Maps the byte-string names the Redis TYPE command returns to the
        # matching rich-container constructors (used by get_key()).
        self.__mapping = {
            b'list': self.List,
            b'set': self.Set,
            b'zset': self.ZSet,
            b'hash': self.Hash}
        self._transaction_local = TransactionLocal()
        self._transaction_lock = threading.RLock()
        if script_dir is not False:
            self.init_scripts(script_dir=script_dir)

    def xsetid(self, name, id):
        """
        Set the last ID of the given stream.
        :param name: stream identifier
        :param id: new value for last ID
        """
        return self.execute_command('XSETID', name, id) == b'OK'

    def xpending_summary(self, key, group):
        """
        Pending message summary report.
        :param key: stream identifier
        :param group: consumer group name
        :returns: dictionary of information about pending messages
        """
        return self.xpending(key, group)

    def get_transaction(self):
        """
        Begin a new transaction for the current thread by pushing a fresh
        pipeline onto the thread-local stack.
        :returns: the newly-created pipeline.
        """
        with self._transaction_lock:
            local = self._transaction_local
            local.pipes.append(self.pipeline())
            return local.pipe

    def commit_transaction(self):
        """
        Commit the currently active transaction (Pipeline). If no
        transaction is active in the current thread, an exception
        will be raised.
        :returns: The return value of executing the Pipeline.
        :raises: ``ValueError`` if no transaction is active.
        """
        with self._transaction_lock:
            local = self._transaction_local
            if not local.pipes:
                raise ValueError('No transaction is currently active.')
            return local.commit()

    def clear_transaction(self):
        """
        Clear the currently active transaction (if exists). If the
        transaction stack is not empty, then a new pipeline will
        be initialized.
        :returns: No return value.
        :raises: ``ValueError`` if no transaction is active.
        """
        with self._transaction_lock:
            local = self._transaction_local
            if not local.pipes:
                raise ValueError('No transaction is currently active.')
            local.abort()

    def atomic(self):
        """
        Return a context manager that runs the wrapped block inside a
        transaction, committing on success and aborting on error.
        """
        return _Atomic(self)

    def init_scripts(self, script_dir=None):
        """
        Register every ``*.lua`` file in *script_dir* (defaults to the
        package's bundled ``scripts`` directory), keyed by base filename.
        """
        self._scripts = {}
        if not script_dir:
            script_dir = os.path.join(os.path.dirname(__file__), 'scripts')
        for filename in glob.glob(os.path.join(script_dir, '*.lua')):
            with open(filename, 'r') as fh:
                script_obj = self.register_script(fh.read())
                script_name = os.path.splitext(os.path.basename(filename))[0]
                self._scripts[script_name] = script_obj

    def run_script(self, script_name, keys=None, args=None):
        """
        Execute a walrus script with the given arguments.
        :param script_name: The base name of the script to execute.
        :param list keys: Keys referenced by the script.
        :param list args: Arguments passed in to the script.
        :returns: Return value of script.
        .. note:: Redis scripts require two parameters, ``keys``
            and ``args``, which are referenced in lua as ``KEYS``
            and ``ARGV``.
        """
        return self._scripts[script_name](keys, args)

    def get_temp_key(self):
        """
        Generate a temporary random key using UUID4.
        """
        return 'temp.%s' % uuid.uuid4()

    def __iter__(self):
        """
        Iterate over the keys of the selected database.
        """
        return iter(self.scan_iter())

    def __len__(self):
        """Number of keys in the selected database."""
        return self.dbsize()

    def search(self, pattern):
        """
        Search the keyspace of the selected database using the
        given search pattern.
        :param str pattern: Search pattern using wildcards.
        :returns: Iterator that yields matching keys.
        """
        return self.scan_iter(pattern)

    def get_key(self, key):
        """
        Return a rich object for the given key. For instance, if
        a hash key is requested, then a :py:class:`Hash` will be
        returned.
        Note: only works for Hash, List, Set and ZSet.
        :param str key: Key to retrieve.
        :returns: A hash, set, list, zset or array.
        """
        return self.__mapping.get(self.type(key), self.__getitem__)(key)

    def hash_exists(self, key):
        """Return whether the given key exists."""
        return self.exists(key)

    def autocomplete(self, namespace='autocomplete', **kwargs):
        """Create an :py:class:`Autocomplete` helper bound to *namespace*."""
        return Autocomplete(self, namespace, **kwargs)

    def cache(self, name='cache', default_timeout=3600):
        """
        Create a :py:class:`Cache` instance.
        :param str name: The name used to prefix keys used to
            store cached data.
        :param int default_timeout: The default key expiry.
        :returns: A :py:class:`Cache` instance.
        """
        return Cache(self, name=name, default_timeout=default_timeout)

    def counter(self, name):
        """
        Create a :py:class:`Counter` instance.
        :param str name: The name used to store the counter's value.
        :returns: A :py:class:`Counter` instance.
        """
        return Counter(self, name=name)

    def graph(self, name, *args, **kwargs):
        """
        Creates a :py:class:`Graph` instance.
        :param str name: The namespace for the graph metadata.
        :returns: a :py:class:`Graph` instance.
        """
        return Graph(self, name, *args, **kwargs)

    def lock(self, name, ttl=None, lock_id=None):
        """
        Create a named :py:class:`Lock` instance. The lock implements
        an API similar to the standard library's ``threading.Lock``,
        and can also be used as a context manager or decorator.
        :param str name: The name of the lock.
        :param int ttl: The time-to-live for the lock in milliseconds
            (optional). If the ttl is ``None`` then the lock will not
            expire.
        :param str lock_id: Optional identifier for the lock instance.
        """
        return Lock(self, name, ttl, lock_id)

    def rate_limit(self, name, limit=5, per=60, debug=False):
        """
        Rate limit implementation. Allows up to `limit` of events every `per`
        seconds.
        See :ref:`rate-limit` for more information.
        """
        return RateLimit(self, name, limit, per, debug)

    def Index(self, name, **options):
        """
        Create a :py:class:`Index` full-text search index with the given
        name and options.
        """
        return Index(self, name, **options)

    def List(self, key):
        """
        Create a :py:class:`List` instance wrapping the given key.
        """
        return List(self, key)

    def Hash(self, key):
        """
        Create a :py:class:`Hash` instance wrapping the given key.
        """
        return Hash(self, key)

    def Set(self, key):
        """
        Create a :py:class:`Set` instance wrapping the given key.
        """
        return Set(self, key)

    def ZSet(self, key):
        """
        Create a :py:class:`ZSet` instance wrapping the given key.
        """
        return ZSet(self, key)

    def HyperLogLog(self, key):
        """
        Create a :py:class:`HyperLogLog` instance wrapping the given
        key.
        """
        return HyperLogLog(self, key)

    def Array(self, key):
        """
        Create a :py:class:`Array` instance wrapping the given key.
        """
        return Array(self, key)

    def Stream(self, key):
        """
        Create a :py:class:`Stream` instance wrapping the given key.
        """
        return Stream(self, key)

    def consumer_group(self, group, keys, consumer=None):
        """
        Create a named :py:class:`ConsumerGroup` instance for the given key(s).
        :param group: name of consumer group
        :param keys: stream identifier(s) to monitor. May be a single stream
            key, a list of stream keys, or a key-to-minimum id mapping. The
            minimum id for each stream should be considered an exclusive
            lower-bound. The '$' value can also be used to only read values
            added *after* our command started blocking.
        :param consumer: name for consumer within group
        :returns: a :py:class:`ConsumerGroup` instance
        """
        return ConsumerGroup(self, group, keys, consumer=consumer)

    def time_series(self, group, keys, consumer=None):
        """
        Create a named :py:class:`TimeSeries` consumer-group for the
        given key(s). TimeSeries objects are almost identical to
        :py:class:`ConsumerGroup` except they offer a higher level of
        abstraction and read/write message ids as datetimes.
        :param group: name of consumer group
        :param keys: stream identifier(s) to monitor. May be a single stream
            key, a list of stream keys, or a key-to-minimum id mapping. The
            minimum id for each stream should be considered an exclusive
            lower-bound. The '$' value can also be used to only read values
            added *after* our command started blocking.
        :param consumer: name for consumer within group
        :returns: a :py:class:`TimeSeries` instance
        """
        return TimeSeries(self, group, keys, consumer=consumer)

    def bit_field(self, key):
        """
        Container for working with the Redis BITFIELD command.
        :returns: a :py:class:`BitField` instance.
        """
        return BitField(self, key)

    def bloom_filter(self, key, size=64 * 1024):
        """
        Create a :py:class:`BloomFilter` container type.
        Bloom-filters are probabilistic data-structures that are used to answer
        the question: "is X a member of set S?" It is possible to receive a
        false positive, but impossible to receive a false negative (in other
        words, if the bloom filter contains a value, it will never erroneously
        report that it does *not* contain such a value). The accuracy of the
        bloom-filter and the likelihood of a false positive can be reduced by
        increasing the size of the bloomfilter. The default size is 64KB (or
        524,288 bits).
        """
        return BloomFilter(self, key, size)

    def cas(self, key, value, new_value):
        """
        Perform an atomic compare-and-set on the value in "key", using a prefix
        match on the provided value.
        """
        return self.run_script('cas', keys=[key], args=[value, new_value])

    def listener(self, channels=None, patterns=None, is_async=False):
        """
        Decorator for wrapping functions used to listen for Redis
        pub-sub messages.
        The listener will listen until the decorated function
        raises a ``StopIteration`` exception.
        :param list channels: Channels to listen on.
        :param list patterns: Patterns to match.
        :param bool is_async: Whether to start the listener in a
            separate thread.
        """
        def decorator(fn):
            _channels = channels or []
            _patterns = patterns or []

            @wraps(fn)
            def inner():
                pubsub = self.pubsub()

                def listen():
                    for channel in _channels:
                        pubsub.subscribe(channel)
                    for pattern in _patterns:
                        pubsub.psubscribe(pattern)
                    # Message dicts are splatted into the handler; the
                    # handler signals shutdown by raising StopIteration.
                    for data_dict in pubsub.listen():
                        try:
                            ret = fn(**data_dict)
                        except StopIteration:
                            pubsub.close()
                            break

                if is_async:
                    worker = threading.Thread(target=listen)
                    worker.start()
                    # Caller is responsible for joining the worker thread.
                    return worker
                else:
                    listen()
            return inner
        return decorator

    def stream_log(self, callback, connection_id='monitor'):
        """
        Stream Redis activity one line at a time to the given
        callback.
        :param callback: A function that accepts a single argument,
            the Redis command.
        """
        conn = self.connection_pool.get_connection(connection_id, None)
        conn.send_command('monitor')
        # Keep feeding the callback until it returns a falsy value.
        while callback(conn.read_response()):
            pass
class _Atomic(object):
    """Context manager tying a :class:`Database` transaction to a block.

    Entering begins a transaction; a clean exit commits it, an exception
    aborts it. ``commit``/``clear`` may also be called mid-block, in which
    case a fresh transaction is opened by default.
    """

    def __init__(self, db):
        self.db = db

    @property
    def pipe(self):
        """The thread's currently-active pipeline."""
        return self.db._transaction_local.pipe

    def __enter__(self):
        self.db.get_transaction()
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        # Roll back on error; otherwise flush the queued commands.
        if exc_type is None:
            self.commit(False)
        else:
            self.clear(False)

    def commit(self, begin_new=True):
        """Execute the pending transaction; optionally start another."""
        result = self.db.commit_transaction()
        if begin_new:
            self.db.get_transaction()
        return result

    def clear(self, begin_new=True):
        """Discard the pending transaction; optionally start another."""
        self.db.clear_transaction()
        if begin_new:
            self.db.get_transaction()
| {
"repo_name": "coleifer/walrus",
"path": "walrus/database.py",
"copies": "1",
"size": "15164",
"license": "mit",
"hash": 8934847825753778000,
"line_mean": 31.9652173913,
"line_max": 79,
"alpha_frac": 0.5941044579,
"autogenerated": false,
"ratio": 4.370028818443804,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5464133276343804,
"avg_score": null,
"num_lines": null
} |
from functools import wraps
import glob
import os
import threading
import uuid
try:
from redis import Redis
except ImportError:
Redis = object
from walrus.autocomplete import Autocomplete
from walrus.cache import Cache
from walrus.containers import Array
from walrus.containers import Hash
from walrus.containers import HyperLogLog
from walrus.containers import List
from walrus.containers import Set
from walrus.containers import ZSet
from walrus.counter import Counter
from walrus.graph import Graph
from walrus.lock import Lock
from walrus.rate_limit import RateLimit
class TransactionLocal(threading.local):
    """Per-thread stack of pipelines used to implement transactions."""

    def __init__(self, **kwargs):
        super(TransactionLocal, self).__init__(**kwargs)
        self.pipes = []

    @property
    def pipe(self):
        # Top of the stack, or None when no transaction is active.
        if not self.pipes:
            return None
        return self.pipes[-1]

    def commit(self):
        """Execute and discard the innermost pipeline."""
        top = self.pipes.pop()
        return top.execute()

    def abort(self):
        """Discard the innermost pipeline without executing it."""
        top = self.pipes.pop()
        top.reset()
class Database(Redis):
"""
Redis-py client with some extras.
"""
def __init__(self, *args, **kwargs):
"""
:param args: Arbitrary positional arguments to pass to the
base ``Redis`` instance.
:param kwargs: Arbitrary keyword arguments to pass to the
base ``Redis`` instance.
:param str script_dir: Path to directory containing walrus
scripts.
"""
script_dir = kwargs.pop('script_dir', None)
super(Database, self).__init__(*args, **kwargs)
self.__mapping = {
'list': self.List,
'set': self.Set,
'zset': self.ZSet,
'hash': self.Hash}
self._transaction_local = TransactionLocal()
self._transaction_lock = threading.RLock()
self.init_scripts(script_dir=script_dir)
def get_transaction(self):
with self._transaction_lock:
local = self._transaction_local
local.pipes.append(self.pipeline())
return local.pipe
def commit_transaction(self):
"""
Commit the currently active transaction (Pipeline). If no
transaction is active in the current thread, an exception
will be raised.
:returns: The return value of executing the Pipeline.
:raises: ``ValueError`` if no transaction is active.
"""
with self._transaction_lock:
local = self._transaction_local
if not local.pipes:
raise ValueError('No transaction is currently active.')
return local.commit()
def clear_transaction(self):
"""
Clear the currently active transaction (if exists). If the
transaction stack is not empty, then a new pipeline will
be initialized.
:returns: No return value.
:raises: ``ValueError`` if no transaction is active.
"""
with self._transaction_lock:
local = self._transaction_local
if not local.pipes:
raise ValueError('No transaction is currently active.')
local.abort()
def atomic(self):
return _Atomic(self)
def init_scripts(self, script_dir=None):
self._scripts = {}
if not script_dir:
script_dir = os.path.join(os.path.dirname(__file__), 'scripts')
for filename in glob.glob(os.path.join(script_dir, '*.lua')):
with open(filename, 'r') as fh:
script_obj = self.register_script(fh.read())
script_name = os.path.splitext(os.path.basename(filename))[0]
self._scripts[script_name] = script_obj
def run_script(self, script_name, keys=None, args=None):
"""
Execute a walrus script with the given arguments.
:param script_name: The base name of the script to execute.
:param list keys: Keys referenced by the script.
:param list args: Arguments passed in to the script.
:returns: Return value of script.
.. note:: Redis scripts require two parameters, ``keys``
and ``args``, which are referenced in lua as ``KEYS``
and ``ARGV``.
"""
return self._scripts[script_name](keys, args)
def get_temp_key(self):
"""
Generate a temporary random key using UUID4.
"""
return 'temp.%s' % uuid.uuid4()
def __iter__(self):
"""
Iterate over the keys of the selected database.
"""
return iter(self.scan_iter())
def search(self, pattern):
"""
Search the keyspace of the selected database using the
given search pattern.
:param str pattern: Search pattern using wildcards.
:returns: Iterator that yields matching keys.
"""
return self.scan_iter(pattern)
def get_key(self, key):
    """
    Return a rich wrapper object for the given key, chosen according to
    the key's Redis type; unrecognized types fall back to
    ``__getitem__``.

    :param str key: Key to retrieve.
    :returns: A hash, set, list, zset or array.
    """
    constructor = self.__mapping.get(self.type(key), self.__getitem__)
    return constructor(key)
def hash_exists(self, key):
    """Return whether the given key exists in the database."""
    return self.exists(key)
def autocomplete(self, namespace='autocomplete', **kwargs):
    """Create an :py:class:`Autocomplete` helper bound to this database."""
    return Autocomplete(self, namespace, **kwargs)
def cache(self, name='cache', default_timeout=3600):
    """
    Create a :py:class:`Cache` instance.

    :param str name: The name used to prefix keys used to store
        cached data.
    :param int default_timeout: The default key expiry, in seconds.
    :returns: A :py:class:`Cache` instance.
    """
    return Cache(self, name=name, default_timeout=default_timeout)
def counter(self, name):
    """
    Create a :py:class:`Counter` instance.

    :param str name: The name used to store the counter's value.
    :returns: A :py:class:`Counter` instance.
    """
    return Counter(self, name=name)
def graph(self, name, *args, **kwargs):
    """
    Create a :py:class:`Graph` instance.

    :param str name: The namespace for the graph metadata.
    :returns: a :py:class:`Graph` instance.
    """
    return Graph(self, name, *args, **kwargs)
def lock(self, name, ttl=None, lock_id=None, lock_test_delay=None):
    """
    Create a named :py:class:`Lock` instance. The lock implements an
    API similar to the standard library's ``threading.Lock`` and can
    also be used as a context manager or decorator.

    :param str name: The name of the lock.
    :param int ttl: The time-to-live for the lock in milliseconds
        (optional). When ``None``, the lock never expires.
    :param str lock_id: Optional identifier for the lock instance.
    :param int lock_test_delay: The time between polls when trying to
        acquire lock. Defaults to TTL if not defined.
    """
    return Lock(self, name, ttl, lock_id, lock_test_delay)
def rate_limit(self, name, limit=5, per=60, debug=False):
    """
    Rate-limit implementation allowing up to ``limit`` events every
    ``per`` seconds. See :ref:`rate-limit` for more information.
    """
    return RateLimit(self, name, limit, per, debug)
def List(self, key):
    """Return a :py:class:`List` instance wrapping the given key."""
    return List(self, key)
def Hash(self, key):
    """Return a :py:class:`Hash` instance wrapping the given key."""
    return Hash(self, key)
def Set(self, key):
    """Return a :py:class:`Set` instance wrapping the given key."""
    return Set(self, key)
def ZSet(self, key):
    """Return a :py:class:`ZSet` instance wrapping the given key."""
    return ZSet(self, key)
def HyperLogLog(self, key):
    """Return a :py:class:`HyperLogLog` instance wrapping the given key."""
    return HyperLogLog(self, key)
def Array(self, key):
    """Return a :py:class:`Array` instance wrapping the given key."""
    return Array(self, key)
def listener(self, channels=None, patterns=None, _async=False):
    """
    Decorator for wrapping functions used to listen for Redis pub-sub
    messages.

    The listener will listen until the decorated function raises a
    ``StopIteration`` exception.

    :param list channels: Channels to listen on.
    :param list patterns: Patterns to match.
    :param bool _async: Whether to start the listener in a separate
        thread. (Renamed from ``async``, which became a reserved
        keyword in Python 3.7 and made this method a syntax error;
        positional callers are unaffected.)
    """
    def decorator(fn):
        _channels = channels or []
        _patterns = patterns or []

        @wraps(fn)
        def inner():
            pubsub = self.pubsub()

            def listen():
                # Subscribe before consuming so no messages are missed.
                for channel in _channels:
                    pubsub.subscribe(channel)
                for pattern in _patterns:
                    pubsub.psubscribe(pattern)
                for data_dict in pubsub.listen():
                    try:
                        fn(**data_dict)
                    except StopIteration:
                        pubsub.close()
                        break

            if _async:
                worker = threading.Thread(target=listen)
                worker.start()
                return worker
            else:
                listen()
        return inner
    return decorator
def stream_log(self, callback, connection_id='monitor'):
    """
    Stream Redis activity one line at a time to the given callback.

    :param callback: A function that accepts a single argument, the
        Redis command. Streaming stops when the callback returns a
        falsey value.
    """
    connection = self.connection_pool.get_connection(connection_id, None)
    connection.send_command('monitor')
    while callback(connection.read_response()):
        pass
class _Atomic(object):
    """
    Context-manager helper returned by ``Database.atomic()``: enters a
    transaction, committing on clean exit and aborting when an
    exception escapes the block.
    """
    def __init__(self, db):
        self.db = db

    @property
    def pipe(self):
        # The pipeline backing the currently-active transaction.
        return self.db._transaction_local.pipe

    def __enter__(self):
        self.db.get_transaction()
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        # Roll back on error; otherwise flush the queued commands.
        if exc_type is not None:
            self.clear(False)
        else:
            self.commit(False)

    def commit(self, begin_new=True):
        result = self.db.commit_transaction()
        if begin_new:
            self.db.get_transaction()
        return result

    def clear(self, begin_new=True):
        self.db.clear_transaction()
        if begin_new:
            self.db.get_transaction()
| {
"repo_name": "johndlong/walrus",
"path": "walrus/database.py",
"copies": "1",
"size": "10841",
"license": "mit",
"hash": 8555139882802138000,
"line_mean": 30.1522988506,
"line_max": 77,
"alpha_frac": 0.5744857485,
"autogenerated": false,
"ratio": 4.389068825910932,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.00015341066550536442,
"num_lines": 348
} |
from functools import wraps
import hashlib
from hashlib import sha256
import re
import six
if six.PY3:
long = int
def ensure_bytes(data):
    """Return ``data`` as UTF-8 encoded bytes (no-op when already bytes)."""
    if isinstance(data, six.binary_type):
        return data
    return data.encode('utf-8')
def ensure_str(data):
    """
    Return ``data`` as text, decoding UTF-8 bytes when necessary.

    :raises ValueError: when ``data`` is neither bytes nor a string type.
    """
    if isinstance(data, six.binary_type):
        return data.decode('utf-8')
    if not isinstance(data, six.string_types):
        raise ValueError("Invalid value for string")
    return data
def chr_py2(num):
    """Ensures that python3's chr behavior matches python2 (single byte)."""
    if not six.PY3:
        return chr(num)
    return bytes([num])
def hash160(data):
    """Return ripemd160(sha256(data))."""
    inner = sha256(data).digest()
    return hashlib.new('ripemd160', inner).digest()
def is_hex_string(string):
    """
    Check if the string is only composed of hex characters.

    Accepts text or bytes. The previous implementation had two bugs:
    it used an unanchored ``match`` (so trailing non-hex characters
    were accepted, e.g. ``'abcx'``), and it converted bytes with
    ``str()``, which on Python 3 yields ``"b'..'"`` including the
    prefix and quotes.
    """
    # \Z anchors at end-of-string, so the WHOLE input must be hex.
    pattern = re.compile(r'[A-Fa-f0-9]+\Z')
    if isinstance(string, bytes):
        # Undecodable bytes become U+FFFD, which is not hex and
        # therefore correctly fails the check.
        string = string.decode('ascii', 'replace')
    return pattern.match(string) is not None
def long_to_hex(l, size):
    """
    Encode a long value as a zero-padded, lower-case hex string.

    ``size`` is the length of the resulting hex string, so for a
    32-byte long ``size`` should be 64 (two hex characters per byte).
    Returns bytes.
    """
    template = "{0:0%sx}" % size
    return ensure_bytes(template.format(l).lower())
def long_or_int(val, *args):
    """Parse ``val`` as a long (Python 2) / int (Python 3)."""
    return long(val, *args)
def memoize(f):
    """
    Memoization decorator for a function taking one or more arguments.

    Results are stored on ``f.cache``, keyed by the positional and
    keyword arguments. The previous implementation keyed on
    ``tuple(kwargs)`` -- the keyword *names* only -- so calls that
    differed only in keyword values wrongly shared one cache entry;
    the key now includes the sorted keyword items.
    """
    def _c(*args, **kwargs):
        if not hasattr(f, 'cache'):
            f.cache = dict()
        # Sort kwarg items so the key is independent of keyword order.
        key = (args, tuple(sorted(kwargs.items())))
        if key not in f.cache:
            f.cache[key] = f(*args, **kwargs)
        return f.cache[key]
    return wraps(f)(_c)
| {
"repo_name": "BlockIo/multimerchant-python",
"path": "multimerchant/wallet/utils.py",
"copies": "2",
"size": "1730",
"license": "mit",
"hash": -9147594990519714000,
"line_mean": 24.0724637681,
"line_max": 76,
"alpha_frac": 0.6317919075,
"autogenerated": false,
"ratio": 3.4257425742574257,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5057534481757425,
"avg_score": null,
"num_lines": null
} |
from functools import wraps
import hashlib
import datetime
import mimetypes
import random
from bson import ObjectId
from flask import jsonify, Response, url_for
from flask_jwt import jwt_required, current_user
from flask_restful import reqparse, abort
import pymongo
import werkzeug
from werkzeug.utils import secure_filename
from flask.ext import restful
import notifier
from core import app, DB, FS, redis_client
# Base request parser shared by the camera-facing endpoints: every such
# request must carry the camera's api_key.
api_request_parser = reqparse.RequestParser()
api_request_parser.add_argument('api_key', type=str, required=True, help="Missing api key")
def allowed_file(filename):
    """Return whether ``filename`` has an extension listed in the app's
    ``ALLOWED_EXTENSIONS`` config."""
    if '.' not in filename:
        return False
    extension = filename.rsplit('.', 1)[1]
    return extension in app.config.get('ALLOWED_EXTENSIONS')
def get_cam_by_id(camera_id):
    """Return the camera document for ``camera_id``, or ``None`` when the
    id is not a valid ObjectId (lookup misses also yield ``None``)."""
    if not ObjectId.is_valid(camera_id):
        return None
    return DB.cams.find_one({"_id": ObjectId(camera_id)})
def requires_api_key(f):
    """
    Decorator authenticating camera requests by their ``api_key``
    parameter. Aborts with 401 when the key is missing or unknown;
    otherwise records the camera's last access time and invokes the
    wrapped view.
    """
    @wraps(f)
    def decorated_function(*args, **kwargs):
        api_args = api_request_parser.parse_args()
        input_api_key = api_args['api_key']
        if not input_api_key:
            restful.abort(401)
        else:
            valid_cam = DB.cams.find_one({"api_key": input_api_key})
            if not valid_cam:
                restful.abort(401, description="Valid api key is required")
            else:
                # Track when the camera last contacted the server.
                valid_cam['last_access'] = datetime.datetime.now()
                DB.cams.save(valid_cam)
        # Only reached when authentication succeeded (abort raises).
        return f(*args, **kwargs)
    return decorated_function
class CameraStateController(restful.Resource):
    # REST resource exposing and toggling a camera's active/inactive flag.
    @requires_api_key
    def get(self):
        # Cameras poll this endpoint (authenticated by api_key) to learn
        # whether they should be capturing.
        args = api_request_parser.parse_args()
        valid_cam = DB.cams.find_one({"api_key": args['api_key']})
        if valid_cam:
            return {'result': 'OK', 'camera_state': valid_cam.get('active')}
        return {'result': 'NOK'}, 401

    @staticmethod
    @jwt_required()
    def post(camera_id):
        # Toggle the camera's state from the web UI (JWT-authenticated).
        if camera_id:
            camera = get_cam_by_id(camera_id)
            if camera:
                CameraStateController.change_camera_state(camera, not camera.get('active'), current_user.email)
                return jsonify(result="OK", new_state=camera.get('active'), id=camera_id)
            else:
                return jsonify(result="NOK", error="Invalid camera id")
        return jsonify(result="NOK")

    @staticmethod
    def change_camera_state(camera, new_state, user):
        # Persist the new state and append an audit-trail entry, then
        # notify subscribers of the change.
        camera['active'] = new_state
        DB.cams.save(camera)
        DB.cams.history.insert({
            'action': 'change_state',
            'camera': camera.get('name'),
            'when': datetime.datetime.now(),
            'new_state': camera.get('active'),
            'user': user
        })
        notifier.notify_camera_state_changed(camera)
# Parser for camera uploads: extends the api_key parser with the image file
# itself plus optional capture-date and event metadata.
file_upload_parser = api_request_parser.copy()
file_upload_parser.add_argument('file', type=werkzeug.datastructures.FileStorage, location='files',
                                required=True)
file_upload_parser.add_argument('date', type=str)
file_upload_parser.add_argument('event', type=str)
class UploadImage(restful.Resource):
    # Endpoint used by cameras to push captured frames into GridFS.
    @staticmethod
    @requires_api_key
    def post():
        args = file_upload_parser.parse_args()
        request_cam = DB.cams.find_one({"api_key": args['api_key']})
        in_image_file = args['file']
        if in_image_file and allowed_file(in_image_file.filename):
            filename = secure_filename(in_image_file.filename)
            # Fall back to guessing from the filename when the client did
            # not supply a content type.
            content_type = in_image_file.content_type \
                if in_image_file.content_type else mimetypes.guess_type(in_image_file.filename)[0]
            oid = FS.put(in_image_file, content_type=content_type,
                         filename=filename)
            DB.images.save({
                "image_id": str(oid),
                "date_saved": datetime.datetime.now(),
                # NOTE(review): reqparse result dicts include 'date' (as
                # None) even when absent, so this membership test is
                # presumably always true and the fallback may be dead --
                # confirm against the client payloads.
                "date_taken": args.get('date') if 'date' in args else datetime.datetime.now(),
                "camera": request_cam.get('name'),
            })
            notifier.notify_new_image(request_cam, url_for('serve_gridfs_file', oid=str(oid), _external=True))
            # Publish the new frame id so live MJPEG streams pick it up.
            redis_client.publish(str(request_cam.get("_id")) + ':stream', oid)
            return jsonify(status="OK", oid=str(oid), camera_state=request_cam.get('active'))
        return jsonify(status="NOK", error="not allowed file")
class CameraController(restful.Resource):
    # Management endpoint for removing a camera (JWT-authenticated).
    @staticmethod
    @jwt_required()
    def delete(camera_id):
        if camera_id:
            cam_by_id = get_cam_by_id(camera_id)
            if cam_by_id:
                DB.cams.remove({"_id": ObjectId(camera_id)})
                # Record the removal in the audit history.
                DB.cams.history.insert({
                    'action': 'remove',
                    'camera': cam_by_id.get('name'),
                    'when': datetime.datetime.now(),
                    'user': current_user.email
                })
                return jsonify(result="OK")
        return jsonify(result="NOK")
class StreamController(restful.Resource):
    # Serves a multipart MJPEG stream of frames published via redis pub-sub.
    @staticmethod
    def get_camera_frame(camera_id):
        # Generator yielding one multipart chunk per published frame id.
        pubsub = redis_client.get_pubsub(camera_id + ':stream')
        if pubsub:
            for message in pubsub.listen():
                app.logger.debug("Got this %s, data", message)
                if ObjectId.is_valid(message.get('data')):
                    image_file = FS.get(ObjectId(message.get('data')))
                    yield (b'--frame\r\n'
                           b'Content-Type: image/jpeg\r\n\r\n' + image_file.read() + b'\r\n')
                else:
                    # Non-frame messages (e.g. subscribe confirmations)
                    # produce an empty part to keep the stream alive.
                    yield (b'--frame\r\n'
                           b'Content-Type: image/jpeg\r\n\r\n\r\n')

    @staticmethod
    def get(camera_id):
        # 404 unless the camera exists.
        if camera_id and get_cam_by_id(camera_id):
            return Response(StreamController.get_camera_frame(camera_id),
                            mimetype='multipart/x-mixed-replace; boundary=frame')
        else:
            abort(404)
class StreamingController(restful.Resource):
    # Lists cameras (id, name, state) for the streaming UI.
    @staticmethod
    @jwt_required()
    def get():
        cameras = []
        for camera in DB.cams.find():
            cameras.append({
                "id": str(camera.get("_id")),
                "name": camera.get('name'),
                "active": camera.get('active'),
            })
        return jsonify(result="OK", cameras=cameras)
class CamerasController(restful.Resource):
    # Admin-facing resource for listing and registering cameras.
    def __init__(self):
        self.register_parser = reqparse.RequestParser()
        self.register_parser.add_argument('cam_name', type=str, required=True, help='Provide camera name')

    @staticmethod
    @jwt_required()
    def get():
        # Build the full camera inventory, then enrich each entry with its
        # five most recent history events and the date of its last image.
        cameras = []
        for camera in DB.cams.find():
            cameras.append({
                "id": str(camera.get("_id")),
                "name": camera.get('name'),
                "api_key": camera.get('api_key'),
                "active": camera.get('active'),
                "last_access": camera.get('last_access'),
                "registered": camera.get('registered'),
            })
        for camera in cameras:
            # get the last history entry of the camera
            last_events = DB.cams.history.find({"camera": camera.get('name')}) \
                .sort("when", pymongo.DESCENDING) \
                .limit(5)
            if last_events:
                camera['last_events'] = list()
                for last_event in last_events:
                    camera['last_events'].append({
                        "when": last_event.get("when"),
                        "user": last_event.get("user"),
                        "action": last_event.get("action"),
                        "new_state": last_event.get("new_state")
                    })
            last_image = DB.images.find_one({"camera": camera.get('name')}, sort=[("date_saved", pymongo.DESCENDING)])
            if last_image:
                camera["last_image_date"] = last_image.get("date_saved")
        return jsonify(result="OK", cameras=cameras)

    @jwt_required()
    def put(self):
        # Register a new camera with a freshly generated random API key.
        args = self.register_parser.parse_args()
        input_cam_name = args.get('cam_name')
        existing = DB.cams.find_one({"name": input_cam_name})
        if existing:
            return {'error': "There is already a camera with this name"}, 400
        else:
            # NOTE(review): hashlib.sha224() requires bytes on Python 3;
            # passing str(...) unencoded only works on Python 2 -- confirm
            # the target interpreter.
            new_cam_api_key = hashlib.sha224(str(random.getrandbits(256))).hexdigest()
            DB.cams.insert({
                "name": input_cam_name,
                "api_key": new_cam_api_key,
                "registered": datetime.datetime.now(),
                "active": True
            })
            return {'status': "OK", 'api_key': new_cam_api_key}
| {
"repo_name": "SillentTroll/rascam_server",
"path": "wsgi/camera_api.py",
"copies": "1",
"size": "8578",
"license": "apache-2.0",
"hash": 8801335565332977000,
"line_mean": 35.6581196581,
"line_max": 118,
"alpha_frac": 0.5606201912,
"autogenerated": false,
"ratio": 3.850089766606822,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9907598710200509,
"avg_score": 0.0006222495212625515,
"num_lines": 234
} |
from functools import wraps
import hashlib
import pickle
import sys
import threading
import time
try:
from Queue import Queue # Python 2
except ImportError:
from queue import Queue # Python 3
from walrus.utils import decode
from walrus.utils import encode
from walrus.utils import PY3
if PY3:
imap = map
else:
from itertools import imap
class Cache(object):
    """
    Cache implementation with simple ``get``/``set`` operations,
    and a decorator.

    Values are pickled before being written to Redis and unpickled on
    read. When ``debug`` is set, every operation is a no-op.
    """
    def __init__(self, database, name='cache', default_timeout=None,
                 debug=False):
        """
        :param database: :py:class:`Database` instance.
        :param name: Namespace for this cache.
        :param int default_timeout: Default cache timeout.
        :param debug: Disable cache for debugging purposes. Cache will no-op.
        """
        self.database = database
        self.name = name
        # Length of the "<name>:" prefix; used by unmake_key() to strip it.
        self.prefix_len = len(self.name) + 1
        self.default_timeout = default_timeout
        self.debug = debug
        # Usage counters, updated by get/set and the bulk operations.
        self.metrics = {'hits': 0, 'misses': 0, 'writes': 0}
    def make_key(self, s):
        # Prefix a user-facing key with the cache namespace.
        return ':'.join((self.name, s))
    def unmake_key(self, k):
        # Strip the "<name>:" namespace prefix.
        return k[self.prefix_len:]
    def get(self, key, default=None):
        """
        Retreive a value from the cache. In the event the value
        does not exist, return the ``default``.
        """
        key = self.make_key(key)
        if self.debug:
            return default
        try:
            value = self.database[key]
        except KeyError:
            self.metrics['misses'] += 1
            return default
        else:
            self.metrics['hits'] += 1
            # Stored values are pickled; see set().
            return pickle.loads(value)
    def set(self, key, value, timeout=None):
        """
        Cache the given ``value`` in the specified ``key``. If no
        timeout is specified, the default timeout will be used.
        """
        key = self.make_key(key)
        if timeout is None:
            timeout = self.default_timeout
        if self.debug:
            return True
        pickled_value = pickle.dumps(value)
        self.metrics['writes'] += 1
        if timeout:
            # setex stores the value with a server-side expiry.
            return self.database.setex(key, int(timeout), pickled_value)
        else:
            return self.database.set(key, pickled_value)
    def delete(self, key):
        """Remove the given key from the cache."""
        if self.debug: return 0
        return self.database.delete(self.make_key(key))
    def get_many(self, keys):
        """
        Retrieve multiple values from the cache. Missing keys are not included
        in the result dictionary.

        :param list keys: list of keys to fetch.
        :returns: dictionary mapping keys to cached values.
        """
        accum = {}
        if self.debug: return accum
        prefixed = [self.make_key(key) for key in keys]
        # mget preserves order, so zip pairs each key with its value.
        for key, value in zip(keys, self.database.mget(prefixed)):
            if value is not None:
                accum[key] = pickle.loads(value)
                self.metrics['hits'] += 1
            else:
                self.metrics['misses'] += 1
        return accum
    def set_many(self, __data=None, timeout=None, **kwargs):
        """
        Set multiple key/value pairs in one operation.

        :param dict __data: provide data as dictionary of key/value pairs.
        :param timeout: optional timeout for data.
        :param kwargs: alternatively, provide data as keyword arguments.
        :returns: True on success.
        """
        if self.debug:
            return True
        timeout = timeout if timeout is not None else self.default_timeout
        if __data is not None:
            kwargs.update(__data)
        accum = {}
        for key, value in kwargs.items():
            accum[self.make_key(key)] = pickle.dumps(value)
        # Batch the mset + per-key expiries into a single round-trip.
        pipeline = self.database.pipeline()
        pipeline.mset(accum)
        if timeout:
            for key in accum:
                pipeline.expire(key, timeout)
        self.metrics['writes'] += len(accum)
        # First pipeline result is the mset status.
        return pipeline.execute()[0]
    def delete_many(self, keys):
        """
        Delete multiple keys from the cache in one operation.

        :param list keys: keys to delete.
        :returns: number of keys removed.
        """
        if self.debug: return
        prefixed = [self.make_key(key) for key in keys]
        return self.database.delete(*prefixed)
    def keys(self):
        """
        Return all keys for cached values.
        """
        return imap(decode, self.database.keys(self.make_key('') + '*'))
    def flush(self):
        """Remove all cached objects from the database."""
        keys = list(self.keys())
        if keys:
            return self.database.delete(*keys)
    def incr(self, key, delta=1):
        # Atomically increment the (namespaced) key by delta.
        return self.database.incr(self.make_key(key), delta)
    def _key_fn(a, k):
        # Default key function for the decorators below. Deliberately
        # defined without `self`: it is called as key_fn(args, kwargs).
        return hashlib.md5(pickle.dumps((a, k))).hexdigest()
    def cached(self, key_fn=_key_fn, timeout=None, metrics=False):
        """
        Decorator that will transparently cache calls to the
        wrapped function. By default, the cache key will be made
        up of the arguments passed in (like memoize), but you can
        override this by specifying a custom ``key_fn``.

        :param key_fn: Function used to generate a key from the
            given args and kwargs.
        :param timeout: Time to cache return values.
        :param metrics: Keep stats on cache utilization and timing.
        :returns: Return the result of the decorated function
            call with the given args and kwargs.

        Usage::

            cache = Cache(my_database)

            @cache.cached(timeout=60)
            def add_numbers(a, b):
                return a + b

            print add_numbers(3, 4)  # Function is called.
            print add_numbers(3, 4)  # Not called, value is cached.

            add_numbers.bust(3, 4)  # Clear cache for (3, 4).
            print add_numbers(3, 4)  # Function is called.

        The decorated function also gains a new attribute named
        ``bust`` which will clear the cache for the given args.
        """
        def decorator(fn):
            def make_key(args, kwargs):
                return '%s:%s' % (fn.__name__, key_fn(args, kwargs))
            def bust(*args, **kwargs):
                return self.delete(make_key(args, kwargs))
            _metrics = {
                'hits': 0,
                'misses': 0,
                'avg_hit_time': 0,
                'avg_miss_time': 0}
            @wraps(fn)
            def inner(*args, **kwargs):
                start = time.time()
                is_cache_hit = True
                key = make_key(args, kwargs)
                res = self.get(key)
                # NOTE: a cached value of None is indistinguishable from a
                # miss here, so functions returning None are re-executed on
                # every call.
                if res is None:
                    res = fn(*args, **kwargs)
                    self.set(key, res, timeout)
                    is_cache_hit = False
                if metrics:
                    dur = time.time() - start
                    if is_cache_hit:
                        _metrics['hits'] += 1
                        _metrics['avg_hit_time'] += (dur / _metrics['hits'])
                    else:
                        _metrics['misses'] += 1
                        _metrics['avg_miss_time'] += (dur / _metrics['misses'])
                return res
            inner.bust = bust
            inner.make_key = make_key
            if metrics:
                inner.metrics = _metrics
            return inner
        return decorator
    def cached_property(self, key_fn=_key_fn, timeout=None):
        """
        Decorator that will transparently cache calls to the wrapped
        method. The method will be exposed as a property.

        Usage::

            cache = Cache(my_database)

            class Clock(object):
                @cache.cached_property()
                def now(self):
                    return datetime.datetime.now()

            clock = Clock()
            print clock.now
        """
        # Capture the Cache instance for the descriptor class below.
        this = self
        class _cached_property(object):
            def __init__(self, fn):
                self._fn = this.cached(key_fn, timeout)(fn)
            def __get__(self, instance, instance_type=None):
                if instance is None:
                    return self
                return self._fn(instance)
            def __delete__(self, obj):
                # `del obj.prop` busts the cached value for this instance.
                self._fn.bust(obj)
            def __set__(self, instance, value):
                raise ValueError('Cannot set value of a cached property.')
        def decorator(fn):
            return _cached_property(fn)
        return decorator
    def cache_async(self, key_fn=_key_fn, timeout=3600):
        """
        Decorator that will execute the cached function in a separate
        thread. The function will immediately return, returning a
        callable to the user. This callable can be used to check for
        a return value.

        For details, see the :ref:`cache-async` section of the docs.

        :param key_fn: Function used to generate cache key.
        :param int timeout: Cache timeout in seconds.
        :returns: A new function which can be called to retrieve the
            return value of the decorated function.
        """
        def decorator(fn):
            wrapped = self.cached(key_fn, timeout)(fn)
            @wraps(fn)
            def inner(*args, **kwargs):
                # The worker thread publishes its result via this queue.
                q = Queue()
                def _sub_fn():
                    q.put(wrapped(*args, **kwargs))
                def _get_value(block=True, timeout=None):
                    # Memoize the result on the getter so repeated calls do
                    # not block on the (now empty) queue.
                    if not hasattr(_get_value, '_return_value'):
                        result = q.get(block=block, timeout=timeout)
                        _get_value._return_value = result
                    return _get_value._return_value
                thread = threading.Thread(target=_sub_fn)
                thread.start()
                return _get_value
            return inner
        return decorator
class sentinel(object):
    # Marker type, usable as a unique "no value" placeholder distinct
    # from None.
    pass
| {
"repo_name": "coleifer/walrus",
"path": "walrus/cache.py",
"copies": "1",
"size": "10016",
"license": "mit",
"hash": 7708376526262107000,
"line_mean": 30.4968553459,
"line_max": 79,
"alpha_frac": 0.5405351438,
"autogenerated": false,
"ratio": 4.406511218653762,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5447046362453762,
"avg_score": null,
"num_lines": null
} |
from functools import wraps
import hashlib
import pickle
from wrapt import ObjectProxy
class CacheMissError(RuntimeError):
    """
    Raised when a memorized value is absent from the cache and the
    decorator was configured with ``error_on_miss=True``.
    """
    pass
class MemorizedObject(ObjectProxy):
    """
    Thin wrapper around any memorized objects, adding attributes indicating
    the object's cache key and whether or not it was returned from cache.
    """
    def __init__(self, wrapped):
        super(MemorizedObject, self).__init__(wrapped)
        # wrapt proxies require the '_self_' prefix for attributes that
        # live on the proxy itself rather than the wrapped object.
        self._self_from_cache = False
        self._self_cache_key = None
    def __reduce__(self):
        # Pickle only the wrapped value; proxy metadata is rebuilt on load.
        return type(self), (self.__wrapped__,)
    def __repr__(self):
        return repr(self.__wrapped__)
    @property
    def from_cache(self):
        # True when the value came from the cache rather than a fresh call.
        return self._self_from_cache
    @from_cache.setter
    def from_cache(self, value):
        self._self_from_cache = value
    @property
    def cache_key(self):
        # The memcached key under which the value is stored.
        return self._self_cache_key
    @cache_key.setter
    def cache_key(self, value):
        self._self_cache_key = value
class memorize(object):
    """
    Method decorator to memoize that method in memcached based on the name
    of the method, the name of the class to whom that method belongs, and the
    call signature.

    Example:

        class Book(object):
            def __init__(self, title, author):
                self.title = title
                self.author = author

            @memorize(cxn)
            def attr(self, key):
                return getattr(self, key)

        gow = Book('Grapes of Wrath', 'John Steinbeck')

        title = gow.attr('title')  # 'Grapes of Wrath'
        title.from_cache           # False
        title.cache_key            # 'memorize_c952a93846d07a04f7bf127b7b640ca1'

        title_2 = gow.attr('title')  # 'Grapes of Wrath'
        title_2.from_cache           # True
    """
    def __init__(self, memcached, prefix='memorize', ttl=0,
                 error_on_miss=False):
        self.memcached = memcached
        self.prefix = prefix
        self.ttl = ttl
        self.error_on_miss = error_on_miss
    def __call__(self, fn):
        """
        Wrap ``fn`` so its return value is cached in memcached under a key
        derived from the call signature. On a hit the function is not
        executed at all.
        """
        @wraps(fn)
        def wrapper(*args, **kwargs):
            key = self._make_cache_key(fn, args, kwargs)
            value = self.memcached.get(key)
            # Compare against None rather than truthiness: falsy cached
            # values (0, '', False, []) previously looked like misses and
            # were recomputed -- and re-stored -- on every call.
            if value is not None:
                value = MemorizedObject(value)
                value.from_cache = True
            else:
                # TODO: kick off a task here, instead of calculating the value,
                # then throwing an exception.
                value = fn(*args, **kwargs)
                self.memcached.set(key, value, time=self.ttl)
                if self.error_on_miss:
                    raise CacheMissError()
                value = MemorizedObject(value)
            value.cache_key = key
            return value
        return wrapper
    def _make_cache_key(self, fn, args, kwargs):
        """
        Generate a cache key from the configured prefix plus the md5 of the
        pickled (function name, args, kwargs). This requires all args and
        kwargs to be picklable.
        """
        hashed = hashlib.md5()
        pickled = pickle.dumps([fn.__name__, args, kwargs])
        hashed.update(pickled)
        return '%s_%s' % (self.prefix, hashed.hexdigest())
| {
"repo_name": "chuckharmston/ghosttown",
"path": "app/memorize.py",
"copies": "1",
"size": "3947",
"license": "mpl-2.0",
"hash": 8090964513126626000,
"line_mean": 30.0787401575,
"line_max": 79,
"alpha_frac": 0.5877881936,
"autogenerated": false,
"ratio": 3.907920792079208,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4995708985679208,
"avg_score": null,
"num_lines": null
} |
from functools import wraps
import hashlib
import time
import os
from flask import current_app, Response, request
from werkzeug.http import parse_authorization_header
from radiocrepe.storage import DistributedStorage
# Maps client address -> (nonce, issue timestamp) for digest-auth handshakes.
nonce_registry = {}
def with_hub_db(f):
    """
    Decorator injecting the hub's storage objects: the wrapped view is
    invoked as ``f(db, storage, node_registry, *args, **kwargs)``.
    """
    @wraps(f)
    def _wrapper(*args, **kwargs):
        storage = DistributedStorage.bind(current_app.config)
        return f(storage.db, storage, storage.node_registry, *args, **kwargs)
    return _wrapper
class with_digest_auth(object):
    """
    Decorator class implementing HTTP Digest authentication
    (qop="auth") for Flask views, with credentials looked up through
    ``cred_provider``.
    """
    def __init__(self, cred_provider):
        self.cred_provider = cred_provider
    def response(self, user, password, method, digest_uri, nonce,
                 cnonce, nc, qop):
        # Expected digest response: HA1 = md5(user:realm:password),
        # HA2 = md5(method:uri), response = md5(HA1:nonce:nc:cnonce:qop:HA2).
        # NOTE(review): md5 of a str only works on Python 2; Python 3 would
        # need explicit .encode() -- confirm the target interpreter.
        ha1 = hashlib.md5("%s:%s:%s" % (user, self.cred_provider.realm, password)).hexdigest()
        ha2 = hashlib.md5("%s:%s" % (method, digest_uri)).hexdigest()
        return hashlib.md5("%s:%s:%s:%s:%s:%s" % \
            (ha1, nonce, nc, cnonce, qop, ha2)).hexdigest()
    def challenge(self, msg='authenticate first', stale=False, code=401):
        # Issue a 401/403 with a fresh nonce, remembering the nonce and its
        # issue time per client address for later verification.
        is_stale = ', stale=true' if stale else ''
        # NOTE(review): bytes.encode('hex') is Python 2 only.
        nonce = hashlib.sha1( os.urandom(10).encode('hex')).hexdigest()
        nonce_registry[request.remote_addr] = (nonce, time.time())
        return Response(msg, code,
                        {'WWW-Authenticate': 'Digest realm="{0}", qop="auth", nonce="{1}"{2}'.format(self.cred_provider.realm, nonce, is_stale)})
    def __call__(self, f):
        @wraps(f)
        def _wrapper(*args, **kwargs):
            if len(args) < 3:
                raise Exception("'with_digest_auth' requires a 'registry'")
            else:
                # args[2] is the node registry injected by with_hub_db.
                self.cred_provider.set_session(args[2].db.session)
            auth_header = request.headers.get('Authorization')
            if not auth_header or not auth_header.startswith("Digest"):
                return self.challenge()
            auth = parse_authorization_header(auth_header)
            if auth.username not in self.cred_provider:
                return self.challenge('no such user')
            if request.remote_addr in nonce_registry:
                nonce, ts = nonce_registry.get(request.remote_addr, None)
            else:
                return self.challenge('no nonce')
            # Nonces are only honoured for ten minutes.
            if (time.time() - ts) > 600:
                return self.challenge('nonce expired', stale=True)
            result = self.response(auth.username, self.cred_provider.get(auth.username).secret_key,
                                   request.method, request.path, auth.nonce,
                                   auth.cnonce, auth.nc, auth.qop)
            if auth.nonce == nonce and auth.realm == self.cred_provider.realm and auth.response == result:
                return f(*args, user=self.cred_provider.get(auth.username),
                         **kwargs)
            return self.challenge('wrong credentials' , code=403)
        return _wrapper
| {
"repo_name": "pferreir/radiocrepe",
"path": "radiocrepe/web/util.py",
"copies": "1",
"size": "2908",
"license": "mit",
"hash": 1729710253519627800,
"line_mean": 38.8356164384,
"line_max": 137,
"alpha_frac": 0.5842503439,
"autogenerated": false,
"ratio": 3.9618528610354224,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5046103204935423,
"avg_score": null,
"num_lines": null
} |
from functools import wraps
import hashlib

from django.conf import settings
from django.core.cache import cache
from django.http import HttpResponse
from django.utils.decorators import decorator_from_middleware, available_attrs
def get_cache_key(request, key_prefix, key_generator=None):
    """
    Build the cache key for ``request``.

    By default the key is the md5 of the full request path (including
    GET parameters), enabling Memcached lookup straight from Nginx.
    When ``key_generator`` is provided it is used instead; it must be a
    callable accepting a request and returning a string.  The prefix
    (falling back to ``settings.CACHE_MIDDLEWARE_KEY_PREFIX`` when
    empty) distinguishes cache areas in a multi-site setup.
    """
    if key_generator:
        suffix = key_generator(request)
    else:
        suffix = hashlib.md5(request.get_full_path()).hexdigest()
    prefix = key_prefix or settings.CACHE_MIDDLEWARE_KEY_PREFIX
    return prefix + suffix
def cache_page(timeout, key_prefix='', key_generator=None):
    """
    Decorator for views that tries getting the page from the cache and
    populates the cache if the page isn't in the cache yet.

    Only the response *content* is stored (keyed by md5 of the full
    request path) so Nginx can serve hits straight from Memcached.  On a
    Python-side cache hit the stored content is therefore re-wrapped in
    an ``HttpResponse``; previously the raw cached string was returned,
    which is not a valid view return value.

    :param timeout: cache expiry in seconds.
    :param key_prefix: optional prefix distinguishing cache areas.
    :param key_generator: optional callable(request) -> str overriding
        the default key derivation.
    """
    def decorator(view_func):
        def _wrapped_view(request, *args, **kwargs):
            cache_key = get_cache_key(request, key_prefix, key_generator)
            content = cache.get(cache_key, None)
            if content is not None:
                # Cache hit: rebuild a response object around the content.
                return HttpResponse(content)
            response = view_func(request, *args, **kwargs)
            cache.set(cache_key, response.content, timeout)
            return response
        return wraps(view_func, assigned=available_attrs(view_func))(_wrapped_view)
    return decorator
| {
"repo_name": "shaunsephton/djanginxed",
"path": "djanginxed/decorators/cache.py",
"copies": "1",
"size": "1814",
"license": "bsd-3-clause",
"hash": 3281553517619224600,
"line_mean": 44.35,
"line_max": 113,
"alpha_frac": 0.7056229327,
"autogenerated": false,
"ratio": 4.288416075650118,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.01308642809643954,
"num_lines": 40
} |
from functools import wraps
import heapq
from annoying.functions import get_object_or_None
from django.http import HttpResponseRedirect, HttpResponseNotFound
from django.conf import settings
from django.utils import timezone
from django.utils.module_loading import import_string
from schedule.conf.settings import (
CHECK_EVENT_PERM_FUNC,
CHECK_CALENDAR_PERM_FUNC,
CHECK_OCCURRENCE_PERM_FUNC,
CALENDAR_VIEW_PERM)
class EventListManager(object):
    """
    This class is responsible for doing functions on a list of events. It is
    used when one has a list of events and wants to access the occurrences
    from these events as a group.
    """
    def __init__(self, events):
        self.events = events
    def occurrences_after(self, after=None):
        """
        It is often useful to know what the next occurrence is given a list of
        events. This function produces a generator that yields the
        most recent occurrence after the date ``after`` from any of the
        events in ``self.events``.
        """
        from schedule.models import Occurrence
        if after is None:
            after = timezone.now()
        occ_replacer = OccurrenceReplacer(
            Occurrence.objects.filter(event__in=self.events))
        generators = [event._occurrences_after_generator(after)
                      for event in self.events]
        # Merge the per-event occurrence streams with a heap so results
        # come out in chronological order.
        occurrences = []
        for generator in generators:
            try:
                heapq.heappush(occurrences, (next(generator), generator))
            except StopIteration:
                pass
        while True:
            if not occurrences:
                # PEP 479: 'raise StopIteration' inside a generator becomes
                # a RuntimeError on Python 3.7+; a plain return is the
                # correct way to finish the generator.
                return
            generator = occurrences[0][1]
            try:
                next_occurence = heapq.heapreplace(
                    occurrences, (next(generator), generator))[0]
            except StopIteration:
                next_occurence = heapq.heappop(occurrences)[0]
            yield occ_replacer.get_occurrence(next_occurence)
class OccurrenceReplacer(object):
    """
    When getting a list of occurrences, the last thing that needs to be done
    before passing it forward is to make sure all of the occurrences that
    have been stored in the database replace, in the list you are returning,
    the generated ones that are equivalent. This class makes this easier.
    """
    def __init__(self, persisted_occurrences):
        # Index persisted occurrences by (event, original_start,
        # original_end) so generated occurrences can be swapped for their
        # stored counterparts.
        self.lookup = {
            (occ.event, occ.original_start, occ.original_end): occ
            for occ in persisted_occurrences
        }
    def get_occurrence(self, occ):
        """
        Return the persisted occurrence matching ``occ`` -- removing it
        from the lookup, since it is now matched -- or ``occ`` itself when
        no persisted version exists.
        """
        ident = (occ.event, occ.original_start, occ.original_end)
        return self.lookup.pop(ident, occ)
    def has_occurrence(self, occ):
        """Return whether a persisted version of ``occ`` is still unmatched."""
        try:
            return (occ.event, occ.original_start, occ.original_end) in self.lookup
        except TypeError:
            if self.lookup:
                raise TypeError('A problem with checking if a persisted occurence exists has occured!')
            return False
    def get_additional_occurrences(self, start, end):
        """
        Return persisted occurrences which are now in the period.
        """
        return [occ for _, occ in list(self.lookup.items())
                if occ.start < end and occ.end >= start and not occ.cancelled]
def get_occurrence(request, *args, **kwargs):
    """
    Resolve an ``Occurrence`` from the view kwargs, then the GET params,
    then the POST params.  Returns ``None`` when no ``occurrence_id`` is
    supplied or it matches no persisted occurrence.
    """
    from schedule.models import Occurrence
    if 'occurrence_id' in kwargs:
        return get_object_or_None(Occurrence, id=kwargs['occurrence_id'])
    if request.GET:
        return get_object_or_None(Occurrence,
                                  id=request.GET.get('occurrence_id', None))
    if request.POST:
        return get_object_or_None(Occurrence,
                                  id=request.POST.get('occurrence_id', None))
    return None
def get_event(occurrence, request, *args, **kwargs):
    """
    Resolve an ``Event``: prefer the given occurrence's event, then the view
    kwargs, then GET, then POST.  Returns ``None`` when nothing matches.
    """
    from schedule.models import Event
    if occurrence:
        return occurrence.event
    if 'event_id' in kwargs:
        return get_object_or_None(Event, id=kwargs['event_id'])
    if request.GET:
        return get_object_or_None(Event,
                                  id=request.GET.get('event_id', None))
    if request.POST:
        return get_object_or_None(Event,
                                  id=request.POST.get('event_id', None))
    return None
def get_calendar(event, request, *args, **kwargs):
    """
    Resolve a ``Calendar``: prefer the given event's calendar, then the view
    kwargs, then GET, then POST (all keyed by ``calendar_slug``).
    Returns ``None`` when nothing matches.
    """
    from schedule.models import Calendar
    if event:
        return event.calendar
    if 'calendar_slug' in kwargs:
        return get_object_or_None(Calendar, slug=kwargs['calendar_slug'])
    if request.GET:
        return get_object_or_None(Calendar,
                                  slug=request.GET.get('calendar_slug', None))
    if request.POST:
        return get_object_or_None(Calendar,
                                  slug=request.POST.get('calendar_slug', None))
    return None
def get_objects(request, *args, **kwargs):
    """
    Resolve the ``(occurrence, event, calendar)`` triple for a request;
    each lookup falls back on the previous, more specific object.
    """
    occurrence = get_occurrence(request, *args, **kwargs)
    event = get_event(occurrence, request, *args, **kwargs)
    return occurrence, event, get_calendar(event, request, *args, **kwargs)
def check_occurrence_permissions(function):
    """
    View decorator that runs ``function`` only when the request's user
    passes the event, calendar and occurrence permission checks for the
    objects addressed by the request.  Redirects to ``LOGIN_URL`` when the
    user is missing or lacks permission; responds 404 when the calendar or
    event cannot be resolved.
    """
    @wraps(function)
    def decorator(request, *args, **kwargs):
        # (Removed an unused local import of Event/Calendar/Occurrence;
        # get_objects() performs its own model imports.)
        user = request.user
        if not user:
            return HttpResponseRedirect(settings.LOGIN_URL)
        occurrence, event, calendar = get_objects(request, *args, **kwargs)
        if calendar and event:
            allowed = (CHECK_EVENT_PERM_FUNC(event, user)
                       and CHECK_CALENDAR_PERM_FUNC(calendar, user)
                       and CHECK_OCCURRENCE_PERM_FUNC(occurrence, user))
            if not allowed:
                return HttpResponseRedirect(settings.LOGIN_URL)
            # all checks passed
            return function(request, *args, **kwargs)
        return HttpResponseNotFound('<h1>Page not found</h1>')
    return decorator
def check_event_permissions(function):
    """
    View decorator that runs ``function`` only when the request's user
    passes the event and calendar permission checks for the objects
    addressed by the request.  Redirects to ``LOGIN_URL`` when the user is
    missing or lacks permission; responds 404 when the calendar cannot be
    resolved.
    """
    @wraps(function)
    def decorator(request, *args, **kwargs):
        # (Removed an unused local import of Event/Calendar/Occurrence;
        # get_objects() performs its own model imports.)
        user = request.user
        if not user:
            return HttpResponseRedirect(settings.LOGIN_URL)
        occurrence, event, calendar = get_objects(request, *args, **kwargs)
        if calendar:
            allowed = (CHECK_EVENT_PERM_FUNC(event, user)
                       and CHECK_CALENDAR_PERM_FUNC(calendar, user))
            if not allowed:
                return HttpResponseRedirect(settings.LOGIN_URL)
            # all checks passed
            return function(request, *args, **kwargs)
        return HttpResponseNotFound('<h1>Page not found</h1>')
    return decorator
def check_calendar_permissions(function):
    """
    View decorator enforcing calendar-level view permission when the
    ``CALENDAR_VIEW_PERM`` setting is enabled; when disabled, the view runs
    without any checks.  Redirects to ``LOGIN_URL`` when the user is missing
    or lacks permission; responds 404 when the calendar cannot be resolved.
    """
    @wraps(function)
    def decorator(request, *args, **kwargs):
        if not CALENDAR_VIEW_PERM:
            # Permission checking is disabled site-wide.
            return function(request, *args, **kwargs)
        # (Removed an unused local import of Event/Calendar/Occurrence;
        # get_objects() performs its own model imports.)
        user = request.user
        if not user:
            return HttpResponseRedirect(settings.LOGIN_URL)
        occurrence, event, calendar = get_objects(request, *args, **kwargs)
        if calendar:
            if not CHECK_CALENDAR_PERM_FUNC(calendar, user):
                return HttpResponseRedirect(settings.LOGIN_URL)
            # all checks passed
            return function(request, *args, **kwargs)
        return HttpResponseNotFound('<h1>Page not found</h1>')
    return decorator
def coerce_date_dict(date_dict):
    """
    Given a dictionary (presumed to be from request.GET) return a dict that
    represents a date/time down to the most specific component present.

    Components are consumed in order (year, month, day, hour, minute,
    second), stopping at the first missing one; the remaining components
    keep their minimum values.  E.g. if year, month and second are present,
    only year and month are used.  If not even ``year`` is present, an
    empty dict is returned.  (The original docstring incorrectly said a
    tuple is returned.)

    Raises:
        ValueError: if a present component cannot be parsed as an int.
    """
    keys = ['year', 'month', 'day', 'hour', 'minute', 'second']
    ret_val = {
        'year': 1,
        'month': 1,
        'day': 1,
        'hour': 0,
        'minute': 0,
        'second': 0,
    }
    modified = False
    for key in keys:
        try:
            ret_val[key] = int(date_dict[key])
        except KeyError:
            # First missing component: finer-grained ones stay at defaults.
            break
        modified = True
    # Replaces the archaic ``modified and ret_val or {}`` idiom.
    return ret_val if modified else {}
def get_model_bases():
    """
    Return the list of base classes scheduler models should inherit from.

    Controlled by the ``SCHEDULER_BASE_CLASSES`` setting (an iterable of
    dotted import paths); defaults to ``[django.db.models.Model]``.
    """
    from django.db.models import Model
    base_paths = getattr(settings, 'SCHEDULER_BASE_CLASSES', None)
    if base_paths is None:
        return [Model]
    return [import_string(path) for path in base_paths]
"repo_name": "Gustavosdo/django-scheduler",
"path": "schedule/utils.py",
"copies": "2",
"size": "8845",
"license": "bsd-3-clause",
"hash": 8639920163481174000,
"line_mean": 36.3248945148,
"line_max": 124,
"alpha_frac": 0.6252119842,
"autogenerated": false,
"ratio": 4.172169811320755,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.0031547917454074533,
"num_lines": 237
} |
from functools import wraps
import hmac
import hashlib
import time
import warnings
import logging
import requests
logger = logging.getLogger(__name__)
class BitstampError(Exception):
    """Raised when the Bitstamp API reports an error payload or returns an
    unexpected/undecodable response."""
    pass
class TransRange(object):
    """
    Enum like object used in transaction method to specify time range
    from which to get list of transactions
    """
    # Values are passed verbatim as the ``time`` query parameter of the
    # Bitstamp ``transactions`` endpoint (see Public.transactions).
    HOUR = 'hour'
    MINUTE = 'minute'
    DAY = 'day'
class BaseClient(object):
    """
    A base class for the API Client methods that handles interaction with
    the requests library.
    """
    # Base URLs for the two Bitstamp API versions; selected per-call via
    # the ``version`` argument of ``_request``.
    api_url = {1: 'https://www.bitstamp.net/api/',
               2: 'https://www.bitstamp.net/api/v2/'}
    exception_on_error = True

    def __init__(self, proxydict=None, *args, **kwargs):
        # Optional requests-style proxies mapping applied to every request.
        self.proxydict = proxydict

    def _get(self, *args, **kwargs):
        """
        Make a GET request.
        """
        return self._request(requests.get, *args, **kwargs)

    def _post(self, *args, **kwargs):
        """
        Make a POST request, merging the client's default POST data into
        any caller-supplied ``data``.
        """
        data = self._default_data()
        data.update(kwargs.get('data') or {})
        kwargs['data'] = data
        return self._request(requests.post, *args, **kwargs)

    def _default_data(self):
        """
        Default data for a POST request.  Subclasses (e.g. ``Trading``)
        override this to add authentication fields.
        """
        return {}

    def _construct_url(self, url, base, quote):
        """
        Adds the orderbook to the url if base and quote are specified.
        """
        if not base and not quote:
            return url
        return url + base.lower() + quote.lower() + "/"

    def _request(self, func, url, version=1, *args, **kwargs):
        """
        Make a generic request, adding in any proxy defined by the instance.

        Raises a ``requests.HTTPError`` if the response status isn't 200, and
        raises a :class:`BitstampError` if the response contains a json encoded
        error message.
        """
        return_json = kwargs.pop('return_json', False)
        url = self.api_url[version] + url
        logger.debug("Request URL: %s", url)
        if 'data' in kwargs and 'nonce' in kwargs['data']:
            logger.debug("Request nonce: %s", kwargs['data']['nonce'])
        # BUGFIX: the proxy configuration must be applied *before* the
        # request is issued.  Previously it was assigned to ``kwargs`` only
        # after ``func`` had already been called, so ``proxydict`` never had
        # any effect.
        if 'proxies' not in kwargs:
            kwargs['proxies'] = self.proxydict
        response = func(url, *args, **kwargs)
        logger.debug("Response Code %s and Reason %s",
                     response.status_code, response.reason)
        logger.debug("Response Text %s", response.text)
        # Check for error, raising an exception if appropriate.
        response.raise_for_status()
        try:
            json_response = response.json()
        except ValueError:
            json_response = None
        if isinstance(json_response, dict):
            # Both API versions signal errors inside an otherwise-200 JSON
            # body: v1 uses 'error', v2 uses status == 'error' + 'reason'.
            error = json_response.get('error')
            if error:
                raise BitstampError(error)
            elif json_response.get('status') == "error":
                raise BitstampError(json_response.get('reason'))
        if return_json:
            if json_response is None:
                raise BitstampError(
                    "Could not decode json for: " + response.text)
            return json_response
        return response
class Public(BaseClient):
    """Unauthenticated (public) Bitstamp API endpoints."""

    def ticker(self, base="btc", quote="usd"):
        """Return the ticker dictionary for the given pair."""
        return self._get(self._construct_url("ticker/", base, quote),
                         return_json=True, version=2)

    def ticker_hour(self, base="btc", quote="usd"):
        """Return a dictionary with the average ticker of the past hour."""
        return self._get(self._construct_url("ticker_hour/", base, quote),
                         return_json=True, version=2)

    def order_book(self, group=True, base="btc", quote="usd"):
        """
        Return a dictionary with "bids" and "asks"; each is a list of open
        orders, every order being a [price, amount] pair.
        """
        return self._get(self._construct_url("order_book/", base, quote),
                         params={'group': group},
                         return_json=True, version=2)

    def transactions(self, time=TransRange.HOUR, base="btc", quote="usd"):
        """
        Return the transactions of the time range selected by ``time``,
        which is one of the :class:`TransRange` values.
        """
        return self._get(self._construct_url("transactions/", base, quote),
                         params={'time': time},
                         return_json=True, version=2)

    def conversion_rate_usd_eur(self):
        """
        Return the USD/EUR conversion rates as a simple dictionary::

            {'buy': 'buy conversion rate', 'sell': 'sell conversion rate'}
        """
        return self._get("eur_usd/", return_json=True, version=1)

    def trading_pairs_info(self):
        """
        Return a list of dictionaries, one per available trading pair, with
        its description, name, url_symbol, trading status, minimum order
        and decimal precisions.
        """
        return self._get("trading-pairs-info/", return_json=True, version=2)
class Trading(Public):
    """
    Authenticated Bitstamp API endpoints (balances, orders, withdrawals,
    deposit addresses, sub-account transfers).  Inherits every public
    endpoint from :class:`Public`; all POSTs are signed via
    :meth:`_default_data`.
    """

    def __init__(self, username, key, secret, *args, **kwargs):
        """
        Stores the username, key, and secret which is used when making POST
        requests to Bitstamp.
        """
        super(Trading, self).__init__(
            username=username, key=key, secret=secret, *args, **kwargs)
        self.username = username
        self.key = key
        self.secret = secret

    def get_nonce(self):
        """
        Get a unique nonce for the bitstamp API.

        This integer must always be increasing, so use the current unix time.
        Every time this variable is requested, it automatically increments to
        allow for more than one API request per second.

        This isn't a thread-safe function however, so you should only rely on a
        single thread if you have a high level of concurrent API requests in
        your application.
        """
        nonce = getattr(self, '_nonce', 0)
        if nonce:
            nonce += 1
        # If the unix time is greater though, use that instead (helps low
        # concurrency multi-threaded apps always call with the largest nonce).
        self._nonce = max(int(time.time()), nonce)
        return self._nonce

    def _default_data(self, *args, **kwargs):
        """
        Generate a one-time signature and other data required to send a secure
        POST request to the Bitstamp API.
        """
        data = super(Trading, self)._default_data(*args, **kwargs)
        data['key'] = self.key
        nonce = self.get_nonce()
        # HMAC-SHA256 over "<nonce><username><api key>" with the API
        # secret, upper-cased hex digest.
        msg = str(nonce) + self.username + self.key
        signature = hmac.new(
            self.secret.encode('utf-8'), msg=msg.encode('utf-8'),
            digestmod=hashlib.sha256).hexdigest().upper()
        data['signature'] = signature
        data['nonce'] = nonce
        return data

    def _expect_true(self, response):
        """
        A shortcut that raises a :class:`BitstampError` if the response didn't
        just contain the text 'true'.
        """
        if response.text == u'true':
            return True
        raise BitstampError("Unexpected response")

    def account_balance(self, base="btc", quote="usd"):
        """
        Returns dictionary::

            {u'btc_reserved': u'0',
             u'fee': u'0.5000',
             u'btc_available': u'2.30856098',
             u'usd_reserved': u'0',
             u'btc_balance': u'2.30856098',
             u'usd_balance': u'114.64',
             u'usd_available': u'114.64',
             ---If base and quote were specified:
             u'fee': u'',
             ---If base and quote were not specified:
             u'btcusd_fee': u'0.25',
             u'btceur_fee': u'0.25',
             u'eurusd_fee': u'0.20',
            }

        There could be reasons to set base and quote to None (or False),
        because the result then will contain the fees for all currency pairs
        For backwards compatibility this can not be the default however.
        """
        url = self._construct_url("balance/", base, quote)
        return self._post(url, return_json=True, version=2)

    def user_transactions(self, offset=0, limit=100, descending=True,
                          base=None, quote=None):
        """
        Returns descending list of transactions. Every transaction (dictionary)
        contains::

            {u'usd': u'-39.25',
             u'datetime': u'2013-03-26 18:49:13',
             u'fee': u'0.20',
             u'btc': u'0.50000000',
             u'type': 2,
             u'id': 213642}

        Instead of the keys btc and usd, it can contain other currency codes
        """
        data = {
            'offset': offset,
            'limit': limit,
            'sort': 'desc' if descending else 'asc',
        }
        url = self._construct_url("user_transactions/", base, quote)
        return self._post(url, data=data, return_json=True, version=2)

    def open_orders(self, base="btc", quote="usd"):
        """
        Returns JSON list of open orders. Each order is represented as a
        dictionary.
        """
        url = self._construct_url("open_orders/", base, quote)
        return self._post(url, return_json=True, version=2)

    def all_open_orders(self):
        """
        Returns JSON list of open orders of all currency pairs.
        Each order is represented as a dictionary.
        """
        return self._post('open_orders/all/', return_json=True, version=2)

    def order_status(self, order_id):
        """
        Returns dictionary.

        - status: 'Finished'
          or 'In Queue'
          or 'Open'
        - transactions: list of transactions
          Each transaction is a dictionary with the following keys:
          btc, usd, price, type, fee, datetime, tid
          or btc, eur, ....
          or eur, usd, ....
        """
        data = {'id': order_id}
        return self._post("order_status/", data=data, return_json=True,
                          version=1)

    def cancel_order(self, order_id, version=1):
        """
        Cancel the order specified by order_id.

        Version 1 (default for backwards compatibility reasons):
        Returns True if order was successfully canceled, otherwise
        raise a BitstampError.

        Version 2:
        Returns dictionary of the canceled order, containing the keys:
        id, type, price, amount
        """
        data = {'id': order_id}
        return self._post("cancel_order/", data=data, return_json=True,
                          version=version)

    def cancel_all_orders(self):
        """
        Cancel all open orders.

        Returns True if it was successful, otherwise raises a
        BitstampError.
        """
        return self._post("cancel_all_orders/", return_json=True, version=1)

    def buy_limit_order(self, amount, price, base="btc", quote="usd", limit_price=None, ioc_order=False):
        """
        Order to buy amount of bitcoins for specified price.
        """
        data = {'amount': amount, 'price': price}
        if limit_price is not None:
            data['limit_price'] = limit_price
        if ioc_order is True:
            data['ioc_order'] = True
        url = self._construct_url("buy/", base, quote)
        return self._post(url, data=data, return_json=True, version=2)

    def buy_market_order(self, amount, base="btc", quote="usd"):
        """
        Order to buy amount of bitcoins for market price.
        """
        data = {'amount': amount}
        url = self._construct_url("buy/market/", base, quote)
        return self._post(url, data=data, return_json=True, version=2)

    def sell_limit_order(self, amount, price, base="btc", quote="usd", limit_price=None, ioc_order=False):
        """
        Order to sell amount of bitcoins for specified price.
        """
        data = {'amount': amount, 'price': price}
        if limit_price is not None:
            data['limit_price'] = limit_price
        if ioc_order is True:
            data['ioc_order'] = True
        url = self._construct_url("sell/", base, quote)
        return self._post(url, data=data, return_json=True, version=2)

    def sell_market_order(self, amount, base="btc", quote="usd"):
        """
        Order to sell amount of bitcoins for market price.
        """
        data = {'amount': amount}
        url = self._construct_url("sell/market/", base, quote)
        return self._post(url, data=data, return_json=True, version=2)

    def check_bitstamp_code(self, code):
        """
        Returns JSON dictionary containing USD and BTC amount included in given
        bitstamp code.
        """
        data = {'code': code}
        return self._post("check_code/", data=data, return_json=True,
                          version=1)

    def redeem_bitstamp_code(self, code):
        """
        Returns JSON dictionary containing USD and BTC amount added to user's
        account.
        """
        data = {'code': code}
        return self._post("redeem_code/", data=data, return_json=True,
                          version=1)

    def withdrawal_requests(self, timedelta = 86400):
        """
        Returns list of withdrawal requests.

        Each request is represented as a dictionary.

        By default, the last 24 hours (86400 seconds) are returned.
        """
        data = {'timedelta': timedelta}
        return self._post("withdrawal_requests/", return_json=True, version=1, data=data)

    def bitcoin_withdrawal(self, amount, address):
        """
        Send bitcoins to another bitcoin wallet specified by address.
        """
        data = {'amount': amount, 'address': address}
        return self._post("bitcoin_withdrawal/", data=data, return_json=True,
                          version=1)

    def bitcoin_deposit_address(self):
        """
        Returns bitcoin deposit address as unicode string
        """
        return self._post("bitcoin_deposit_address/", return_json=True,
                          version=1)

    def unconfirmed_bitcoin_deposits(self):
        """
        Returns JSON list of unconfirmed bitcoin transactions.

        Each transaction is represented as dictionary:

        amount
          bitcoin amount
        address
          deposit address used
        confirmations
          number of confirmations
        """
        return self._post("unconfirmed_btc/", return_json=True, version=1)

    def litecoin_withdrawal(self, amount, address):
        """
        Send litecoins to another litecoin wallet specified by address.
        """
        data = {'amount': amount, 'address': address}
        return self._post("ltc_withdrawal/", data=data, return_json=True,
                          version=2)

    def litecoin_deposit_address(self):
        """
        Returns litecoin deposit address as unicode string
        """
        return self._post("ltc_address/", return_json=True,
                          version=2)

    def ethereum_withdrawal(self, amount, address):
        """
        Send ethers to another ether wallet specified by address.
        """
        data = {'amount': amount, 'address': address}
        return self._post("eth_withdrawal/", data=data, return_json=True,
                          version=2)

    def ethereum_deposit_address(self):
        """
        Returns ethereum deposit address as unicode string
        """
        return self._post("eth_address/", return_json=True,
                          version=2)

    def ripple_withdrawal(self, amount, address, currency):
        """
        Returns true if successful.
        """
        # NOTE: uses the default API version (1), unlike the other
        # altcoin withdrawal endpoints.
        data = {'amount': amount, 'address': address, 'currency': currency}
        response = self._post("ripple_withdrawal/", data=data,
                              return_json=True)
        return self._expect_true(response)

    def ripple_deposit_address(self):
        """
        Returns ripple deposit address as unicode string.
        """
        return self._post("ripple_address/", version=1, return_json=True)[
            "address"]

    def xrp_withdrawal(self, amount, address, destination_tag=None):
        """
        Sends xrps to another xrp wallet specified by address. Returns withdrawal id.
        """
        data = {'amount': amount, 'address': address}
        if destination_tag:
            data['destination_tag'] = destination_tag
        return self._post("xrp_withdrawal/", data=data, return_json=True,
                          version=2)["id"]

    def xrp_deposit_address(self):
        """
        Returns ripple deposit address and destination tag as dictionary.

        Example: {u'destination_tag': 53965834, u'address': u'rDsbeamaa4FFwbQTJp9Rs84Q56vCiWCaBx'}
        """
        return self._post("xrp_address/", version=2, return_json=True)

    def bch_withdrawal(self, amount, address):
        """
        Send bitcoin cash to another bitcoin cash wallet specified by address.
        """
        data = {'amount': amount, 'address': address}
        return self._post("bch_withdrawal/", data=data, return_json=True,
                          version=2)

    def bch_deposit_address(self):
        """
        Returns bitcoin cash deposit address as unicode string
        """
        return self._post("bch_address/", return_json=True,
                          version=2)

    def transfer_to_main(self, amount, currency, subaccount=None):
        """
        Returns dictionary with status.

        subaccount has to be the numerical id of the subaccount, not the name
        """
        data = {'amount': amount,
                'currency': currency,}
        if subaccount is not None:
            data['subAccount'] = subaccount
        return self._post("transfer-to-main/", data=data, return_json=True,
                          version=2)

    def transfer_from_main(self, amount, currency, subaccount):
        """
        Returns dictionary with status.

        subaccount has to be the numerical id of the subaccount, not the name
        """
        data = {'amount': amount,
                'currency': currency,
                'subAccount': subaccount,}
        return self._post("transfer-from-main/", data=data, return_json=True,
                          version=2)
# Backwards compatibility
class BackwardsCompat(object):
    """
    Adapter for the version 1 API style: lower case class names whose
    methods do not raise when Bitstamp reports an error, but instead
    return a ``(False, 'The error message')`` tuple.
    """

    # Subclasses point this at the modern client class to wrap.
    wrapped_class = None

    def __init__(self, *args, **kwargs):
        """Instantiate the wrapped class and emit a deprecation warning."""
        self.wrapped = self.wrapped_class(*args, **kwargs)
        name = type(self).__name__
        message = "Use the {} class rather than the deprecated {} one".format(
            name.title(), name)
        warnings.warn(message, DeprecationWarning, stacklevel=2)

    def __getattr__(self, name):
        """
        Proxy attribute access to the wrapped client.  Callables are
        wrapped so a :class:`BitstampError` becomes the legacy error tuple.
        """
        target = getattr(self.wrapped, name)
        if not callable(target):
            return target

        @wraps(target)
        def wrapped_callable(*args, **kwargs):
            # Translate the modern exception into the version 1 tuple pair.
            try:
                return target(*args, **kwargs)
            except BitstampError as e:
                return False, e.args[0]

        return wrapped_callable
class public(BackwardsCompat):
    """
    Deprecated version 1 client. Use :class:`Public` instead.
    """
    # Wrapped via BackwardsCompat so errors become (False, message) tuples.
    wrapped_class = Public
class trading(BackwardsCompat):
    """
    Deprecated version 1 client. Use :class:`Trading` instead.
    """
    # Wrapped via BackwardsCompat so errors become (False, message) tuples.
    wrapped_class = Trading
| {
"repo_name": "kmadac/bitstamp-python-client",
"path": "bitstamp/client.py",
"copies": "1",
"size": "20246",
"license": "mit",
"hash": -4918496093026124000,
"line_mean": 33.1993243243,
"line_max": 106,
"alpha_frac": 0.5674207251,
"autogenerated": false,
"ratio": 4.200414937759336,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5267835662859336,
"avg_score": null,
"num_lines": null
} |
from functools import wraps
import hmac
from flask import request, make_response, g, current_app, Response
# App-permission codes checked against UserApp.permission — presumably
# read/write/update; confirm against the values stored on UserApp.
permissions = ['r', 'w', 'u']
def _secret_key(key=None):
    """Return the signing key as bytes, defaulting to the app's SECRET_KEY."""
    chosen = current_app.config['SECRET_KEY'] if key is None else key
    if isinstance(chosen, unicode):  # pragma: no cover
        chosen = chosen.encode('latin1')  # ensure bytes
    return chosen
def make_secure_token(*args, **options):
    '''
    This will create a secure token that you can use as an authentication
    token for your users. It uses HMAC to prevent people from guessing the
    information. (To make it even more effective, if you will never need to
    regenerate the token, you can pass some random data as one of the
    arguments.)

    :param \*args: The data to include in the token.
    :type args: args
    :param \*\*options: To manually specify a secret key, pass ``key=THE_KEY``.
                        Otherwise, the ``current_app`` secret key will be used.
    :type \*\*options: kwargs
    '''
    import hashlib  # local import: only needed for the digest constructor

    key = _secret_key(options.get('key'))
    parts = [s if isinstance(s, bytes) else s.encode('utf-8') for s in args]
    payload = b'\0'.join(parts)
    # BUGFIX: make the digest explicit.  ``hmac.new`` without ``digestmod``
    # silently used MD5 on old Pythons and is a TypeError on Python 3.8+.
    # MD5 is kept (not upgraded) so previously issued tokens stay valid;
    # HMAC-MD5 is weak — consider migrating to SHA-256 with token rotation.
    token_value = hmac.new(key, payload, digestmod=hashlib.md5).hexdigest()
    if hasattr(token_value, 'decode'):  # pragma: no cover
        token_value = token_value.decode('utf-8')  # ensure text
    return token_value
def login_required(func):
    """
    View decorator that authenticates the request via the ``x-auth-token``
    header: the matching user is stored on ``g.user`` and the view runs.
    Otherwise responds 401 with a ``WWW-Authenticate`` challenge.
    """
    @wraps(func)
    def decorated_view(*args, **kwargs):
        if 'x-auth-token' in request.headers:
            token = request.headers['x-auth-token']
            from anarcho.models.token import Token
            t = Token.query.filter_by(auth_token=token).first()
            if t is not None:
                g.user = t.user
                return func(*args, **kwargs)
        # BUGFIX: the challenge header must be spelled 'WWW-Authenticate'
        # (was 'WWWAuthenticate', which clients do not recognize).
        return Response('{"error":"unauthorized"}', 401,
                        {'WWW-Authenticate': 'Basic realm="Login Required"'})
    return decorated_view
def app_permissions(permissions=()):
    """
    Decorator factory restricting a view to users whose ``UserApp``
    permission for the addressed app is one of ``permissions``.

    The app key is taken from ``request.json['app_key']`` or the view's
    ``app_key`` kwarg; ``ValueError`` is raised when neither is present.
    Responds 404 when the user has no UserApp row for the app, and 403
    when the permission level is insufficient.

    Note: the default is an immutable tuple instead of the original
    mutable ``[]`` default (a classic Python pitfall); membership
    semantics are unchanged.
    """
    def decorator(func):
        @wraps(func)
        def decorated_view(*args, **kwargs):
            from anarcho.models.user_app import UserApp
            json = request.json if hasattr(request, 'json') else None
            if json is not None and 'app_key' in json:
                app_key = json['app_key']
            elif 'app_key' in kwargs:
                app_key = kwargs['app_key']
            else:
                raise ValueError("app_permissions : wrapped function should have"
                                 " app_key in args or in request.json")
            user = g.user
            result = make_response('{"error":"not_enough_permission"}', 403)
            if user:
                user_app = UserApp.query.filter_by(app_key=app_key,
                                                   user_id=user.id).first()
                if user_app:
                    if user_app.permission in permissions:
                        result = func(*args, **kwargs)
                else:
                    result = make_response('{"error":"app_not_found"}', 404)
            return result
        return decorated_view
    return decorator
def is_permission_allowed(permission):
    # True when ``permission`` is one of the module-level permission codes.
    return permission in permissions
"repo_name": "nrudenko/anarcho",
"path": "anarchoApp/anarcho/access_manager.py",
"copies": "1",
"size": "3131",
"license": "mit",
"hash": 6645208918513681000,
"line_mean": 30.9591836735,
"line_max": 109,
"alpha_frac": 0.5927818588,
"autogenerated": false,
"ratio": 4.019255455712452,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5112037314512452,
"avg_score": null,
"num_lines": null
} |
from functools import wraps
import importlib
import logging
from google.appengine.ext.webapp import template
import webapp2
class AdminApplication(webapp2.WSGIApplication):
    '''
    The Application Admin is the central element in the admin framework.
    It keeps references and reverse references to AdminModel classes and ndb
    Model classes.
    And since it inherits from webapp2.WSGIApplication, it also natively
    directs HTTP requests to the right Request Handlers.
    '''
    def __init__(self, *args, **kwargs):
        '''
        AdminApplication initialization method. Initializes data structures
        that keep track of AdminModel classes and ndb Model classes.
        '''
        super(AdminApplication, self).__init__(*args, **kwargs)
        # AdminModel class keyed by ndb Model class name.
        self.registered_models = {}
        # ndb Model class keyed by partial KEY_FORMAT (see register()).
        self.models_by_partial_key_format = {}
        # ndb Model class keyed by full KEY_FORMAT.
        self.models_by_key_format = {}
        # Prefix prepended to generated routes (e.g. the home view).
        self.routes_prefix = ''

    def get_table_names(self):
        '''
        Use the AdminApplication internal data to return the list of ndb Model
        classes registered in the admin app.

        Returns:
            list: table names (list of strings).
        '''
        # Partial keys are one-element slices of KEY_FORMAT holding the
        # model name (see register()), hence k[0].
        table_names = [k[0] for k in self.models_by_partial_key_format]
        table_names.sort()
        return table_names

    def register(self, model_admin, model):
        '''
        Bind a AdminModel class to an ndb Model class.

        The registration process has 2 steps:

        * Create internal references to bind the AdminModel and ndb.Model
          classes to be able to:

            * Find an ``AdminModel`` class using an ``ndb.Model`` name
            * Find an ndb.Model class using a partial ``KEY_FORMAT``
              (e.g.: ``('PropertyBase', int, 'UserSong')`` ->
              ``<class models.UserSong>``)
            * Find an ndb.Model class using a full ``KEY_FORMAT``
              (e.g.: ``('PropertyBase', int, 'UserSong', int)`` ->
              ``<class models.UserSong>``)

        * Generate the webapp2 Routes for each ``AdminModel``, and register
          them in the app.

        Args:
            model_admin: AdminModel class object.
            model: ndb.Model class object.

        Returns:
            None.
        '''
        logging.info('REGISTERING Admin Model {} with {}'.format(
            model_admin.__name__,
            model.__name__
        ))
        self.registered_models[model.__name__] = model_admin
        # KEY_FORMAT[-2:-1] is the one-element slice containing the model
        # name; the full KEY_FORMAT also carries the trailing id type.
        self.models_by_partial_key_format[model.KEY_FORMAT[-2:-1]] = model
        self.models_by_key_format[model.KEY_FORMAT] = model
        for route in model_admin.generate_routes(model):
            self.router.add(route)

    def _register_home_route(self):
        '''
        Register the Home View Route.

        Returns:
            None.
        '''
        self.router.add(
            webapp2.Route(
                r'{prefix}'.format(prefix=self.routes_prefix),
                handler='smadmin.core.request_handlers.HomeViewRequestHandler',
                name='smadmin-home-view',
                methods=['GET'],
                schemes=['http', 'https']
            )
        )

    def discover_admins(self, *modules):
        '''
        Import Python modules from the project that uses the admin.
        AdminModel classes that are decorated with ``register()`` will be
        registered in the AdminApplication.

        Args:
            modules: list of strings that represent importable Python modules.

        Returns:
            None.
        '''
        for module in modules:
            try:
                # TODO: handle packages and relative imports
                importlib.import_module(module)
            # NOTE: Python 2 except syntax; a failed admin module import is
            # deliberately logged and skipped (best-effort discovery).
            except Exception, e:
                logging.exception(e)
                pass
        # At this point, all Model Admins are suppose to be registered
        self._register_home_route()
# Enabled PATCH method
# http://stackoverflow.com/questions/16280496
# webapp2 rejects HTTP methods outside its set-like class attribute
# ``allowed_methods``; extend it on the class so PATCH requests route.
allowed_methods = AdminApplication.allowed_methods
new_allowed_methods = allowed_methods.union(('PATCH',))
AdminApplication.allowed_methods = new_allowed_methods

# Module-level singleton admin WSGI application used by the register
# decorator below.
app = AdminApplication()

# Register custom Template Filters
template.register_template_library('smadmin.core.smtemplatefilters')
class register(object):
    '''
    Class decorator to register an ndb Model with an AdminModel.
    '''
    def __init__(self, model):
        '''
        Initialize the ``register`` decorator to save the ``ndb.Model`` class.
        A ``AdminModel`` class will be bound to that ``ndb.Model`` class when
        the ``AdminApplication`` imports the module that contains the
        ``AdminModel`` class.
        '''
        self.model = model

    def __call__(self, cls):
        '''
        Method called when the decorated ``AdminModel`` class is imported.
        This allows the ``AdminApplication`` to bind the ``AdminModel`` to the
        ``ndb.Model``.
        '''
        app.register(cls, self.model)
        cls.model = self.model

        # NOTE(review): the decorated name is rebound to this no-op wrapper,
        # so after ``@register(Model)`` the admin class's name refers to a
        # function returning None, not the class.  Registration (above) still
        # happens; confirm whether returning ``cls`` was intended instead.
        @wraps(cls)
        def wrapper(*args, **kwargs):
            pass
        return wrapper
| {
"repo_name": "sminteractive/ndb-gae-admin",
"path": "smadmin/core/app.py",
"copies": "1",
"size": "5073",
"license": "apache-2.0",
"hash": 2499360666341212700,
"line_mean": 31.3121019108,
"line_max": 79,
"alpha_frac": 0.5980682042,
"autogenerated": false,
"ratio": 4.489380530973452,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5587448735173451,
"avg_score": null,
"num_lines": null
} |
from functools import wraps
import inspect
from itertools import chain
import logging
from django.conf import settings
from django.contrib import messages
from django.core import cache
from django.http import HttpRequest, HttpResponse
from django.utils import translation
from cachecow.cache import set_cache, make_key, key_arg_iterator
from cachecow.intpacker import pack_int
logger = logging.getLogger(__name__)
def _make_key_for_func(key_args, func_args, func_kwargs, namespace=None,
                       skip_prefix=False):
    '''
    Returns the cache key to use for the decorated function. Calls and replaces
    any callable items in `key_args` with their return values before sending
    `key_args` over to `make_key`. Does the same for a callable `namespace`.
    '''
    def resolve(item):
        # Callables are evaluated with the decorated function's call args.
        return item(*func_args, **func_kwargs) if callable(item) else item

    resolved = resolve(key_args)
    expanded = [resolve(part) for part in key_arg_iterator(resolved)]
    return make_key(expanded, namespace=namespace, skip_prefix=skip_prefix)
def _add_delete_cache_member(func, key=None, namespace=None, add_user_to_key=False):
    '''
    Adds a `delete_cache` member function to `func`. Pass it the same args
    as `func` so that it can find the right key to delete.

    If `func` was decorated with `cached_function` or `cached_view` with a
    `key` parameter specified, `delete_cache` takes no arguments.
    '''
    def delete_cache(*args, **kwargs):
        key_args = key or _make_key_args_from_function(func, *args, **kwargs)
        if add_user_to_key and kwargs.get('user') is not None:
            # We can assume that key is specified (see cached_view's docstring).
            key_args = chain(key_arg_iterator(key_args, max_depth=0),
                             [kwargs['user']])
        namespace_key = None
        if namespace is not None:
            namespace_key = _make_key_for_func(namespace, args, kwargs,
                                               skip_prefix=True)
        cache_key = _make_key_for_func(key_args, args, kwargs,
                                       namespace=namespace_key)
        logger.debug(u'deleting cache for key: {}'.format(cache_key))
        cache.cache.delete(cache_key)

    func.delete_cache = delete_cache
def _make_key_args_from_function(func, *args, **kwargs):
    '''
    Derive a list of key components that (hopefully) uniquely identifies a
    call to `func`, for later serialization by `make_key`. Tries to stay
    human-readable while staying under memcached's key length limit.
    '''
    parts = ['cached_func', func.__name__]
    # Works on plain functions as well as methods.
    arg_names = inspect.getargspec(func).args
    looks_like_method = (inspect.ismethod(func)
                         or (arg_names and arg_names[0] in ('self', 'cls')))
    if looks_like_method:
        # At decoration time, inside a class body, a method is still a plain
        # function, so `ismethod` can be False for real methods; guessing
        # from the first parameter name is the best Python allows. A wrong
        # guess only makes the key more detailed than necessary (harmless) —
        # unless someone names their "self"/"cls" arg something unusual.
        receiver = args[0]
        parts.append(receiver.__class__.__name__)
        if hasattr(receiver, 'pk'):
            # Django model instance? The primary key is a great differentiator.
            parts.append(receiver.pk)
        parts.extend(args[1:])
    else:
        # Plain function.
        parts.extend(args)
    parts.extend(kwargs.values())
    # Hash of the code object, as an extra-safe (but unreadable) suffix.
    # Collisions here are harmless: functions with identical code blocks
    # behave identically, so sharing a memoization entry is fine.
    parts.append(pack_int(func.__code__.__hash__()))
    return parts
def cached_function(timeout=None, key=None, namespace=None):
    '''
    Memoize a function or class method via the Django cache backend.

    All keyword arguments are optional.

    `timeout` is an int or a timedelta (None means the backend default).

    `key` is serialized by CacheCow's `make_key` (see its docs). When
    omitted, a deterministic, mostly human-readable key is derived from the
    function itself. If `key` — or any item inside it — is callable, it is
    called with the decorated function's args/kwargs and its return value
    is used in the key instead.

    `namespace` groups keys so the whole group can be invalidated at once
    via `invalidate_namespace`, without tracking each key individually. A
    callable `namespace` is invoked with the decorated function's arguments
    and *must* be deterministic.

    The decorated function gains a `delete_cache` member; call it with the
    same args as the function to evict the memoized value.
    '''
    def decorator(func):
        _add_delete_cache_member(func, key=key, namespace=namespace)
        @wraps(func)
        def wrapped(*args, **kwargs):
            if key is None:
                key_parts = _make_key_args_from_function(func, *args, **kwargs)
            else:
                key_parts = key
            ns = (_make_key_for_func(namespace, args, kwargs, skip_prefix=True)
                  if namespace is not None else None)
            cache_key = _make_key_for_func(key_parts, args, kwargs, namespace=ns)
            result = cache.cache.get(cache_key)
            if result is None:
                # Miss: compute and store. NB: a legitimate None result is
                # indistinguishable from a miss and is recomputed every call.
                result = func(*args, **kwargs)
                set_cache(cache_key, result, timeout)
            return result
        return wrapped
    return decorator
def _can_cache_request(request, *args, **kwargs):
    '''
    Default request gatekeeper: decide whether this request's response is
    eligible for caching at all.

    True only for GET requests with no pending Django messages for the
    user (serving a cached page would otherwise swallow one-shot
    messages). NOTE(review): only GET qualifies here — HEAD does not,
    despite what `cached_view`'s summary used to claim.
    '''
    pending = messages.get_messages(request)
    if len(pending) != 0:
        return False
    return request.method == 'GET'
def _can_cache_response(response):
return (response.status_code != 200
or 'no-cache' in response.get('Cache-Control', '')
or 'no-cache' in response.get('Pragma', ''))
def cached_view(timeout=None, key=None, namespace=None, add_user_to_key=False,
                request_gatekeeper=_can_cache_request,
                response_gatekeeper=_can_cache_response,
                cached_response_wrapper=HttpResponse,
                serializer=lambda response: response.content):
    '''
    Cache a Django view's response; the view analogue of `cached_function`.

    See `cached_function` for `timeout`, `key` and `namespace` semantics.
    Additional parameters:

    - `add_user_to_key`: prefix the key with the logged-in user's id when
      authenticated. Only usable together with an explicit `key`, to avoid
      conflicts with function kwargs.
    - `request_gatekeeper(request, *args, **kwargs)`: whether this request
      may be served from / stored into the cache at all (default: GET only,
      no pending messages — see `_can_cache_request`).
    - `response_gatekeeper(response)`: whether a freshly rendered response
      should be stored.
    - `serializer` / `cached_response_wrapper`: convert a response to its
      cached payload and back. The defaults store only `response.content`
      and re-wrap it in a bare `HttpResponse` on a hit, so the original
      status code and headers are NOT preserved across the cache.
    '''
    if add_user_to_key and key is None:
        # NOTE(review): "specifing" typo is part of the user-visible message.
        raise ValueError("Cannot use add_user_to_key without also specifing key.")
    def decorator(func):
        # Expose func.delete_cache for explicit invalidation.
        _add_delete_cache_member(func, key=key, namespace=namespace,
                                 add_user_to_key=add_user_to_key)
        @wraps(func)
        def wrapped(request, *args, **kwargs):
            # Uncacheable request: bypass the cache entirely.
            if not request_gatekeeper(request, *args, **kwargs):
                return func(request, *args, **kwargs)
            key_args = key
            # Default key.
            if not key_args:
                # Don't naively add the `request` arg to the cache key.
                key_args = _make_key_args_from_function(func, *args, **kwargs)
                # Only add specific parts of the `request` object to the key.
                key_args.extend(chain.from_iterable(request.GET.items()))
                key_args.append(request.method)
                # Current language.
                key_args.append(translation.get_language())
                # Current site, if available.
                key_args.append(getattr(settings, 'SITE_ID', None))
            if add_user_to_key and request.user.is_authenticated():
                # Flatten one level only, then append the user id last.
                key_args = chain(key_arg_iterator(key_args, max_depth=0),
                                 [request.user.id])
            # Serialize the key.
            # Add `request` to `args` since _make_key wants all func args in it.
            _args = (request,) + args
            _namespace = None
            if namespace is not None:
                _namespace = _make_key_for_func(namespace, _args, kwargs,
                                                skip_prefix=True)
            _key = _make_key_for_func(key_args, _args, kwargs, namespace=_namespace)
            resp = None
            val = cache.cache.get(_key)
            logger.debug(u'getting cache from {}: {}'.format(_key, val))
            if val is None:
                # Miss: render the view; store only if the gatekeeper allows.
                resp = func(request, *args, **kwargs)
                if response_gatekeeper(resp):
                    set_cache(_key, serializer(resp), timeout)
            else:
                # Hit: rebuild a response object from the cached payload.
                resp = cached_response_wrapper(val)
            return resp
        return wrapped
    return decorator
| {
"repo_name": "aehlke/django-cachecow",
"path": "cachecow/decorators.py",
"copies": "1",
"size": "11169",
"license": "bsd-3-clause",
"hash": 9104367011049402000,
"line_mean": 39.6145454545,
"line_max": 84,
"alpha_frac": 0.6306741875,
"autogenerated": false,
"ratio": 4.1675373134328355,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.005350313062744092,
"num_lines": 275
} |
from functools import wraps
import inspect
from textwrap import dedent
from typing import Any, Callable, Dict, List, Optional, Tuple, Type, Union
import warnings
from pandas._libs.properties import cache_readonly # noqa
def deprecate(
    name: str,
    alternative: Callable,
    version: str,
    alt_name: Optional[str] = None,
    klass: Optional[Type[Warning]] = None,
    stacklevel: int = 2,
    msg: Optional[str] = None,
) -> Callable:
    """
    Return a new function that emits a deprecation warning on use.
    To use this method for a deprecated function, another function
    `alternative` with the same signature must exist. The deprecated
    function will emit a deprecation warning, and in the docstring
    it will contain the deprecation directive with the provided version
    so it can be detected for future removal.
    Parameters
    ----------
    name : str
        Name of function to deprecate.
    alternative : func
        Function to use instead.
    version : str
        Version of pandas in which the method has been deprecated.
    alt_name : str, optional
        Name to use in preference of alternative.__name__.
    klass : Warning, default FutureWarning
    stacklevel : int, default 2
    msg : str
        The message to display in the warning.
        Default is '{name} is deprecated, use {alt_name} instead'.
    """
    alt_name = alt_name or alternative.__name__
    klass = klass or FutureWarning
    warning_msg = msg or "{} is deprecated, use {} instead".format(name, alt_name)
    @wraps(alternative)
    def wrapper(*args, **kwargs):
        # Warn on every call, then delegate unchanged.
        warnings.warn(warning_msg, klass, stacklevel=stacklevel)
        return alternative(*args, **kwargs)
    # adding deprecated directive to the docstring
    msg = msg or "Use `{alt_name}` instead.".format(alt_name=alt_name)
    doc_error_msg = (
        "deprecate needs a correctly formatted docstring in "
        "the target function (should have a one liner short "
        "summary, and opening quotes should be in their own "
        "line). Found:\n{}".format(alternative.__doc__)
    )
    # when python is running in optimized mode (i.e. `-OO`), docstrings are
    # removed, so we check that a docstring with correct formatting is used
    # but we allow empty docstrings
    if alternative.__doc__:
        if alternative.__doc__.count("\n") < 3:
            raise AssertionError(doc_error_msg)
        # Expected layout: opening quotes on their own line, a one-line
        # summary, a separator line, then the remainder of the docstring.
        empty1, summary, empty2, doc = alternative.__doc__.split("\n", 3)
        # NOTE(review): parses as `empty1 or (empty2 and not summary)` due to
        # operator precedence; `(empty1 or empty2)` may have been intended.
        if empty1 or empty2 and not summary:
            raise AssertionError(doc_error_msg)
        # Rebuild the docstring with a `.. deprecated::` directive inserted
        # after the summary.
        wrapper.__doc__ = dedent(
            """
        {summary}
        .. deprecated:: {depr_version}
            {depr_msg}
        {rest_of_docstring}"""
        ).format(
            summary=summary.strip(),
            depr_version=version,
            depr_msg=msg,
            rest_of_docstring=dedent(doc),
        )
    return wrapper
def deprecate_kwarg(
    old_arg_name: str,
    new_arg_name: Optional[str],
    mapping: Optional[Union[Dict, Callable[[Any], Any]]] = None,
    stacklevel: int = 2,
) -> Callable:
    """
    Decorator to deprecate a keyword argument of a function.

    Parameters
    ----------
    old_arg_name : str
        Name of argument in function to deprecate.
    new_arg_name : str or None
        Name of preferred argument in function. Use None to raise a warning
        that ``old_arg_name`` keyword is deprecated without renaming it.
    mapping : dict or callable
        If present, used to translate old argument values to new ones. A
        callable must do its own value checking; values not found in a dict
        are forwarded unchanged.
    stacklevel : int, default 2
        Stack level passed to ``warnings.warn``.

    Examples
    --------
    >>> @deprecate_kwarg(old_arg_name='cols', new_arg_name='columns')
    ... def f(columns=''):
    ...     print(columns)
    >>> f(columns='should work ok')
    should work ok
    >>> f(cols='should raise warning')  # FutureWarning emitted
    should raise warning
    >>> f(cols='should error', columns="can't pass both")
    TypeError: Can only specify 'cols' or 'columns', not both

    >>> @deprecate_kwarg('old', 'new', {'yes': True, 'no': False})
    ... def g(new=False):
    ...     print('yes!' if new else 'no!')
    >>> g(old='yes')  # FutureWarning: old='yes' is deprecated, use new=True
    yes!

    Passing ``new_arg_name=None`` warns that the keyword will be removed
    entirely, while still forwarding it to the function.
    """
    if mapping is not None and not hasattr(mapping, "get") and not callable(mapping):
        raise TypeError(
            "mapping from old to new argument values " "must be dict or callable!"
        )
    def _deprecate_kwarg(func):
        @wraps(func)
        def wrapper(*args, **kwargs):
            old_value = kwargs.pop(old_arg_name, None)
            if old_value is None:
                # Deprecated keyword not used: forward the call untouched.
                return func(*args, **kwargs)
            if new_arg_name is None:
                # Keyword is going away with no replacement: warn, restore
                # it, and forward unchanged.
                msg = (
                    "the '{old_name}' keyword is deprecated and will be "
                    "removed in a future version. "
                    "Please take steps to stop the use of '{old_name}'"
                ).format(old_name=old_arg_name)
                warnings.warn(msg, FutureWarning, stacklevel=stacklevel)
                kwargs[old_arg_name] = old_value
                return func(*args, **kwargs)
            if mapping is None:
                new_value = old_value
                msg = (
                    "the '{old_name}' keyword is deprecated, "
                    "use '{new_name}' instead"
                ).format(old_name=old_arg_name, new_name=new_arg_name)
            else:
                # Translate the old value into the new keyword's domain.
                if hasattr(mapping, "get"):
                    new_value = mapping.get(old_value, old_value)
                else:
                    new_value = mapping(old_value)
                msg = (
                    "the {old_name}={old_val!r} keyword is deprecated, "
                    "use {new_name}={new_val!r} instead"
                ).format(old_name=old_arg_name, old_val=old_value,
                         new_name=new_arg_name, new_val=new_value)
            warnings.warn(msg, FutureWarning, stacklevel=stacklevel)
            if kwargs.get(new_arg_name, None) is not None:
                # Both spellings supplied: ambiguous, so refuse.
                raise TypeError(
                    ("Can only specify '{old_name}' or '{new_name}', "
                     "not both").format(old_name=old_arg_name,
                                        new_name=new_arg_name)
                )
            kwargs[new_arg_name] = new_value
            return func(*args, **kwargs)
        return wrapper
    return _deprecate_kwarg
def rewrite_axis_style_signature(
    name: str, extra_params: List[Tuple[str, Any]]
) -> Callable:
    """
    Decorator that attaches a synthetic axis-style signature to a function.

    The wrapper forwards all arguments untouched; only the signature
    reported by `inspect` changes, becoming
    ``(self, <name>, index, columns, axis, *extra_params)`` with every
    parameter after ``self`` defaulting to None (or the supplied default).
    """
    def decorate(func):
        @wraps(func)
        def wrapper(*args, **kwargs):
            return func(*args, **kwargs)
        kind = inspect.Parameter.POSITIONAL_OR_KEYWORD
        params = [inspect.Parameter("self", kind)]
        for pname in (name, "index", "columns", "axis"):
            params.append(inspect.Parameter(pname, kind, default=None))
        params.extend(
            inspect.Parameter(pname, kind, default=default)
            for pname, default in extra_params
        )
        # `wraps` records func as __wrapped__, so inspect.signature(wrapper)
        # resolves to this synthetic signature.
        func.__signature__ = inspect.Signature(params)
        return wrapper
    return decorate
# Substitution and Appender are derived from matplotlib.docstring (1.1.0)
# module http://matplotlib.org/users/license.html
class Substitution:
    """
    Decorator performing %-style substitution on a function's docstring.

    Construct with either positional or keyword substitution values (never
    both), then decorate a function whose docstring contains matching
    ``%s`` / ``%(name)s`` placeholders. Robust to ``func.__doc__`` being
    None (for example under ``python -OO``).

        sub_author_name = Substitution(author='Jason')

        @sub_author_name
        def some_function(x):
            "%(author)s wrote this function"
        # some_function.__doc__ is now "Jason wrote this function"
    """
    def __init__(self, *args, **kwargs):
        if args and kwargs:
            raise AssertionError("Only positional or keyword args are allowed")
        # A tuple when positional values were given, otherwise a dict.
        self.params = args or kwargs
    def __call__(self, func: Callable) -> Callable:
        # Leave a None/empty docstring untouched.
        if func.__doc__:
            func.__doc__ = func.__doc__ % self.params
        return func
    def update(self, *args, **kwargs) -> None:
        """
        Update self.params with supplied args.
        If called, we assume self.params is a dict.
        """
        self.params.update(*args, **kwargs)
class Appender:
    """
    Decorator appending an addendum to the decorated function's docstring.

    Robust to ``func.__doc__`` being None (for example under ``python -OO``).
    The optional `join` string separates the original docstring from the
    addendum, and `indents` indents the addendum before it is stored.

        add_copyright = Appender("Copyright (c) 2009", join="<newline>")

        @add_copyright
        def my_dog(has='fleas'):
            "This docstring will have a copyright below"
    """
    def __init__(self, addendum: Optional[str], join: str = "", indents: int = 0):
        if indents > 0:
            self.addendum = indent(addendum, indents=indents)  # type: Optional[str]
        else:
            self.addendum = addendum
        self.join = join
    def __call__(self, func: Callable) -> Callable:
        # Normalize both parts to "" so join never sees None; note this also
        # normalizes self.addendum in place, matching prior behavior.
        func.__doc__ = func.__doc__ or ""
        self.addendum = self.addendum or ""
        func.__doc__ = dedent(self.join.join([func.__doc__, self.addendum]))
        return func
def indent(text: Optional[str], indents: int = 1) -> str:
    """
    Insert `indents` spaces after every line break in `text`.

    Returns "" for None, empty, or non-string input.
    """
    if not isinstance(text, str) or not text:
        return ""
    separator = "\n" + " " * indents
    return separator.join(text.split("\n"))
| {
"repo_name": "toobaz/pandas",
"path": "pandas/util/_decorators.py",
"copies": "2",
"size": "11438",
"license": "bsd-3-clause",
"hash": -1336472687813775400,
"line_mean": 33.1432835821,
"line_max": 85,
"alpha_frac": 0.5959083756,
"autogenerated": false,
"ratio": 4.188209447088979,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5784117822688979,
"avg_score": null,
"num_lines": null
} |
from functools import wraps
import inspect
from textwrap import dedent
from typing import Any, Callable, List, Mapping, Optional, Tuple, Type, Union, cast
import warnings
from pandas._libs.properties import cache_readonly # noqa
from pandas._typing import F
def deprecate(
    name: str,
    alternative: Callable[..., Any],
    version: str,
    alt_name: Optional[str] = None,
    klass: Optional[Type[Warning]] = None,
    stacklevel: int = 2,
    msg: Optional[str] = None,
) -> Callable[[F], F]:
    """
    Return a new function that emits a deprecation warning on use.
    To use this method for a deprecated function, another function
    `alternative` with the same signature must exist. The deprecated
    function will emit a deprecation warning, and in the docstring
    it will contain the deprecation directive with the provided version
    so it can be detected for future removal.
    Parameters
    ----------
    name : str
        Name of function to deprecate.
    alternative : func
        Function to use instead.
    version : str
        Version of pandas in which the method has been deprecated.
    alt_name : str, optional
        Name to use in preference of alternative.__name__.
    klass : Warning, default FutureWarning
    stacklevel : int, default 2
    msg : str
        The message to display in the warning.
        Default is '{name} is deprecated, use {alt_name} instead'.
    """
    alt_name = alt_name or alternative.__name__
    klass = klass or FutureWarning
    warning_msg = msg or f"{name} is deprecated, use {alt_name} instead"
    @wraps(alternative)
    def wrapper(*args, **kwargs) -> Callable[..., Any]:
        # Warn on every call, then delegate unchanged.
        warnings.warn(warning_msg, klass, stacklevel=stacklevel)
        return alternative(*args, **kwargs)
    # adding deprecated directive to the docstring
    msg = msg or f"Use `{alt_name}` instead."
    doc_error_msg = (
        "deprecate needs a correctly formatted docstring in "
        "the target function (should have a one liner short "
        "summary, and opening quotes should be in their own "
        f"line). Found:\n{alternative.__doc__}"
    )
    # when python is running in optimized mode (i.e. `-OO`), docstrings are
    # removed, so we check that a docstring with correct formatting is used
    # but we allow empty docstrings
    if alternative.__doc__:
        if alternative.__doc__.count("\n") < 3:
            raise AssertionError(doc_error_msg)
        # Expected layout: opening quotes on their own line, a one-line
        # summary, a separator line, then the remainder of the docstring.
        empty1, summary, empty2, doc = alternative.__doc__.split("\n", 3)
        # NOTE(review): parses as `empty1 or (empty2 and not summary)` due to
        # operator precedence; `(empty1 or empty2)` may have been intended.
        if empty1 or empty2 and not summary:
            raise AssertionError(doc_error_msg)
        # Rebuild the docstring with a `.. deprecated::` directive inserted
        # after the summary.
        wrapper.__doc__ = dedent(
            f"""
        {summary.strip()}
        .. deprecated:: {version}
            {msg}
        {dedent(doc)}"""
        )
    return wrapper
def deprecate_kwarg(
    old_arg_name: str,
    new_arg_name: Optional[str],
    mapping: Optional[Union[Mapping[Any, Any], Callable[[Any], Any]]] = None,
    stacklevel: int = 2,
) -> Callable[[F], F]:
    """
    Decorator to deprecate a keyword argument of a function.

    Parameters
    ----------
    old_arg_name : str
        Name of argument in function to deprecate.
    new_arg_name : str or None
        Name of preferred argument in function. Use None to raise a warning
        that ``old_arg_name`` keyword is deprecated without renaming it.
    mapping : dict or callable
        If mapping is present, use it to translate old arguments to
        new arguments. A callable must do its own value checking;
        values not found in a dict will be forwarded unchanged.
    stacklevel : int, default 2
        Stack level passed to ``warnings.warn``.

    Examples
    --------
    The following deprecates 'cols', using 'columns' instead

    >>> @deprecate_kwarg(old_arg_name='cols', new_arg_name='columns')
    ... def f(columns=''):
    ...     print(columns)
    ...
    >>> f(columns='should work ok')
    should work ok
    >>> f(cols='should raise warning')
    FutureWarning: the 'cols' keyword is deprecated, use 'columns' instead
    should raise warning
    >>> f(cols='should error', columns="can't pass do both")
    TypeError: Can only specify 'cols' or 'columns', not both

    >>> @deprecate_kwarg('old', 'new', {'yes': True, 'no': False})
    ... def f(new=False):
    ...     print('yes!' if new else 'no!')
    ...
    >>> f(old='yes')
    FutureWarning: old='yes' is deprecated, use new=True instead
    yes!

    To raise a warning that a keyword will be removed entirely in the future

    >>> @deprecate_kwarg(old_arg_name='cols', new_arg_name=None)
    ... def f(cols='', another_param=''):
    ...     print(cols)
    ...
    >>> f(cols='should raise warning')
    FutureWarning: the 'cols' keyword is deprecated and will be removed in a
    future version. Please take steps to stop the use of 'cols'
    should raise warning
    >>> f(another_param='should not raise warning')
    should not raise warning
    """
    if mapping is not None and not hasattr(mapping, "get") and not callable(mapping):
        raise TypeError(
            "mapping from old to new argument values must be dict or callable!"
        )
    def _deprecate_kwarg(func: F) -> F:
        @wraps(func)
        def wrapper(*args, **kwargs) -> Callable[..., Any]:
            old_arg_value = kwargs.pop(old_arg_name, None)
            if old_arg_value is not None:
                if new_arg_name is None:
                    # Keyword is going away with no replacement: warn,
                    # restore it and forward the call unchanged.
                    msg = (
                        f"the {repr(old_arg_name)} keyword is deprecated and "
                        "will be removed in a future version. Please take "
                        f"steps to stop the use of {repr(old_arg_name)}"
                    )
                    warnings.warn(msg, FutureWarning, stacklevel=stacklevel)
                    kwargs[old_arg_name] = old_arg_value
                    return func(*args, **kwargs)
                elif mapping is not None:
                    # Translate the old value into the new keyword's domain.
                    if callable(mapping):
                        new_arg_value = mapping(old_arg_value)
                    else:
                        new_arg_value = mapping.get(old_arg_value, old_arg_value)
                    msg = (
                        f"the {old_arg_name}={repr(old_arg_value)} keyword is "
                        "deprecated, use "
                        f"{new_arg_name}={repr(new_arg_value)} instead"
                    )
                else:
                    new_arg_value = old_arg_value
                    # Bug fix: a stray apostrophe after the closing brace
                    # previously rendered e.g. "the 'cols'' keyword ...".
                    msg = (
                        f"the {repr(old_arg_name)} keyword is deprecated, "
                        f"use {repr(new_arg_name)} instead"
                    )
                warnings.warn(msg, FutureWarning, stacklevel=stacklevel)
                if kwargs.get(new_arg_name) is not None:
                    # Both spellings supplied: ambiguous, so refuse.
                    msg = (
                        f"Can only specify {repr(old_arg_name)} "
                        f"or {repr(new_arg_name)}, not both"
                    )
                    raise TypeError(msg)
                else:
                    kwargs[new_arg_name] = new_arg_value
            return func(*args, **kwargs)
        return cast(F, wrapper)
    return _deprecate_kwarg
def _format_argument_list(allow_args: Union[List[str], int]):
"""
Convert the allow_args argument (either string or integer) of
`deprecate_nonkeyword_arguments` function to a string describing
it to be inserted into warning message.
Parameters
----------
allowed_args : list, tuple or int
The `allowed_args` argument for `deprecate_nonkeyword_arguments`,
but None value is not allowed.
Returns
-------
s : str
The substring describing the argument list in best way to be
inserted to the warning message.
Examples
--------
`format_argument_list(0)` -> ''
`format_argument_list(1)` -> 'except for the first argument'
`format_argument_list(2)` -> 'except for the first 2 arguments'
`format_argument_list([])` -> ''
`format_argument_list(['a'])` -> "except for the arguments 'a'"
`format_argument_list(['a', 'b'])` -> "except for the arguments 'a' and 'b'"
`format_argument_list(['a', 'b', 'c'])` ->
"except for the arguments 'a', 'b' and 'c'"
"""
if not allow_args:
return ""
elif allow_args == 1:
return " except for the first argument"
elif isinstance(allow_args, int):
return f" except for the first {allow_args} arguments"
elif len(allow_args) == 1:
return f" except for the argument '{allow_args[0]}'"
else:
last = allow_args[-1]
args = ", ".join(["'" + x + "'" for x in allow_args[:-1]])
return f" except for the arguments {args} and '{last}'"
def deprecate_nonkeyword_arguments(
    version: str,
    allowed_args: Optional[Union[List[str], int]] = None,
    stacklevel: int = 2,
) -> Callable:
    """
    Decorator warning when too many positional arguments are passed.

    Parameters
    ----------
    version : str
        The version in which positional arguments will become keyword-only.
    allowed_args : list or int, optional
        Either the names (list) or the count (int) of leading parameters
        that may stay positional. When None, defaults to every parameter
        that has no default value.
    stacklevel : int, default=2
        The stack level for warnings.warn.
    """
    def decorate(func):
        if allowed_args is not None:
            allow_args = allowed_args
        else:
            spec = inspect.getfullargspec(func)
            # Deriving the exempt set requires some defaulted parameters.
            assert spec.defaults is not None  # for mypy
            allow_args = spec.args[: -len(spec.defaults)]
        @wraps(func)
        def wrapper(*args, **kwargs):
            arguments = _format_argument_list(allow_args)
            num_allow_args = (
                len(allow_args)
                if isinstance(allow_args, (list, tuple))
                else allow_args
            )
            if len(args) > num_allow_args:
                msg = (
                    f"Starting with Pandas version {version} all arguments of "
                    f"{func.__name__}{arguments} will be keyword-only"
                )
                warnings.warn(msg, FutureWarning, stacklevel=stacklevel)
            return func(*args, **kwargs)
        return wrapper
    return decorate
def rewrite_axis_style_signature(
    name: str, extra_params: List[Tuple[str, Any]]
) -> Callable[..., Any]:
    """
    Decorator that attaches a synthetic axis-style signature to a function.

    The wrapper forwards all arguments untouched; only the signature
    reported by `inspect` changes, becoming
    ``(self, <name>, index, columns, axis, *extra_params)`` with every
    parameter after ``self`` defaulting to None (or the supplied default).
    """
    def decorate(func: F) -> F:
        @wraps(func)
        def wrapper(*args, **kwargs) -> Callable[..., Any]:
            return func(*args, **kwargs)
        kind = inspect.Parameter.POSITIONAL_OR_KEYWORD
        params = [inspect.Parameter("self", kind)]
        for pname in (name, "index", "columns", "axis"):
            params.append(inspect.Parameter(pname, kind, default=None))
        for pname, default in extra_params:
            params.append(inspect.Parameter(pname, kind, default=default))
        # https://github.com/python/typing/issues/598
        # error: "F" has no attribute "__signature__"
        func.__signature__ = inspect.Signature(params)  # type: ignore[attr-defined]
        return cast(F, wrapper)
    return decorate
def doc(*docstrings: Union[str, Callable], **params) -> Callable[[F], F]:
    """
    Decorator that concatenates docstring templates and substitutes params.

    The decorated callable gains a ``_docstring_components`` attribute
    recording the original templates (strings stay strings; documented
    callables are kept as callables), so other ``doc``-decorated callables
    can reuse them.

    Parameters
    ----------
    *docstrings : str or callable
        The string / docstring / docstring template to be appended in order
        after default docstring under callable.
    **params
        The string which would be used to format docstring template.
    """
    def decorator(decorated: F) -> F:
        # Collect the decorated callable's own docstring plus every template.
        components: List[Union[str, Callable]] = []
        if decorated.__doc__:
            components.append(dedent(decorated.__doc__))
        for entry in docstrings:
            if hasattr(entry, "_docstring_components"):
                # Reuse the templates recorded on a doc-decorated callable.
                # error: Item "str" of "Union[str, Callable[..., Any]]" has no
                # attribute "_docstring_components" [union-attr]
                components.extend(
                    entry._docstring_components  # type: ignore[union-attr]
                )
            elif isinstance(entry, str) or entry.__doc__:
                components.append(entry)
        # Format string templates; pull docstrings out of callables.
        rendered = []
        for component in components:
            if isinstance(component, str):
                rendered.append(component.format(**params))
            else:
                rendered.append(dedent(component.__doc__ or ""))
        decorated.__doc__ = "".join(rendered)
        # error: "F" has no attribute "_docstring_components"
        decorated._docstring_components = (  # type: ignore[attr-defined]
            components
        )
        return decorated
    return decorator
# Substitution and Appender are derived from matplotlib.docstring (1.1.0)
# module https://matplotlib.org/users/license.html
class Substitution:
    """
    Decorator performing %-style substitution on a function's docstring.

    Construct with either positional or keyword substitution values (never
    both), then decorate a function whose docstring contains matching
    ``%s`` / ``%(name)s`` placeholders. Robust to ``func.__doc__`` being
    None (for example under ``python -OO``).

        sub_author_name = Substitution(author='Jason')

        @sub_author_name
        def some_function(x):
            "%(author)s wrote this function"
        # some_function.__doc__ is now "Jason wrote this function"
    """
    def __init__(self, *args, **kwargs):
        if args and kwargs:
            raise AssertionError("Only positional or keyword args are allowed")
        # A tuple when positional values were given, otherwise a dict.
        self.params = args or kwargs
    def __call__(self, func: F) -> F:
        # Leave a None/empty docstring untouched.
        if func.__doc__:
            func.__doc__ = func.__doc__ % self.params
        return func
    def update(self, *args, **kwargs) -> None:
        """
        Update self.params with supplied args.
        """
        if isinstance(self.params, dict):
            self.params.update(*args, **kwargs)
class Appender:
    """
    Decorator appending an addendum to the decorated function's docstring.

    Robust to ``func.__doc__`` being None (for example under ``python -OO``).
    The optional `join` string separates the original docstring from the
    addendum, and `indents` indents the addendum before it is stored.

        add_copyright = Appender("Copyright (c) 2009", join="<newline>")

        @add_copyright
        def my_dog(has='fleas'):
            "This docstring will have a copyright below"
    """
    addendum: Optional[str]
    def __init__(self, addendum: Optional[str], join: str = "", indents: int = 0):
        self.addendum = (
            indent(addendum, indents=indents) if indents > 0 else addendum
        )
        self.join = join
    def __call__(self, func: F) -> F:
        # Normalize both parts to "" so join never sees None; note this also
        # normalizes self.addendum in place, matching prior behavior.
        func.__doc__ = func.__doc__ or ""
        self.addendum = self.addendum or ""
        func.__doc__ = dedent(self.join.join([func.__doc__, self.addendum]))
        return func
def indent(text: Optional[str], indents: int = 1) -> str:
    """
    Insert `indents` spaces after every line break in `text`.

    Returns "" for None, empty, or non-string input.
    """
    if not isinstance(text, str) or not text:
        return ""
    separator = "\n" + " " * indents
    return separator.join(text.split("\n"))
| {
"repo_name": "gfyoung/pandas",
"path": "pandas/util/_decorators.py",
"copies": "2",
"size": "17021",
"license": "bsd-3-clause",
"hash": 6662914865063588000,
"line_mean": 34.0948453608,
"line_max": 85,
"alpha_frac": 0.5954996769,
"autogenerated": false,
"ratio": 4.29281210592686,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5888311782826859,
"avg_score": null,
"num_lines": null
} |
from functools import wraps
import inspect
from textwrap import dedent
import warnings
from pandas._libs.properties import cache_readonly # noqa
from pandas.compat import PY2, callable, signature
def deprecate(name, alternative, version, alt_name=None,
              klass=None, stacklevel=2, msg=None):
    """Return a new function that emits a deprecation warning on use.
    To use this method for a deprecated function, another function
    `alternative` with the same signature must exist. The deprecated
    function will emit a deprecation warning, and in the docstring
    it will contain the deprecation directive with the provided version
    so it can be detected for future removal.
    Parameters
    ----------
    name : str
        Name of function to deprecate.
    alternative : func
        Function to use instead.
    version : str
        Version of pandas in which the method has been deprecated.
    alt_name : str, optional
        Name to use in preference of alternative.__name__.
    klass : Warning, default FutureWarning
        Warning category passed to ``warnings.warn``.
    stacklevel : int, default 2
        Stack level passed to ``warnings.warn``.
    msg : str
        The message to display in the warning.
        Default is '{name} is deprecated, use {alt_name} instead'.
    """
    alt_name = alt_name or alternative.__name__
    klass = klass or FutureWarning
    warning_msg = msg or '{} is deprecated, use {} instead'.format(name,
                                                                   alt_name)
    # The replacement simply warns once per call, then delegates to
    # `alternative` unchanged.
    @wraps(alternative)
    def wrapper(*args, **kwargs):
        warnings.warn(warning_msg, klass, stacklevel=stacklevel)
        return alternative(*args, **kwargs)
    # adding deprecated directive to the docstring
    msg = msg or 'Use `{alt_name}` instead.'.format(alt_name=alt_name)
    doc_error_msg = ('deprecate needs a correctly formatted docstring in '
                     'the target function (should have a one liner short '
                     'summary, and opening quotes should be in their own '
                     'line). Found:\n{}'.format(alternative.__doc__))
    # when python is running in optimized mode (i.e. `-OO`), docstrings are
    # removed, so we check that a docstring with correct formatting is used
    # but we allow empty docstrings
    if alternative.__doc__:
        if alternative.__doc__.count('\n') < 3:
            raise AssertionError(doc_error_msg)
        # Expected layout: opening quotes on their own line, a one-line
        # summary, a blank line, then the rest of the docstring.
        empty1, summary, empty2, doc = alternative.__doc__.split('\n', 3)
        # NOTE(review): precedence parses this as `empty1 or (empty2 and not
        # summary)`; `empty1 or empty2 or not summary` may have been the
        # intent -- confirm before changing.
        if empty1 or empty2 and not summary:
            raise AssertionError(doc_error_msg)
        wrapper.__doc__ = dedent("""
        {summary}
        .. deprecated:: {depr_version}
            {depr_msg}
        {rest_of_docstring}""").format(summary=summary.strip(),
                                       depr_version=version,
                                       depr_msg=msg,
                                       rest_of_docstring=dedent(doc))
    return wrapper
def deprecate_kwarg(old_arg_name, new_arg_name, mapping=None, stacklevel=2):
    """
    Decorator to deprecate a keyword argument of a function.
    Parameters
    ----------
    old_arg_name : str
        Name of argument in function to deprecate
    new_arg_name : str or None
        Name of preferred argument in function. Use None to raise warning that
        ``old_arg_name`` keyword is deprecated.
    mapping : dict or callable
        If mapping is present, use it to translate old arguments to
        new arguments. A callable must do its own value checking;
        values not found in a dict will be forwarded unchanged.
    Examples
    --------
    The following deprecates 'cols', using 'columns' instead
    >>> @deprecate_kwarg(old_arg_name='cols', new_arg_name='columns')
    ... def f(columns=''):
    ...     print(columns)
    ...
    >>> f(columns='should work ok')
    should work ok
    >>> f(cols='should raise warning')
    FutureWarning: cols is deprecated, use columns instead
      warnings.warn(msg, FutureWarning)
    should raise warning
    >>> f(cols='should error', columns="can\'t pass do both")
    TypeError: Can only specify 'cols' or 'columns', not both
    >>> @deprecate_kwarg('old', 'new', {'yes': True, 'no': False})
    ... def f(new=False):
    ...     print('yes!' if new else 'no!')
    ...
    >>> f(old='yes')
    FutureWarning: old='yes' is deprecated, use new=True instead
      warnings.warn(msg, FutureWarning)
    yes!
    To raise a warning that a keyword will be removed entirely in the future
    >>> @deprecate_kwarg(old_arg_name='cols', new_arg_name=None)
    ... def f(cols='', another_param=''):
    ...     print(cols)
    ...
    >>> f(cols='should raise warning')
    FutureWarning: the 'cols' keyword is deprecated and will be removed in a
    future version please takes steps to stop use of 'cols'
    should raise warning
    >>> f(another_param='should not raise warning')
    should not raise warning
    >>> f(cols='should raise warning', another_param='')
    FutureWarning: the 'cols' keyword is deprecated and will be removed in a
    future version please takes steps to stop use of 'cols'
    should raise warning
    """
    # Reject mapping objects we cannot use: anything that is neither
    # dict-like (has .get) nor callable.
    if mapping is not None and not hasattr(mapping, 'get') and \
            not callable(mapping):
        raise TypeError("mapping from old to new argument values "
                        "must be dict or callable!")
    def _deprecate_kwarg(func):
        @wraps(func)
        def wrapper(*args, **kwargs):
            # Pull the deprecated keyword out so it is never forwarded
            # under its old name unless explicitly re-inserted below.
            old_arg_value = kwargs.pop(old_arg_name, None)
            if new_arg_name is None and old_arg_value is not None:
                # The keyword is being retired outright: warn, put it
                # back, and call through unchanged.
                msg = (
                    "the '{old_name}' keyword is deprecated and will be "
                    "removed in a future version. "
                    "Please take steps to stop the use of '{old_name}'"
                ).format(old_name=old_arg_name)
                warnings.warn(msg, FutureWarning, stacklevel=stacklevel)
                kwargs[old_arg_name] = old_arg_value
                return func(*args, **kwargs)
            if old_arg_value is not None:
                # The keyword is being renamed (and its value possibly
                # translated via `mapping`).
                if mapping is not None:
                    if hasattr(mapping, 'get'):
                        new_arg_value = mapping.get(old_arg_value,
                                                    old_arg_value)
                    else:
                        new_arg_value = mapping(old_arg_value)
                    msg = ("the {old_name}={old_val!r} keyword is deprecated, "
                           "use {new_name}={new_val!r} instead"
                           ).format(old_name=old_arg_name,
                                    old_val=old_arg_value,
                                    new_name=new_arg_name,
                                    new_val=new_arg_value)
                else:
                    new_arg_value = old_arg_value
                    msg = ("the '{old_name}' keyword is deprecated, "
                           "use '{new_name}' instead"
                           ).format(old_name=old_arg_name,
                                    new_name=new_arg_name)
                warnings.warn(msg, FutureWarning, stacklevel=stacklevel)
                # Mixing the old and new spellings in one call is an error
                # rather than a silent overwrite.
                if kwargs.get(new_arg_name, None) is not None:
                    msg = ("Can only specify '{old_name}' or '{new_name}', "
                           "not both").format(old_name=old_arg_name,
                                              new_name=new_arg_name)
                    raise TypeError(msg)
                else:
                    kwargs[new_arg_name] = new_arg_value
            return func(*args, **kwargs)
        return wrapper
    return _deprecate_kwarg
def rewrite_axis_style_signature(name, extra_params):
    """
    Decorator factory that attaches an explicit ``__signature__`` of the
    form ``(self, <name>, index, columns, axis, *extra_params)`` to the
    wrapped function without changing its runtime behaviour.
    """
    def decorate(func):
        @wraps(func)
        def wrapper(*args, **kwargs):
            return func(*args, **kwargs)
        if not PY2:
            # ``inspect.Parameter``/``Signature`` only exist on Python 3.
            kind = inspect.Parameter.POSITIONAL_OR_KEYWORD
            params = [inspect.Parameter('self', kind)]
            for pname in (name, 'index', 'columns', 'axis'):
                params.append(inspect.Parameter(pname, kind, default=None))
            params.extend(inspect.Parameter(pname, kind, default=default)
                          for pname, default in extra_params)
            func.__signature__ = inspect.Signature(params)
        return wrapper
    return decorate
# Substitution and Appender are derived from matplotlib.docstring (1.1.0)
# module http://matplotlib.org/users/license.html
class Substitution(object):
    """
    Decorator that %-formats the target function's docstring with a stored
    set of substitution parameters.

    Robust even when func.__doc__ is None (for example, under -OO).

    Construct with either positional args or keyword args (not both),
    then decorate a suitable function. e.g.

    sub_author_name = Substitution(author='Jason')

    @sub_author_name
    def some_function(x):
        "%(author)s wrote this function"

    # note that some_function.__doc__ is now "Jason wrote this function"

    One can also use positional arguments.

    sub_first_last_names = Substitution('Edgar Allen', 'Poe')

    @sub_first_last_names
    def some_function(x):
        "%s %s wrote the Raven"
    """
    def __init__(self, *args, **kwargs):
        # Positional and keyword parameters are mutually exclusive.
        if args and kwargs:
            raise AssertionError("Only positional or keyword args are allowed")
        self.params = args if args else kwargs

    def __call__(self, func):
        doc = func.__doc__
        if doc:
            func.__doc__ = doc % self.params
        return func

    def update(self, *args, **kwargs):
        """
        Update self.params with supplied args.
        If called, we assume self.params is a dict.
        """
        self.params.update(*args, **kwargs)

    @classmethod
    def from_params(cls, params):
        """
        Build a Substitution that references *params* directly (no copy),
        so later mutation of the mapping is seen by the decorator.
        """
        inst = cls()
        inst.params = params
        return inst
class Appender(object):
    """
    Decorator that appends ``addendum`` to the docstring of the target
    function.

    Robust even when func.__doc__ is None (for example, under -OO).
    ``join`` separates the original docstring from the addendum;
    ``indents`` indents the addendum before it is stored. e.g.

    add_copyright = Appender("Copyright (c) 2009", join='\n')

    @add_copyright
    def my_dog(has='fleas'):
        "This docstring will have a copyright below"
        pass
    """
    def __init__(self, addendum, join='', indents=0):
        # Pre-indent the addendum once, at construction time.
        self.addendum = indent(addendum, indents=indents) if indents > 0 else addendum
        self.join = join

    def __call__(self, func):
        # Normalize both pieces to '' so the join never sees None.
        base = func.__doc__ or ''
        extra = self.addendum or ''
        self.addendum = extra
        func.__doc__ = dedent(self.join.join([base, extra]))
        return func
def indent(text, indents=1):
    """
    Indent every line of *text* after the first by ``4 * indents`` spaces.

    Returns '' when *text* is empty or not a string; the first line is
    deliberately left unindented.
    """
    if not isinstance(text, str) or not text:
        return ''
    separator = '\n' + '    ' * indents
    return separator.join(text.split('\n'))
def make_signature(func):
    """
    Return a tuple of (argument strings with defaults, bare argument names)
    for *func*.

    Examples
    --------
    >>> def f(a, b, c=2):
    ...     return a * b * c
    >>> print(make_signature(f))
    (['a', 'b', 'c=2'], ['a', 'b', 'c'])
    """
    spec = signature(func)
    defaults = spec.defaults if spec.defaults is not None else ()
    # Left-pad with '' markers so defaults line up with the arg list.
    padded = ('',) * (len(spec.args) - len(defaults)) + tuple(defaults)
    args = [var if default == '' else var + '=' + repr(default)
            for var, default in zip(spec.args, padded)]
    if spec.varargs:
        args.append('*' + spec.varargs)
    if spec.keywords:
        args.append('**' + spec.keywords)
    return args, spec.args
| {
"repo_name": "GuessWhoSamFoo/pandas",
"path": "pandas/util/_decorators.py",
"copies": "1",
"size": "12597",
"license": "bsd-3-clause",
"hash": -7086385044083773000,
"line_mean": 34.7869318182,
"line_max": 79,
"alpha_frac": 0.5812495039,
"autogenerated": false,
"ratio": 4.300785250938887,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5382034754838887,
"avg_score": null,
"num_lines": null
} |
from functools import wraps
import inspect
from textwrap import dedent
import warnings
from pandas._libs.properties import cache_readonly # noqa
from pandas.compat import PY2, signature
def deprecate(name, alternative, version, alt_name=None,
              klass=None, stacklevel=2, msg=None):
    """
    Return a new function that emits a deprecation warning on use.
    To use this method for a deprecated function, another function
    `alternative` with the same signature must exist. The deprecated
    function will emit a deprecation warning, and in the docstring
    it will contain the deprecation directive with the provided version
    so it can be detected for future removal.
    Parameters
    ----------
    name : str
        Name of function to deprecate.
    alternative : func
        Function to use instead.
    version : str
        Version of pandas in which the method has been deprecated.
    alt_name : str, optional
        Name to use in preference of alternative.__name__.
    klass : Warning, default FutureWarning
        Warning category passed to ``warnings.warn``.
    stacklevel : int, default 2
        Stack level passed to ``warnings.warn``.
    msg : str
        The message to display in the warning.
        Default is '{name} is deprecated, use {alt_name} instead'.
    """
    alt_name = alt_name or alternative.__name__
    klass = klass or FutureWarning
    warning_msg = msg or '{} is deprecated, use {} instead'.format(name,
                                                                   alt_name)
    # The replacement simply warns once per call, then delegates to
    # `alternative` unchanged.
    @wraps(alternative)
    def wrapper(*args, **kwargs):
        warnings.warn(warning_msg, klass, stacklevel=stacklevel)
        return alternative(*args, **kwargs)
    # adding deprecated directive to the docstring
    msg = msg or 'Use `{alt_name}` instead.'.format(alt_name=alt_name)
    doc_error_msg = ('deprecate needs a correctly formatted docstring in '
                     'the target function (should have a one liner short '
                     'summary, and opening quotes should be in their own '
                     'line). Found:\n{}'.format(alternative.__doc__))
    # when python is running in optimized mode (i.e. `-OO`), docstrings are
    # removed, so we check that a docstring with correct formatting is used
    # but we allow empty docstrings
    if alternative.__doc__:
        if alternative.__doc__.count('\n') < 3:
            raise AssertionError(doc_error_msg)
        # Expected layout: opening quotes on their own line, a one-line
        # summary, a blank line, then the rest of the docstring.
        empty1, summary, empty2, doc = alternative.__doc__.split('\n', 3)
        # NOTE(review): precedence parses this as `empty1 or (empty2 and not
        # summary)`; `empty1 or empty2 or not summary` may have been the
        # intent -- confirm before changing.
        if empty1 or empty2 and not summary:
            raise AssertionError(doc_error_msg)
        wrapper.__doc__ = dedent("""
        {summary}
        .. deprecated:: {depr_version}
            {depr_msg}
        {rest_of_docstring}""").format(summary=summary.strip(),
                                       depr_version=version,
                                       depr_msg=msg,
                                       rest_of_docstring=dedent(doc))
    return wrapper
def deprecate_kwarg(old_arg_name, new_arg_name, mapping=None, stacklevel=2):
    """
    Decorator to deprecate a keyword argument of a function.
    Parameters
    ----------
    old_arg_name : str
        Name of argument in function to deprecate
    new_arg_name : str or None
        Name of preferred argument in function. Use None to raise warning that
        ``old_arg_name`` keyword is deprecated.
    mapping : dict or callable
        If mapping is present, use it to translate old arguments to
        new arguments. A callable must do its own value checking;
        values not found in a dict will be forwarded unchanged.
    Examples
    --------
    The following deprecates 'cols', using 'columns' instead
    >>> @deprecate_kwarg(old_arg_name='cols', new_arg_name='columns')
    ... def f(columns=''):
    ...     print(columns)
    ...
    >>> f(columns='should work ok')
    should work ok
    >>> f(cols='should raise warning')
    FutureWarning: cols is deprecated, use columns instead
      warnings.warn(msg, FutureWarning)
    should raise warning
    >>> f(cols='should error', columns="can\'t pass do both")
    TypeError: Can only specify 'cols' or 'columns', not both
    >>> @deprecate_kwarg('old', 'new', {'yes': True, 'no': False})
    ... def f(new=False):
    ...     print('yes!' if new else 'no!')
    ...
    >>> f(old='yes')
    FutureWarning: old='yes' is deprecated, use new=True instead
      warnings.warn(msg, FutureWarning)
    yes!
    To raise a warning that a keyword will be removed entirely in the future
    >>> @deprecate_kwarg(old_arg_name='cols', new_arg_name=None)
    ... def f(cols='', another_param=''):
    ...     print(cols)
    ...
    >>> f(cols='should raise warning')
    FutureWarning: the 'cols' keyword is deprecated and will be removed in a
    future version please takes steps to stop use of 'cols'
    should raise warning
    >>> f(another_param='should not raise warning')
    should not raise warning
    >>> f(cols='should raise warning', another_param='')
    FutureWarning: the 'cols' keyword is deprecated and will be removed in a
    future version please takes steps to stop use of 'cols'
    should raise warning
    """
    # Reject mapping objects we cannot use: anything that is neither
    # dict-like (has .get) nor callable.
    if mapping is not None and not hasattr(mapping, 'get') and \
            not callable(mapping):
        raise TypeError("mapping from old to new argument values "
                        "must be dict or callable!")
    def _deprecate_kwarg(func):
        @wraps(func)
        def wrapper(*args, **kwargs):
            # Pull the deprecated keyword out so it is never forwarded
            # under its old name unless explicitly re-inserted below.
            old_arg_value = kwargs.pop(old_arg_name, None)
            if new_arg_name is None and old_arg_value is not None:
                # The keyword is being retired outright: warn, put it
                # back, and call through unchanged.
                msg = (
                    "the '{old_name}' keyword is deprecated and will be "
                    "removed in a future version. "
                    "Please take steps to stop the use of '{old_name}'"
                ).format(old_name=old_arg_name)
                warnings.warn(msg, FutureWarning, stacklevel=stacklevel)
                kwargs[old_arg_name] = old_arg_value
                return func(*args, **kwargs)
            if old_arg_value is not None:
                # The keyword is being renamed (and its value possibly
                # translated via `mapping`).
                if mapping is not None:
                    if hasattr(mapping, 'get'):
                        new_arg_value = mapping.get(old_arg_value,
                                                    old_arg_value)
                    else:
                        new_arg_value = mapping(old_arg_value)
                    msg = ("the {old_name}={old_val!r} keyword is deprecated, "
                           "use {new_name}={new_val!r} instead"
                           ).format(old_name=old_arg_name,
                                    old_val=old_arg_value,
                                    new_name=new_arg_name,
                                    new_val=new_arg_value)
                else:
                    new_arg_value = old_arg_value
                    msg = ("the '{old_name}' keyword is deprecated, "
                           "use '{new_name}' instead"
                           ).format(old_name=old_arg_name,
                                    new_name=new_arg_name)
                warnings.warn(msg, FutureWarning, stacklevel=stacklevel)
                # Mixing the old and new spellings in one call is an error
                # rather than a silent overwrite.
                if kwargs.get(new_arg_name, None) is not None:
                    msg = ("Can only specify '{old_name}' or '{new_name}', "
                           "not both").format(old_name=old_arg_name,
                                              new_name=new_arg_name)
                    raise TypeError(msg)
                else:
                    kwargs[new_arg_name] = new_arg_value
            return func(*args, **kwargs)
        return wrapper
    return _deprecate_kwarg
def rewrite_axis_style_signature(name, extra_params):
    """
    Decorator factory that attaches an explicit ``__signature__`` of the
    form ``(self, <name>, index, columns, axis, *extra_params)`` to the
    wrapped function without changing its runtime behaviour.
    """
    def decorate(func):
        @wraps(func)
        def wrapper(*args, **kwargs):
            return func(*args, **kwargs)
        if not PY2:
            # ``inspect.Parameter``/``Signature`` only exist on Python 3.
            kind = inspect.Parameter.POSITIONAL_OR_KEYWORD
            params = [inspect.Parameter('self', kind)]
            for pname in (name, 'index', 'columns', 'axis'):
                params.append(inspect.Parameter(pname, kind, default=None))
            params.extend(inspect.Parameter(pname, kind, default=default)
                          for pname, default in extra_params)
            func.__signature__ = inspect.Signature(params)
        return wrapper
    return decorate
# Substitution and Appender are derived from matplotlib.docstring (1.1.0)
# module http://matplotlib.org/users/license.html
class Substitution(object):
    """
    Decorator that %-formats the target function's docstring with a stored
    set of substitution parameters.

    Robust even when func.__doc__ is None (for example, under -OO).

    Construct with either positional args or keyword args (not both),
    then decorate a suitable function. e.g.

    sub_author_name = Substitution(author='Jason')

    @sub_author_name
    def some_function(x):
        "%(author)s wrote this function"

    # note that some_function.__doc__ is now "Jason wrote this function"

    One can also use positional arguments.

    sub_first_last_names = Substitution('Edgar Allen', 'Poe')

    @sub_first_last_names
    def some_function(x):
        "%s %s wrote the Raven"
    """
    def __init__(self, *args, **kwargs):
        # Positional and keyword parameters are mutually exclusive.
        if args and kwargs:
            raise AssertionError("Only positional or keyword args are allowed")
        self.params = args if args else kwargs

    def __call__(self, func):
        doc = func.__doc__
        if doc:
            func.__doc__ = doc % self.params
        return func

    def update(self, *args, **kwargs):
        """
        Update self.params with supplied args.
        If called, we assume self.params is a dict.
        """
        self.params.update(*args, **kwargs)

    @classmethod
    def from_params(cls, params):
        """
        Build a Substitution that references *params* directly (no copy),
        so later mutation of the mapping is seen by the decorator.
        """
        inst = cls()
        inst.params = params
        return inst
class Appender(object):
    """
    Decorator that appends ``addendum`` to the docstring of the target
    function.

    Robust even when func.__doc__ is None (for example, under -OO).
    ``join`` separates the original docstring from the addendum;
    ``indents`` indents the addendum before it is stored. e.g.

    add_copyright = Appender("Copyright (c) 2009", join='\n')

    @add_copyright
    def my_dog(has='fleas'):
        "This docstring will have a copyright below"
        pass
    """
    def __init__(self, addendum, join='', indents=0):
        # Pre-indent the addendum once, at construction time.
        self.addendum = indent(addendum, indents=indents) if indents > 0 else addendum
        self.join = join

    def __call__(self, func):
        # Normalize both pieces to '' so the join never sees None.
        base = func.__doc__ or ''
        extra = self.addendum or ''
        self.addendum = extra
        func.__doc__ = dedent(self.join.join([base, extra]))
        return func
def indent(text, indents=1):
    """
    Indent every line of *text* after the first by ``4 * indents`` spaces.

    Returns '' when *text* is empty or not a string; the first line is
    deliberately left unindented.
    """
    if not isinstance(text, str) or not text:
        return ''
    separator = '\n' + '    ' * indents
    return separator.join(text.split('\n'))
def make_signature(func):
    """
    Return a tuple of (argument strings with defaults, bare argument names)
    for *func*.

    Examples
    --------
    >>> def f(a, b, c=2):
    ...     return a * b * c
    >>> print(make_signature(f))
    (['a', 'b', 'c=2'], ['a', 'b', 'c'])
    """
    spec = signature(func)
    defaults = spec.defaults if spec.defaults is not None else ()
    # Left-pad with '' markers so defaults line up with the arg list.
    padded = ('',) * (len(spec.args) - len(defaults)) + tuple(defaults)
    args = [var if default == '' else var + '=' + repr(default)
            for var, default in zip(spec.args, padded)]
    if spec.varargs:
        args.append('*' + spec.varargs)
    if spec.keywords:
        args.append('**' + spec.keywords)
    return args, spec.args
| {
"repo_name": "MJuddBooth/pandas",
"path": "pandas/util/_decorators.py",
"copies": "1",
"size": "12592",
"license": "bsd-3-clause",
"hash": 8748035394769752000,
"line_mean": 34.671388102,
"line_max": 79,
"alpha_frac": 0.5808449809,
"autogenerated": false,
"ratio": 4.300546448087432,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0,
"num_lines": 353
} |
from functools import wraps
import inspect
from textwrap import dedent
import warnings
from pandas._libs.properties import cache_readonly # noqa
def deprecate(name, alternative, version, alt_name=None,
              klass=None, stacklevel=2, msg=None):
    """
    Return a new function that emits a deprecation warning on use.
    To use this method for a deprecated function, another function
    `alternative` with the same signature must exist. The deprecated
    function will emit a deprecation warning, and in the docstring
    it will contain the deprecation directive with the provided version
    so it can be detected for future removal.
    Parameters
    ----------
    name : str
        Name of function to deprecate.
    alternative : func
        Function to use instead.
    version : str
        Version of pandas in which the method has been deprecated.
    alt_name : str, optional
        Name to use in preference of alternative.__name__.
    klass : Warning, default FutureWarning
        Warning category passed to ``warnings.warn``.
    stacklevel : int, default 2
        Stack level passed to ``warnings.warn``.
    msg : str
        The message to display in the warning.
        Default is '{name} is deprecated, use {alt_name} instead'.
    """
    alt_name = alt_name or alternative.__name__
    klass = klass or FutureWarning
    warning_msg = msg or '{} is deprecated, use {} instead'.format(name,
                                                                   alt_name)
    # The replacement simply warns once per call, then delegates to
    # `alternative` unchanged.
    @wraps(alternative)
    def wrapper(*args, **kwargs):
        warnings.warn(warning_msg, klass, stacklevel=stacklevel)
        return alternative(*args, **kwargs)
    # adding deprecated directive to the docstring
    msg = msg or 'Use `{alt_name}` instead.'.format(alt_name=alt_name)
    doc_error_msg = ('deprecate needs a correctly formatted docstring in '
                     'the target function (should have a one liner short '
                     'summary, and opening quotes should be in their own '
                     'line). Found:\n{}'.format(alternative.__doc__))
    # when python is running in optimized mode (i.e. `-OO`), docstrings are
    # removed, so we check that a docstring with correct formatting is used
    # but we allow empty docstrings
    if alternative.__doc__:
        if alternative.__doc__.count('\n') < 3:
            raise AssertionError(doc_error_msg)
        # Expected layout: opening quotes on their own line, a one-line
        # summary, a blank line, then the rest of the docstring.
        empty1, summary, empty2, doc = alternative.__doc__.split('\n', 3)
        # NOTE(review): precedence parses this as `empty1 or (empty2 and not
        # summary)`; `empty1 or empty2 or not summary` may have been the
        # intent -- confirm before changing.
        if empty1 or empty2 and not summary:
            raise AssertionError(doc_error_msg)
        wrapper.__doc__ = dedent("""
        {summary}
        .. deprecated:: {depr_version}
            {depr_msg}
        {rest_of_docstring}""").format(summary=summary.strip(),
                                       depr_version=version,
                                       depr_msg=msg,
                                       rest_of_docstring=dedent(doc))
    return wrapper
def deprecate_kwarg(old_arg_name, new_arg_name, mapping=None, stacklevel=2):
    """
    Decorator to deprecate a keyword argument of a function.
    Parameters
    ----------
    old_arg_name : str
        Name of argument in function to deprecate
    new_arg_name : str or None
        Name of preferred argument in function. Use None to raise warning that
        ``old_arg_name`` keyword is deprecated.
    mapping : dict or callable
        If mapping is present, use it to translate old arguments to
        new arguments. A callable must do its own value checking;
        values not found in a dict will be forwarded unchanged.
    Examples
    --------
    The following deprecates 'cols', using 'columns' instead
    >>> @deprecate_kwarg(old_arg_name='cols', new_arg_name='columns')
    ... def f(columns=''):
    ...     print(columns)
    ...
    >>> f(columns='should work ok')
    should work ok
    >>> f(cols='should raise warning')
    FutureWarning: cols is deprecated, use columns instead
      warnings.warn(msg, FutureWarning)
    should raise warning
    >>> f(cols='should error', columns="can\'t pass do both")
    TypeError: Can only specify 'cols' or 'columns', not both
    >>> @deprecate_kwarg('old', 'new', {'yes': True, 'no': False})
    ... def f(new=False):
    ...     print('yes!' if new else 'no!')
    ...
    >>> f(old='yes')
    FutureWarning: old='yes' is deprecated, use new=True instead
      warnings.warn(msg, FutureWarning)
    yes!
    To raise a warning that a keyword will be removed entirely in the future
    >>> @deprecate_kwarg(old_arg_name='cols', new_arg_name=None)
    ... def f(cols='', another_param=''):
    ...     print(cols)
    ...
    >>> f(cols='should raise warning')
    FutureWarning: the 'cols' keyword is deprecated and will be removed in a
    future version please takes steps to stop use of 'cols'
    should raise warning
    >>> f(another_param='should not raise warning')
    should not raise warning
    >>> f(cols='should raise warning', another_param='')
    FutureWarning: the 'cols' keyword is deprecated and will be removed in a
    future version please takes steps to stop use of 'cols'
    should raise warning
    """
    # Reject mapping objects we cannot use: anything that is neither
    # dict-like (has .get) nor callable.
    if mapping is not None and not hasattr(mapping, 'get') and \
            not callable(mapping):
        raise TypeError("mapping from old to new argument values "
                        "must be dict or callable!")
    def _deprecate_kwarg(func):
        @wraps(func)
        def wrapper(*args, **kwargs):
            # Pull the deprecated keyword out so it is never forwarded
            # under its old name unless explicitly re-inserted below.
            old_arg_value = kwargs.pop(old_arg_name, None)
            if new_arg_name is None and old_arg_value is not None:
                # The keyword is being retired outright: warn, put it
                # back, and call through unchanged.
                msg = (
                    "the '{old_name}' keyword is deprecated and will be "
                    "removed in a future version. "
                    "Please take steps to stop the use of '{old_name}'"
                ).format(old_name=old_arg_name)
                warnings.warn(msg, FutureWarning, stacklevel=stacklevel)
                kwargs[old_arg_name] = old_arg_value
                return func(*args, **kwargs)
            if old_arg_value is not None:
                # The keyword is being renamed (and its value possibly
                # translated via `mapping`).
                if mapping is not None:
                    if hasattr(mapping, 'get'):
                        new_arg_value = mapping.get(old_arg_value,
                                                    old_arg_value)
                    else:
                        new_arg_value = mapping(old_arg_value)
                    msg = ("the {old_name}={old_val!r} keyword is deprecated, "
                           "use {new_name}={new_val!r} instead"
                           ).format(old_name=old_arg_name,
                                    old_val=old_arg_value,
                                    new_name=new_arg_name,
                                    new_val=new_arg_value)
                else:
                    new_arg_value = old_arg_value
                    msg = ("the '{old_name}' keyword is deprecated, "
                           "use '{new_name}' instead"
                           ).format(old_name=old_arg_name,
                                    new_name=new_arg_name)
                warnings.warn(msg, FutureWarning, stacklevel=stacklevel)
                # Mixing the old and new spellings in one call is an error
                # rather than a silent overwrite.
                if kwargs.get(new_arg_name, None) is not None:
                    msg = ("Can only specify '{old_name}' or '{new_name}', "
                           "not both").format(old_name=old_arg_name,
                                              new_name=new_arg_name)
                    raise TypeError(msg)
                else:
                    kwargs[new_arg_name] = new_arg_value
            return func(*args, **kwargs)
        return wrapper
    return _deprecate_kwarg
def rewrite_axis_style_signature(name, extra_params):
    """
    Decorator factory that attaches an explicit ``__signature__`` of the
    form ``(self, <name>, index, columns, axis, *extra_params)`` to the
    wrapped function without changing its runtime behaviour.
    """
    def decorate(func):
        @wraps(func)
        def wrapper(*args, **kwargs):
            return func(*args, **kwargs)
        kind = inspect.Parameter.POSITIONAL_OR_KEYWORD
        params = [inspect.Parameter('self', kind)]
        for pname in (name, 'index', 'columns', 'axis'):
            params.append(inspect.Parameter(pname, kind, default=None))
        params.extend(inspect.Parameter(pname, kind, default=default)
                      for pname, default in extra_params)
        func.__signature__ = inspect.Signature(params)
        return wrapper
    return decorate
# Substitution and Appender are derived from matplotlib.docstring (1.1.0)
# module http://matplotlib.org/users/license.html
class Substitution:
    """
    Decorator that %-formats the target function's docstring with a stored
    set of substitution parameters.

    Robust even when func.__doc__ is None (for example, under -OO).

    Construct with either positional args or keyword args (not both),
    then decorate a suitable function. e.g.

    sub_author_name = Substitution(author='Jason')

    @sub_author_name
    def some_function(x):
        "%(author)s wrote this function"

    # note that some_function.__doc__ is now "Jason wrote this function"

    One can also use positional arguments.

    sub_first_last_names = Substitution('Edgar Allen', 'Poe')

    @sub_first_last_names
    def some_function(x):
        "%s %s wrote the Raven"
    """
    def __init__(self, *args, **kwargs):
        # Positional and keyword parameters are mutually exclusive.
        if args and kwargs:
            raise AssertionError("Only positional or keyword args are allowed")
        self.params = args if args else kwargs

    def __call__(self, func):
        doc = func.__doc__
        if doc:
            func.__doc__ = doc % self.params
        return func

    def update(self, *args, **kwargs):
        """
        Update self.params with supplied args.
        If called, we assume self.params is a dict.
        """
        self.params.update(*args, **kwargs)

    @classmethod
    def from_params(cls, params):
        """
        Build a Substitution that references *params* directly (no copy),
        so later mutation of the mapping is seen by the decorator.
        """
        inst = cls()
        inst.params = params
        return inst
class Appender:
    """
    Decorator that appends ``addendum`` to the docstring of the target
    function.

    Robust even when func.__doc__ is None (for example, under -OO).
    ``join`` separates the original docstring from the addendum;
    ``indents`` indents the addendum before it is stored. e.g.

    add_copyright = Appender("Copyright (c) 2009", join='\n')

    @add_copyright
    def my_dog(has='fleas'):
        "This docstring will have a copyright below"
        pass
    """
    def __init__(self, addendum, join='', indents=0):
        # Pre-indent the addendum once, at construction time.
        self.addendum = indent(addendum, indents=indents) if indents > 0 else addendum
        self.join = join

    def __call__(self, func):
        # Normalize both pieces to '' so the join never sees None.
        base = func.__doc__ or ''
        extra = self.addendum or ''
        self.addendum = extra
        func.__doc__ = dedent(self.join.join([base, extra]))
        return func
def indent(text, indents=1):
    """
    Indent every line of *text* after the first by ``4 * indents`` spaces.

    Returns '' when *text* is empty or not a string; the first line is
    deliberately left unindented.
    """
    if not isinstance(text, str) or not text:
        return ''
    separator = '\n' + '    ' * indents
    return separator.join(text.split('\n'))
| {
"repo_name": "cbertinato/pandas",
"path": "pandas/util/_decorators.py",
"copies": "1",
"size": "11617",
"license": "bsd-3-clause",
"hash": -5527246498491193000,
"line_mean": 35.1900311526,
"line_max": 79,
"alpha_frac": 0.5853490574,
"autogenerated": false,
"ratio": 4.362373263236951,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5447722320636951,
"avg_score": null,
"num_lines": null
} |
from functools import wraps
import inspect
import collections
import warnings
import types
import sys
from ._dummy_key import _CmpDummyKey as _CmpDummyKey
# Algorithm selector constants; each is followed by a module-level
# docstring picked up by documentation tools.
RED_BLACK_TREE = 0
"""
Red-black tree algorithm indicator; good for general use.
"""
SPLAY_TREE = 1
"""
Splay tree algorithm indicator; good for temporal locality cases.
"""
SORTED_LIST = 2
"""
Sorted list algorithm indicator; good for infrequent updates.
"""
def _updator_metadata(set_, init_info):
    """
    Resolve the metadata class and an update callback for an updator.

    Returns None when no updator is configured; otherwise a tuple of the
    updator's nested ``Metadata`` class and a function
    ``update(m, key, l, r)`` that forwards to ``m.update`` together with a
    key function -- either the user-supplied one or a comparator-backed
    ``_CmpDummyKey`` wrapper.
    """
    updator = init_info.updator
    if updator is None:
        return None
    # The updator must expose exactly one nested class named 'Metadata'.
    metadata_classes = [
        member for member_name, member in
        inspect.getmembers(updator, predicate=inspect.isclass)
        if member_name == 'Metadata']
    assert len(metadata_classes) == 1
    metadata_cls = metadata_classes[0]
    if init_info.compare is not None:
        compare = init_info.compare
    else:
        compare = lambda x, y: -1 if x < y else (0 if x == y else 1)
    if init_info.key is not None:
        key_fn = init_info.key
    else:
        key_fn = lambda k: _CmpDummyKey(compare, k)
    def update(m, key, l, r):
        m.update(key, key_fn, l, r)
    return (metadata_cls, update)
def _adopt_updator_methods(self, updator):
    """
    Copy the public methods of *updator* onto *self*'s instance dict,
    rebinding them so *self* is passed as the receiver.

    Names starting with '_', the name 'Metadata', and names already present
    on the instance are skipped; members without an underlying function
    (non-methods) trigger a RuntimeWarning instead of being adopted.
    """
    if updator is None:
        return
    def _rebind(f):
        def bound(*args, **kwargs):
            return f(self, *args, **kwargs)
        return wraps(f)(bound)
    for attr_name, member in inspect.getmembers(updator()):
        skip = (attr_name.startswith('_')
                or attr_name in self.__dict__
                or attr_name == 'Metadata')
        if skip:
            continue
        try:
            # py3 bound methods expose __func__; py2 used im_func.
            if sys.version_info >= (3, 0):
                raw = member.__func__
            else:
                raw = member.im_func
            self.__dict__[attr_name] = _rebind(raw)
        except AttributeError:
            warnings.warn(attr_name, RuntimeWarning)
_CommonInitInfo = collections.namedtuple(
'_CommonInitInfo',
['key_type', 'alg', 'key', 'compare', 'updator'],
verbose = False)
| {
"repo_name": "pyannote/pyannote-banyan",
"path": "banyan/_common_base.py",
"copies": "2",
"size": "1906",
"license": "bsd-3-clause",
"hash": -5718820225957598000,
"line_mean": 25.1095890411,
"line_max": 112,
"alpha_frac": 0.5939139559,
"autogenerated": false,
"ratio": 3.7299412915851273,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5323855247485126,
"avg_score": null,
"num_lines": null
} |
from functools import wraps
import inspect
import logging as log
import requests
import time
import sys
from qubell.api.private.exceptions import ApiError, api_http_code_errors
log.getLogger("requests.packages.urllib3.connectionpool").setLevel(log.ERROR)
_routes_stat = {}
def route(route_str):  # decorator param
    """
    Provides play2-like routes, with python formatting.
    All string fields should be named parameters.
    :param route_str: a route "GET /parent/{parentID}/child/{childId}{ctype}"
    :return: the response of requests.request
    """
    def ilog(elapsed):
        # Accumulate per-route call statistics (count/min/max/avg, in ms).
        last_stat = _routes_stat.get(route_str, {"count": 0, "min": sys.maxint, "max": 0, "avg": 0})
        last_count = last_stat["count"]
        _routes_stat[route_str] = {
            "count": last_count + 1,
            "min": min(elapsed, last_stat["min"]),
            "max": max(elapsed, last_stat["max"]),
            "avg": (last_count * last_stat["avg"] + elapsed) / (last_count + 1)
        }
        #log.debug('Route Time: {0} took {1} ms'.format(route_str, elapsed))
    def wrapper(f):  # decorated function
        @wraps(f)
        def wrapped_func(*args, **kwargs):  # params of function
            self = args[0]
            method, url = route_str.split(" ")
            def defaults_dict():
                # Map the function's declared default values onto their names.
                f_args, varargs, keywords, defaults = inspect.getargspec(f)
                defaults = defaults or []
                return dict(zip(f_args[-len(defaults):], defaults))
            defs = defaults_dict()
            route_args = dict(defs.items() + kwargs.items())
            def get_destination_url():
                try:
                    return url.format(**route_args)
                except KeyError as e:
                    # KeyError in format carries the missing key in its message
                    raise AttributeError("Define {0} as named argument for route.".format(e))
            destination_url = self.base_url + get_destination_url()
            f(*args, **kwargs)  # generally this is "pass"
            bypass_args = {param: route_args[param] for param in ["data", "cookies", "auth", "files", "content_type"] if param in route_args}
            # add json content type for:
            #  - all public api, meaning have basic auth
            #  - private that ends with .json
            #  - unless files are sent
            if "files" not in bypass_args and (destination_url.endswith('.json') or "auth" in route_args):
                bypass_args['headers'] = {'Content-Type': 'application/json'}
            if "content_type" in bypass_args and bypass_args['content_type'] == "yaml":
                del bypass_args["content_type"]
                bypass_args['headers'] = {'Content-Type': 'application/x-yaml'}
            start = time.time()
            response = requests.request(method, destination_url, verify=self.verify_ssl, **bypass_args)
            end = time.time()
            elapsed = int((end - start) * 1000.0)
            ilog(elapsed)
            if self.verify_codes:
                # BUGFIX: was `is not 200` -- an identity comparison on an int
                # that only worked via CPython's small-int cache; use `!=`.
                if response.status_code != 200:
                    msg = "Route {0} {1} returned code={2} and error: {3}".format(method, get_destination_url(), response.status_code,
                                                                                  response.text)
                    if response.status_code in api_http_code_errors.keys():
                        raise api_http_code_errors[response.status_code](msg)
                    else:
                        log.debug(response.text)
                        raise ApiError(msg)
            return response
        return wrapped_func
    return wrapper
def play_auth(f):
    """
    Injects cookies into the requests call made over a route.
    Requires the router (``self``) to be connected and hold ``_cookies``.
    :return: route
    """
    @wraps(f)  # preserve the route's name/docstring (was missing before)
    def wrapper(*args, **kwargs):
        self = args[0]
        if "cookies" in kwargs:
            raise AttributeError("don't set cookies explicitly")
        assert self.is_connected, "not connected, call router.connect(email, password) first"
        assert self._cookies, "no cookies and connected o_O"
        kwargs["cookies"] = self._cookies
        return f(*args, **kwargs)
    return wrapper
def basic_auth(f):
    """
    Injects auth into the requests call made over a route.
    Requires the router (``self``) to be connected and hold ``_auth``.
    :return: route
    """
    @wraps(f)  # preserve the route's name/docstring (was missing before)
    def wrapper(*args, **kwargs):
        self = args[0]
        if "auth" in kwargs:
            raise AttributeError("don't set auth token explicitly")
        assert self.is_connected, "not connected, call router.connect(email, password) first"
        assert self._auth, "no basic token and connected o_O"
        kwargs["auth"] = self._auth
        return f(*args, **kwargs)
    return wrapper
def log_routes_stat():
    """Log one formatted line per route with the timing stats gathered by
    `route` (count / min / avg / max, in milliseconds)."""
    nice_stat = ["  count: {0:<4} min: {1:<6} avg: {2:<6} max: {3:<6} {4}".format(stat["count"], stat["min"], stat["avg"], stat["max"], r) for r, stat in _routes_stat.items()]
    log.info("Route Statistic\n{0}".format("\n".join(nice_stat))) | {
"repo_name": "netvl/contrib-python-qubell-client",
"path": "qubell/api/provider/__init__.py",
"copies": "1",
"size": "4941",
"license": "apache-2.0",
"hash": 7370980527609340000,
"line_mean": 36.4393939394,
"line_max": 176,
"alpha_frac": 0.5674964582,
"autogenerated": false,
"ratio": 4.017073170731707,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5084569628931707,
"avg_score": null,
"num_lines": null
} |
from functools import wraps
import inspect
import logging
# Pick the introspection helper available on this interpreter: Python 3
# provides getfullargspec; Python 2 only has getargspec.
getargspec = None
if hasattr(inspect, 'getfullargspec'):
    getargspec = inspect.getfullargspec
else:
    # deprecated in Python 3, but available in Python 2
    getargspec = inspect.getargspec
class _Namespace:
    # Empty attribute container; used as the shared scratch space ``Merry.g``.
    pass
class Merry(object):
    """Initialize merry.

    :param logger_name: the logger name to use. The default is ``'merry'``.
    :param debug: set to ``True`` to enable debug mode, which causes all
                  errors to bubble up so that a debugger can catch them. The
                  default is ``False``.
    """
    def __init__(self, logger_name='merry', debug=False):
        self.logger = logging.getLogger(logger_name)
        self.g = _Namespace()  # scratch namespace shared with handlers
        self.debug = debug
        self.except_ = {}  # exception class -> handler callable
        self.force_debug = []  # classes that always bubble up (debug=True)
        self.force_handle = []  # classes that are always handled (debug=False)
        self.else_ = None  # success-path handler
        self.finally_ = None  # cleanup handler
    def _try(self, f):
        """Decorator that wraps a function in a try block.
        Example usage::
            @merry._try
            def my_function():
                # do something here
        """
        @wraps(f)
        def wrapper(*args, **kwargs):
            ret = None
            try:
                ret = f(*args, **kwargs)
                # note that if the function returned something, the else clause
                # will be skipped. This is a similar behavior to a normal
                # try/except/else block.
                if ret is not None:
                    return ret
            except Exception as e:
                # find the best handler for this exception: the most derived
                # registered class that the exception is an instance of
                handler = None
                for c in self.except_.keys():
                    if isinstance(e, c):
                        if handler is None or issubclass(c, handler):
                            handler = c
                # if we don't have any handler, we let the exception bubble up
                if handler is None:
                    raise e
                # log exception
                self.logger.exception('[merry] Exception caught')
                # if in debug mode, then bubble up to let a debugger handle
                debug = self.debug
                if handler in self.force_debug:
                    debug = True
                elif handler in self.force_handle:
                    debug = False
                if debug:
                    raise e
                # invoke handler; pass the exception only if it takes an arg
                if len(getargspec(self.except_[handler])[0]) == 0:
                    return self.except_[handler]()
                else:
                    return self.except_[handler](e)
            else:
                # if we have an else handler, call it now
                if self.else_ is not None:
                    return self.else_()
            finally:
                # if we have a finally handler, call it now; a non-None return
                # value from it overrides the wrapped function's return value
                if self.finally_ is not None:
                    alt_ret = self.finally_()
                    if alt_ret is not None:
                        ret = alt_ret
                    return ret
        return wrapper
    def _except(self, *args, **kwargs):
        """Decorator that registers a function as an error handler for one or
        more exception classes.
        Example usage::
            @merry._except(RuntimeError)
            def runtime_error_handler(e):
                print('runtime error:', str(e))
        :param args: the list of exception classes to be handled by the
                     decorated function.
        :param kwargs: configuration arguments. Pass ``debug=True`` to enable
                       debug mode for this handler, which bubbles up all
                       exceptions. Pass ``debug=False`` to prevent the error
                       from bubbling up, even if debug mode is enabled
                       globally.
        """
        def decorator(f):
            for e in args:
                self.except_[e] = f
                d = kwargs.get('debug', None)
                if d:
                    self.force_debug.append(e)
                elif d is not None:
                    self.force_handle.append(e)
            return f
        return decorator
    def _else(self, f):
        """Decorator to define the ``else`` clause handler.
        Example usage::
            @merry._else
            def else_handler():
                print('no exceptions were raised')
        """
        self.else_ = f
        return f
    def _finally(self, f):
        """Decorator to define the ``finally`` clause handler.
        Example usage::
            @merry._finally
            def finally_handler():
                print('clean up')
        """
        self.finally_ = f
        return f
| {
"repo_name": "miguelgrinberg/merry",
"path": "src/merry/__init__.py",
"copies": "1",
"size": "4756",
"license": "mit",
"hash": -4257574610880254500,
"line_mean": 31.1351351351,
"line_max": 79,
"alpha_frac": 0.4993692178,
"autogenerated": false,
"ratio": 4.959332638164755,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5958701855964754,
"avg_score": null,
"num_lines": null
} |
from functools import wraps
import inspect
import random
import copy
from axelrod import Actions, flip_action
from .game import DefaultGame
C, D = Actions.C, Actions.D
# Strategy classifiers
def is_basic(s):
    """
    Defines criteria for a strategy to be considered 'basic'
    """
    c = s.classifier
    return (not c['stochastic']
            and not c['inspects_source']
            and not c['manipulates_source']
            and not c['manipulates_state']
            and c['memory_depth'] in (0, 1))
def obey_axelrod(s):
    """
    A function to check if a strategy obeys Axelrod's original tournament rules.
    """
    c = s.classifier
    # Any form of source/state tampering disqualifies the strategy.
    return not any((c['inspects_source'],
                    c['manipulates_source'],
                    c['manipulates_state']))
def update_history(player, move):
    """Updates histories and cooperation / defections counts following play."""
    # Update histories
    player.history.append(move)
    # Update player counts of cooperation and defection (C and D are the
    # two Actions imported at module level)
    if move == C:
        player.cooperations += 1
    elif move == D:
        player.defections += 1
def init_args(func):
    """Decorator to simplify the handling of init_args. Use whenever overriding
    Player.__init__ in subclasses of Player that require arguments as follows:
    @init_args
    def __init__(self, myarg1, ...)
    """
    @wraps(func)
    def wrapped(self, *args, **kwargs):
        outcome = func(self, *args, **kwargs)
        # Record the positional args so clone() can rebuild this player.
        self.init_args = args
        return outcome
    return wrapped
class Player(object):
    """A class for a player in the tournament.
    This is an abstract base class, not intended to be used directly.
    """
    name = "Player"
    classifier = {}  # subclasses describe their strategy's properties here
    default_classifier = {
        'stochastic': False,
        'memory_depth': float('inf'),
        'makes_use_of': None,
        'inspects_source': None,
        'manipulates_source': None,
        'manipulates_state': None
    }
    def __init__(self):
        """Initiates an empty history and 0 score for a player."""
        self.history = []
        # Deep-copy so per-instance edits don't mutate the class attribute.
        self.classifier = copy.deepcopy(self.classifier)
        if self.name == "Player":
            self.classifier['stochastic'] = False
        # Fill in any classifier keys the subclass didn't specify.
        for dimension in self.default_classifier:
            if dimension not in self.classifier:
                self.classifier[dimension] = self.default_classifier[dimension]
        self.cooperations = 0
        self.defections = 0
        self.init_args = ()
        self.set_match_attributes()
    def receive_match_attributes(self):
        # Overwrite this function if your strategy needs
        # to make use of match_attributes such as
        # the game matrix, the number of rounds or the noise
        pass
    def set_match_attributes(self, length=-1, game=None, noise=0):
        """Record match parameters and notify the strategy about them."""
        if not game:
            game = DefaultGame
        self.match_attributes = {
            "length": length,
            "game": game,
            "noise": noise
        }
        self.receive_match_attributes()
    def __repr__(self):
        """The string method for the strategy."""
        return self.name
    @staticmethod
    def _add_noise(noise, s1, s2):
        # Independently flip each action with probability `noise`.
        r = random.random()
        if r < noise:
            s1 = flip_action(s1)
        r = random.random()
        if r < noise:
            s2 = flip_action(s2)
        return s1, s2
    def strategy(self, opponent):
        """This is a placeholder strategy."""
        raise NotImplementedError()
    def play(self, opponent, noise=0):
        """This pits two players against each other."""
        s1, s2 = self.strategy(opponent), opponent.strategy(self)
        if noise:
            s1, s2 = self._add_noise(noise, s1, s2)
        update_history(self, s1)
        update_history(opponent, s2)
    def clone(self):
        """Clones the player without history, reapplying configuration
        parameters as necessary."""
        # You may be tempted to reimplement using the `copy` module
        # Note that this would require a deepcopy in some cases and there may
        # be significant changes required throughout the library.
        # Consider overriding in special cases only if necessary
        cls = self.__class__
        new_player = cls(*self.init_args)
        new_player.match_attributes = copy.copy(self.match_attributes)
        return new_player
    def reset(self):
        """Resets history.
        When creating strategies that create new attributes then this method
        should be re-written (in the inherited class) and should not only reset
        history but also reset all other attributes.
        """
        self.history = []
        self.cooperations = 0
        self.defections = 0
| {
"repo_name": "marcharper/Axelrod",
"path": "axelrod/player.py",
"copies": "1",
"size": "4889",
"license": "mit",
"hash": 6629637244913780000,
"line_mean": 30.3397435897,
"line_max": 132,
"alpha_frac": 0.6205768051,
"autogenerated": false,
"ratio": 4.125738396624473,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5246315201724473,
"avg_score": null,
"num_lines": null
} |
from functools import wraps
import inspect
import re
from django.conf import settings
from django.contrib.auth import REDIRECT_FIELD_NAME
from django.contrib.auth.decorators import user_passes_test
from django.core.exceptions import PermissionDenied
from django.db.models import Model, get_model
from django.http import HttpResponseForbidden, HttpResponseRedirect
from django.shortcuts import get_object_or_404
from django.utils.decorators import available_attrs
from django.utils.http import urlquote
from kuma.core.urlresolvers import reverse
def user_access_decorator(redirect_func, redirect_url_func, deny_func=None,
                          redirect_field=REDIRECT_FIELD_NAME):
    """
    Helper function that returns a decorator.
    * redirect_func ----- If truthy, a redirect will occur
    * deny_func --------- If truthy, HttpResponseForbidden is returned.
    * redirect_url_func - Evaluated at view time, returns the redirect URL
                          i.e. where to go if redirect_func is truthy.
    * redirect_field ---- What field to set in the url, defaults to Django's.
                          Set this to None to exclude it from the URL.
    """
    def decorator(view_fn):
        def _wrapped_view(request, *args, **kwargs):
            if redirect_func(request.user):
                # We must call reverse at the view level, else the threadlocal
                # locale prefixing doesn't take effect.
                redirect_url = redirect_url_func() or reverse('account_login')
                # Redirect back here afterwards?
                if redirect_field:
                    path = urlquote(request.get_full_path())
                    redirect_url = '%s?%s=%s' % (
                        redirect_url, redirect_field, path)
                return HttpResponseRedirect(redirect_url)
            # Not redirected: optionally deny outright.
            if deny_func and deny_func(request.user):
                return HttpResponseForbidden()
            return view_fn(request, *args, **kwargs)
        return wraps(view_fn, assigned=available_attrs(view_fn))(_wrapped_view)
    return decorator
def logout_required(redirect):
    """Requires that the user *not* be logged in."""
    def redirect_func(u):
        return u.is_authenticated()
    if callable(redirect):
        # Used bare as @logout_required on a view: send users home.
        return user_access_decorator(
            redirect_func, redirect_field=None,
            redirect_url_func=lambda: reverse('home'))(redirect)
    # Used as @logout_required('/url') with an explicit destination.
    return user_access_decorator(redirect_func, redirect_field=None,
                                 redirect_url_func=lambda: redirect)
def login_required(func, login_url=None, redirect=REDIRECT_FIELD_NAME,
                   only_active=True):
    """Requires that the user is logged in."""
    if only_active:
        def redirect_func(u):
            return not (u.is_authenticated() and u.is_active)
    else:
        def redirect_func(u):
            return not u.is_authenticated()
    return user_access_decorator(redirect_func, redirect_field=redirect,
                                 redirect_url_func=lambda: login_url)(func)
def permission_required(perm, login_url=None, redirect=REDIRECT_FIELD_NAME,
                        only_active=True):
    """A replacement for django.contrib.auth.decorators.permission_required
    that doesn't ask authenticated users to log in."""
    def redirect_func(u):
        return not u.is_authenticated()
    if only_active:
        def deny_func(u):
            return not (u.is_active and u.has_perm(perm))
    else:
        def deny_func(u):
            return not u.has_perm(perm)
    return user_access_decorator(redirect_func, redirect_field=redirect,
                                 redirect_url_func=lambda: login_url,
                                 deny_func=deny_func)
def _resolve_lookup((model, lookup, arg_name), view_kwargs):
    """Return the object indicated by the lookup triple and the kwargs passed
    to the view.
    """
    # NOTE: Python 2-only tuple-parameter unpacking in the signature.
    value = view_kwargs.get(arg_name)
    if value is None:
        raise ValueError("Expected kwarg '%s' not found." % arg_name)
    # `model` may be a model class or a dotted "app.Model" string.
    if isinstance(model, basestring):
        model_class = get_model(*model.split('.'))
    else:
        model_class = model
    if model_class is None:
        raise ValueError("The given argument '%s' is not a valid model." %
                         model)
    if inspect.isclass(model_class) and not issubclass(model_class, Model):
        raise ValueError("The argument '%s' needs to be a model." % model)
    return get_object_or_404(model_class, **{lookup: value})
# django never_cache isn't as thorough as we might like
# http://stackoverflow.com/a/2095648/571420
# http://stackoverflow.com/a/2068407/571420
# https://developer.mozilla.org/en-US/docs/Web/HTTP/Caching_FAQ
def never_cache(view_func):
    """View decorator that disables caching more thoroughly than Django's
    built-in never_cache (adds Pragma and Expires as well)."""
    @wraps(view_func, assigned=available_attrs(view_func))  # keep view metadata
    def _wrapped_view_func(request, *args, **kwargs):
        resp = view_func(request, *args, **kwargs)
        resp['Cache-Control'] = 'no-cache, no-store, must-revalidate'
        resp['Pragma'] = 'no-cache'
        resp['Expires'] = '0'
        return resp
    return _wrapped_view_func
def is_superuser(u):
    """Test callable for user_passes_test: True for superusers, False for
    anonymous users; raises PermissionDenied for other authenticated users."""
    if not u.is_authenticated():
        return False
    if u.is_superuser:
        return True
    raise PermissionDenied
# View decorator built from is_superuser: redirects anonymous users to login
# and raises PermissionDenied for authenticated non-superusers.
superuser_required = user_passes_test(is_superuser)
#: A decorator to use for requiring a superuser
def block_user_agents(view_func):
    """View decorator returning 403 for requests whose User-Agent matches any
    regex in settings.BLOCKABLE_USER_AGENTS."""
    # Compile the configured patterns once, at decoration time.
    patterns = [re.compile(agent)
                for agent in getattr(settings, 'BLOCKABLE_USER_AGENTS', [])]
    def agent_blocked_view(request, *args, **kwargs):
        user_agent = request.META.get('HTTP_USER_AGENT', None)
        if user_agent is not None:
            for pattern in patterns:
                if pattern.search(user_agent):
                    return HttpResponseForbidden()
        return view_func(request, *args, **kwargs)
    return wraps(view_func,
                 assigned=available_attrs(view_func))(agent_blocked_view)
| {
"repo_name": "surajssd/kuma",
"path": "kuma/core/decorators.py",
"copies": "13",
"size": "6063",
"license": "mpl-2.0",
"hash": 8322105994741524000,
"line_mean": 37.3734177215,
"line_max": 79,
"alpha_frac": 0.6394524163,
"autogenerated": false,
"ratio": 4.0285714285714285,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": null,
"num_lines": null
} |
from functools import wraps
import inspect
import sys
from .dogpile import cache_get
from .dogpile import cache_set
PY3 = sys.version_info[0] == 3
def default_key_fun_impl(fun, *args, **kwargs):
    """Build the default cache key: function name, module, then each call
    argument as "name-value", sorted by argument name."""
    call_args = inspect.getcallargs(fun, *args, **kwargs)
    arg_parts = ["%s-%s" % (arg, call_args[arg]) for arg in sorted(call_args)]
    return "%s-%s-%s" % (fun.__name__, fun.__module__, '-'.join(arg_parts))
def cacheable(cache, key=None, ttl=60, is_enabled=True):
    """
    Decorator for cacheable function
    """
    def decorator(fxn):
        # Resolve the key strategy once, at decoration time: a callable key,
        # the default key builder, or a constant key value.
        if callable(key):
            key_fun = key
        elif key is None:
            key_fun = default_key_fun_impl
        else:
            key_fun = lambda fxn, *args, **kwargs: key
        @wraps(fxn)
        def wrapper(*args, **kwargs):
            if not is_enabled:
                return fxn(*args, **kwargs)
            cache_key = key_fun(fxn, *args, **kwargs)
            data = cache_get(cache, cache_key)
            if data is None:
                # Miss: compute and store for `ttl` seconds.
                data = fxn(*args, **kwargs)
                cache_set(cache, cache_key, data, ttl)
            return data
        return wrapper
    return decorator
def iterkeys(d, **kw):
    """Portable iterator over a mapping's keys (Python 2 and 3)."""
    return iter(d.keys(**kw)) if PY3 else d.iterkeys(**kw)
| {
"repo_name": "ryankanno/django-utilities",
"path": "django_utilities/cache/decorators.py",
"copies": "1",
"size": "1310",
"license": "mit",
"hash": -4278481537653593000,
"line_mean": 25.2,
"line_max": 76,
"alpha_frac": 0.5290076336,
"autogenerated": false,
"ratio": 3.569482288828338,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.45984899224283377,
"avg_score": null,
"num_lines": null
} |
from functools import wraps
import inspect
from .annotate import Annotate, ObjDescr, Cache, Set
class Observable:
    """Mixin that lets other objects subscribe to events from this one."""
    def __init__(self, *args, **kws):
        super().__init__(*args, **kws)
        self.observers = []
    def notify(self, event):
        """Forward ``event`` to every subscribed observer's ``notify``."""
        for observer in self.observers:
            observer.notify(event)
    def subscribe(self, observer):
        """Register ``observer`` to receive future events."""
        self.observers.append(observer)
    def unsubscribe(self, observer):
        """Remove a previously subscribed ``observer``."""
        self.observers.remove(observer)
def emitting(f):
    """Wrap a method so every call notifies the object's observers with an
    Event describing the call and its result."""
    signature = inspect.getfullargspec(f)
    @wraps(f)
    def wrapped(self, *args, **kws):
        outcome = f(self, *args, **kws)
        self.notify(Event(f, signature, outcome, self, args, kws))
        return outcome
    # Tag so @observes can recognize this method as an observable event.
    wrapped.__event__ = f.__name__
    return wrapped
class Event:
    """Snapshot of one emitting-method call: function, argspec, result,
    receiver and arguments. Unknown attributes resolve to the call's named
    arguments (kwargs, then positionals+defaults, then kw-only defaults)."""
    def __init__(self, f, spec, result, obj, args, kws):
        self.f = f
        self.spec = spec  # argspec of the emitting function
        self.obj = obj
        self.args = args
        self.kws = kws
        self.result = result
    @property
    def __name__(self):
        return self.f.__name__
    def __getattr__(self, name):
        spec = self.spec
        args = self.args + (spec.defaults or ())
        # TODO varargs, varkw
        for dct in [self.kws,
                    # don't bind self arg
                    dict(zip(spec.args[1:], args)),
                    spec.kwonlydefaults or {}]:
            try:
                return dct[name]
            except KeyError:
                pass
        else:
            raise AttributeError('%r object has no attribute %r' % (type(self), name))
    def __str__(self):
        # BUGFIX: was "'{}-event' % self.__name__" -- a {}-style template
        # combined with the % operator, which raises TypeError at runtime.
        return '{}-event'.format(self.__name__)
    def __repr__(self):
        # don't bind self arg
        args = self.spec.args[1:] + self.spec.kwonlyargs
        # TODO varargs, varkw
        return '{}({})->{!r}'.format(self.__name__, ', '.join('{}={!r}'.format(name, getattr(self, name))
                                                              for name in args), self.result)
class observes(Annotate, ObjDescr, Cache, Set):
    """
    annotation for an observable that an observer watches
    Example:
    ========
    >>> class Model(Observable):
    ...     @emitting
    ...     def foo(self, num, suf='bar'):
    ...         return str(num)+suf
    ...
    ...     def __str__(self):
    ...         return 'foo-model'
    ...
    ... class View:
    ...     def __init__(self, model):
    ...         self.model = model
    ...
    ...     @observes
    ...     def model(self, model : Model):
    ...         print("we are watching {}".format(model))
    ...
    ...     @model.foo
    ...     def on_foo(self, event):
    ...         print("{} emitting {!r}".format(self.model, event))
    ...
    ... m = Model()
    ... v = View(m)
    ... m.foo(42)
    we are watching foo-model
    foo-model emitting foo(num=42, suf='bar')->'42bar'
    """
    def __init__(self, definition):
        super().__init__(definition)
        try:
            # The watched model's type comes from the single type annotation
            # on the decorated definition.
            self.typ, = definition.__annotations__.values()
        except ValueError:
            raise ValueError('@observes defintion should contain a type annotation')
        self.subscriptions = {}  # event name -> handler function
    def __set__(self, obj, obs):
        super().__set__(obj, obs)
        self.definition(obj, obs)
        if self.subscriptions:
            # Adapter that dispatches incoming events to the handler
            # registered for that event's name, bound to `obj`.
            class SubsCaller:
                @classmethod
                def notify(call, event):
                    subs = self.subscriptions.get(event.__name__)
                    if subs:
                        subs(obj, event)
            obs.subscribe(SubsCaller())
    def __getattr__(self, name):
        # `@model.foo` style: return a registrar for the named event.
        event = getattr(self.typ, name)
        if not hasattr(event, '__event__'):
            # NOTE(review): message is never %-formatted with `name` --
            # likely missing `% name`; verify before relying on it.
            raise ValueError('%r is not an observable event of the model')
        def subscribe(f):
            self.subscriptions[name] = f
            return f
        return subscribe
| {
"repo_name": "wabu/pyadds",
"path": "pyadds/observe.py",
"copies": "1",
"size": "3979",
"license": "mit",
"hash": 3836000373971468000,
"line_mean": 26.6319444444,
"line_max": 105,
"alpha_frac": 0.506157326,
"autogenerated": false,
"ratio": 4.0560652395514785,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.007245355686752832,
"num_lines": 144
} |
from functools import wraps
import inspect
from django.apps import apps
from django.contrib.auth import REDIRECT_FIELD_NAME
from django.db.models import Model
from django.http import HttpResponseForbidden, HttpResponseRedirect
from django.shortcuts import get_object_or_404
from django.utils.decorators import available_attrs
from django.utils.http import urlquote
from kitsune.access import utils as access
from kitsune.sumo.urlresolvers import reverse
def user_access_decorator(redirect_func, redirect_url_func, deny_func=None,
                          redirect_field=REDIRECT_FIELD_NAME):
    """
    Helper function that returns a decorator.
    * redirect_func ----- If truthy, a redirect will occur
    * deny_func --------- If truthy, HttpResponseForbidden is returned.
    * redirect_url_func - Evaluated at view time, returns the redirect URL
                          i.e. where to go if redirect_func is truthy.
    * redirect_field ---- What field to set in the url, defaults to Django's.
                          Set this to None to exclude it from the URL.
    """
    def decorator(view_fn):
        def _wrapped_view(request, *args, **kwargs):
            redirect = redirect_func(request.user)
            if redirect and not request.is_ajax():
                # We must call reverse at the view level, else the threadlocal
                # locale prefixing doesn't take effect.
                redirect_url = redirect_url_func() or reverse('users.login')
                # Redirect back here afterwards?
                if redirect_field:
                    path = urlquote(request.get_full_path())
                    redirect_url = '%s?%s=%s' % (
                        redirect_url, redirect_field, path)
                return HttpResponseRedirect(redirect_url)
            elif ((redirect and request.is_ajax()) or
                  (deny_func and deny_func(request.user))):
                # Ajax callers get a 403 instead of a redirect.
                return HttpResponseForbidden()
            return view_fn(request, *args, **kwargs)
        return wraps(view_fn, assigned=available_attrs(view_fn))(_wrapped_view)
    return decorator
def logout_required(redirect):
    """Requires that the user *not* be logged in."""
    redirect_func = lambda user: user.is_authenticated()
    if callable(redirect):
        # Used bare as @logout_required on a view: send users home.
        return user_access_decorator(
            redirect_func, redirect_field=None,
            redirect_url_func=lambda: reverse('home'))(redirect)
    # Used as @logout_required('/url') with an explicit destination.
    return user_access_decorator(redirect_func, redirect_field=None,
                                 redirect_url_func=lambda: redirect)
def login_required(func, login_url=None, redirect=REDIRECT_FIELD_NAME,
                   only_active=True):
    """Requires that the user is logged in."""
    if only_active:
        redirect_func = lambda u: not (u.is_authenticated() and u.is_active)
    else:
        redirect_func = lambda u: not u.is_authenticated()
    return user_access_decorator(redirect_func, redirect_field=redirect,
                                 redirect_url_func=lambda: login_url)(func)
def permission_required(perm, login_url=None, redirect=REDIRECT_FIELD_NAME,
                        only_active=True):
    """A replacement for django.contrib.auth.decorators.permission_required
    that doesn't ask authenticated users to log in."""
    if only_active:
        deny_func = lambda u: not (u.is_active and u.has_perm(perm))
    else:
        deny_func = lambda u: not u.has_perm(perm)
    return user_access_decorator(lambda u: not u.is_authenticated(),
                                 redirect_field=redirect,
                                 redirect_url_func=lambda: login_url,
                                 deny_func=deny_func)
def has_perm_or_owns_or_403(perm, owner_attr, obj_lookup, perm_obj_lookup,
                            **kwargs):
    """Act like permission_required_or_403 but also grant permission to owners.
    Arguments:
    perm: authority permission to check, e.g. 'forums_forum.edit_forum'
    owner_attr: Attr of model object that references the owner
    obj_lookup: Triple that specifies a lookup to the object on which
        ownership should be compared. Items in the tuple are...
        (model class or import path thereof,
         kwarg name specifying field and comparator (e.g. 'id__exact'),
         name of kwarg containing the value to which to compare)
    perm_obj_lookup: Triple that specifies a lookup to the object on which
        to check for permission. Elements of the tuple are as in
        obj_lookup.
    """
    def decorator(view_func):
        def _wrapped_view(request, *args, **kwargs):
            # based on authority/decorators.py
            user = request.user
            if user.is_authenticated():
                # Resolve both target objects from the view kwargs, then
                # grant on ownership or on the explicit permission.
                obj = _resolve_lookup(obj_lookup, kwargs)
                perm_obj = _resolve_lookup(perm_obj_lookup, kwargs)
                granted = access.has_perm_or_owns(user, perm, obj, perm_obj,
                                                  owner_attr)
                if granted or user.has_perm(perm):
                    return view_func(request, *args, **kwargs)
            # In all other cases, permission denied
            return HttpResponseForbidden()
        return wraps(view_func)(_wrapped_view)
    return decorator
def _resolve_lookup((model, lookup, arg_name), view_kwargs):
    """Return the object indicated by the lookup triple and the kwargs passed
    to the view.
    """
    # NOTE: Python 2-only tuple-parameter unpacking in the signature.
    value = view_kwargs.get(arg_name)
    if value is None:
        raise ValueError("Expected kwarg '%s' not found." % arg_name)
    # `model` may be a model class or a dotted "app.Model" string.
    if isinstance(model, basestring):
        model_class = apps.get_model(*model.split('.'))
    else:
        model_class = model
    if model_class is None:
        raise ValueError("The given argument '%s' is not a valid model." %
                         model)
    if inspect.isclass(model_class) and not issubclass(model_class, Model):
        raise ValueError("The argument '%s' needs to be a model." % model)
    return get_object_or_404(model_class, **{lookup: value})
| {
"repo_name": "anushbmx/kitsune",
"path": "kitsune/access/decorators.py",
"copies": "1",
"size": "6230",
"license": "bsd-3-clause",
"hash": -3773355657351788500,
"line_mean": 38.9358974359,
"line_max": 79,
"alpha_frac": 0.6110754414,
"autogenerated": false,
"ratio": 4.375,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.54860754414,
"avg_score": null,
"num_lines": null
} |
from functools import wraps
import inspect
from django.contrib.auth import REDIRECT_FIELD_NAME
from django.contrib.auth.decorators import user_passes_test
from django.core.exceptions import PermissionDenied
from django.db.models import Model, get_model
from django.http import HttpResponseForbidden, HttpResponseRedirect
from django.shortcuts import get_object_or_404
from django.utils.decorators import available_attrs
from django.utils.http import urlquote
from kuma.core.urlresolvers import reverse
def user_access_decorator(redirect_func, redirect_url_func, deny_func=None,
                          redirect_field=REDIRECT_FIELD_NAME):
    """
    Helper function that returns a decorator.
    * redirect_func ----- If truthy, a redirect will occur
    * deny_func --------- If truthy, HttpResponseForbidden is returned.
    * redirect_url_func - Evaluated at view time, returns the redirect URL
                          i.e. where to go if redirect_func is truthy.
    * redirect_field ---- What field to set in the url, defaults to Django's.
                          Set this to None to exclude it from the URL.
    """
    def decorator(view_fn):
        def _wrapped_view(request, *args, **kwargs):
            if redirect_func(request.user):
                # We must call reverse at the view level, else the threadlocal
                # locale prefixing doesn't take effect.
                redirect_url = redirect_url_func() or reverse('account_login')
                # Redirect back here afterwards?
                if redirect_field:
                    path = urlquote(request.get_full_path())
                    redirect_url = '%s?%s=%s' % (
                        redirect_url, redirect_field, path)
                return HttpResponseRedirect(redirect_url)
            # Not redirected: optionally deny outright.
            if deny_func and deny_func(request.user):
                return HttpResponseForbidden()
            return view_fn(request, *args, **kwargs)
        return wraps(view_fn, assigned=available_attrs(view_fn))(_wrapped_view)
    return decorator
def logout_required(redirect):
    """Requires that the user *not* be logged in."""
    def redirect_func(u):
        return u.is_authenticated()
    if callable(redirect):
        # Used bare as @logout_required on a view: send users home.
        return user_access_decorator(
            redirect_func, redirect_field=None,
            redirect_url_func=lambda: reverse('home'))(redirect)
    # Used as @logout_required('/url') with an explicit destination.
    return user_access_decorator(redirect_func, redirect_field=None,
                                 redirect_url_func=lambda: redirect)
def login_required(func, login_url=None, redirect=REDIRECT_FIELD_NAME,
                   only_active=True):
    """Requires that the user is logged in."""
    if only_active:
        def redirect_func(u):
            return not (u.is_authenticated() and u.is_active)
    else:
        def redirect_func(u):
            return not u.is_authenticated()
    return user_access_decorator(redirect_func, redirect_field=redirect,
                                 redirect_url_func=lambda: login_url)(func)
def permission_required(perm, login_url=None, redirect=REDIRECT_FIELD_NAME,
                        only_active=True):
    """A replacement for django.contrib.auth.decorators.permission_required
    that doesn't ask authenticated users to log in."""
    def needs_login(user):
        return not user.is_authenticated()
    if only_active:
        def is_denied(user):
            return not (user.is_active and user.has_perm(perm))
    else:
        def is_denied(user):
            return not user.has_perm(perm)
    return user_access_decorator(needs_login, redirect_field=redirect,
                                 redirect_url_func=lambda: login_url,
                                 deny_func=is_denied)
def _resolve_lookup((model, lookup, arg_name), view_kwargs):
    """Return the object indicated by the lookup triple and the kwargs passed
    to the view.

    NOTE: uses Python 2 tuple parameter unpacking (removed by PEP 3113) and
    `basestring`, so this file is Python 2 only.

    * model ----- model class, or 'app_label.ModelName' import string
    * lookup ---- field+comparator kwarg name, e.g. 'id__exact'
    * arg_name -- view kwarg holding the value to compare against
    """
    value = view_kwargs.get(arg_name)
    if value is None:
        raise ValueError("Expected kwarg '%s' not found." % arg_name)
    if isinstance(model, basestring):
        # Dotted path: resolve via Django's (old-style) app registry.
        model_class = get_model(*model.split('.'))
    else:
        model_class = model
    if model_class is None:
        raise ValueError("The given argument '%s' is not a valid model." %
                         model)
    if inspect.isclass(model_class) and not issubclass(model_class, Model):
        raise ValueError("The argument '%s' needs to be a model." % model)
    return get_object_or_404(model_class, **{lookup: value})
# django never_cache isn't as thorough as we might like
# http://stackoverflow.com/a/2095648/571420
# http://stackoverflow.com/a/2068407/571420
# https://developer.mozilla.org/en-US/docs/Web/HTTP/Caching_FAQ
def never_cache(view_func):
    """Decorator that marks a view's response as never-cacheable.

    Sets Cache-Control (HTTP/1.1), Pragma (HTTP/1.0) and Expires headers
    on whatever response the wrapped view returns.
    """
    # Fixed: the wrapper previously lost view_func's __name__/__doc__
    # because it was not wrapped with functools.wraps (other decorators
    # in this file already do this).
    @wraps(view_func)
    def _wrapped_view_func(request, *args, **kwargs):
        resp = view_func(request, *args, **kwargs)
        resp['Cache-Control'] = 'no-cache, no-store, must-revalidate'
        resp['Pragma'] = 'no-cache'
        resp['Expires'] = '0'
        return resp
    return _wrapped_view_func
def is_superuser(u):
    """Predicate for `user_passes_test`: True for superusers, False for
    anonymous users; authenticated non-superusers raise PermissionDenied
    (producing a 403 instead of a login redirect)."""
    if not u.is_authenticated():
        return False
    if u.is_superuser:
        return True
    raise PermissionDenied
# Wraps a view so only superusers may access it; is_superuser raises
# PermissionDenied (403) for authenticated non-superusers and anonymous
# users fall through user_passes_test's login redirect.
superuser_required = user_passes_test(is_superuser)
#: A decorator to use for requiring a superuser
| {
"repo_name": "chirilo/kuma",
"path": "kuma/core/decorators.py",
"copies": "13",
"size": "5296",
"license": "mpl-2.0",
"hash": 6766798111961048000,
"line_mean": 37.3768115942,
"line_max": 79,
"alpha_frac": 0.6378398792,
"autogenerated": false,
"ratio": 4.061349693251533,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.00122229159752695,
"num_lines": 138
} |
from functools import wraps
import inspect
from django.contrib.auth import REDIRECT_FIELD_NAME
from django.db.models import Model, get_model
from django.http import HttpResponseForbidden, HttpResponseRedirect
from django.shortcuts import get_object_or_404
from django.utils.decorators import available_attrs
from django.utils.http import urlquote
from kitsune import access
from kitsune.sumo.urlresolvers import reverse
def user_access_decorator(redirect_func, redirect_url_func, deny_func=None,
                          redirect_field=REDIRECT_FIELD_NAME):
    """
    Helper function that returns a decorator.
    * redirect func ----- If truthy, a redirect will occur
    * deny_func --------- If truthy, HttpResponseForbidden is returned.
    * redirect_url_func - Evaluated at view time, returns the redirect URL
      i.e. where to go if redirect_func is truthy.
    * redirect_field ---- What field to set in the url, defaults to Django's.
      Set this to None to exclude it from the URL.

    AJAX requests that would have been redirected get a 403 instead,
    since redirects are useless to XHR callers.
    """
    def decorator(view_fn):
        def _wrapped_view(request, *args, **kwargs):
            redirect = redirect_func(request.user)
            if redirect and not request.is_ajax():
                # We must call reverse at the view level, else the threadlocal
                # locale prefixing doesn't take effect.
                redirect_url = redirect_url_func() or reverse('users.login')
                # Redirect back here afterwards?
                if redirect_field:
                    path = urlquote(request.get_full_path())
                    redirect_url = '%s?%s=%s' % (
                        redirect_url, redirect_field, path)
                return HttpResponseRedirect(redirect_url)
            elif ((redirect and request.is_ajax()) or
                  (deny_func and deny_func(request.user))):
                # Forbidden: either an AJAX caller that should redirect,
                # or an explicit denial from deny_func.
                return HttpResponseForbidden()
            return view_fn(request, *args, **kwargs)
        return wraps(view_fn, assigned=available_attrs(view_fn))(_wrapped_view)
    return decorator
def logout_required(redirect):
    """Requires that the user *not* be logged in."""
    def currently_logged_in(user):
        return user.is_authenticated()
    if not callable(redirect):
        # @logout_required('/url') form: send logged-in users to `redirect`.
        return user_access_decorator(currently_logged_in,
                                     redirect_field=None,
                                     redirect_url_func=lambda: redirect)
    # Bare @logout_required form: send logged-in users home.
    home_decorator = user_access_decorator(
        currently_logged_in, redirect_field=None,
        redirect_url_func=lambda: reverse('home'))
    return home_decorator(redirect)
def login_required(func, login_url=None, redirect=REDIRECT_FIELD_NAME,
                   only_active=True):
    """Requires that the user is logged in."""
    if only_active:
        def must_redirect(user):
            return not (user.is_authenticated() and user.is_active)
    else:
        def must_redirect(user):
            return not user.is_authenticated()
    wrap = user_access_decorator(must_redirect, redirect_field=redirect,
                                 redirect_url_func=lambda: login_url)
    return wrap(func)
def permission_required(perm, login_url=None, redirect=REDIRECT_FIELD_NAME,
                        only_active=True):
    """A replacement for django.contrib.auth.decorators.permission_required
    that doesn't ask authenticated users to log in."""
    def anonymous(user):
        return not user.is_authenticated()
    if only_active:
        def lacks_perm(user):
            return not (user.is_active and user.has_perm(perm))
    else:
        def lacks_perm(user):
            return not user.has_perm(perm)
    return user_access_decorator(anonymous, redirect_field=redirect,
                                 redirect_url_func=lambda: login_url,
                                 deny_func=lacks_perm)
def has_perm_or_owns_or_403(perm, owner_attr, obj_lookup, perm_obj_lookup,
                            **kwargs):
    """Act like permission_required_or_403 but also grant permission to owners.
    Arguments:
    perm: authority permission to check, e.g. 'forums_forum.edit_forum'
    owner_attr: Attr of model object that references the owner
    obj_lookup: Triple that specifies a lookup to the object on which
    ownership should be compared. Items in the tuple are...
    (model class or import path thereof,
    kwarg name specifying field and comparator (e.g. 'id__exact'),
    name of kwarg containing the value to which to compare)
    perm_obj_lookup: Triple that specifies a lookup to the object on which
    to check for permission. Elements of the tuple are as in
    obj_lookup.

    NOTE(review): the outer **kwargs is accepted but never read — presumably
    kept for signature compatibility; confirm before removing.
    """
    def decorator(view_func):
        def _wrapped_view(request, *args, **kwargs):
            # based on authority/decorators.py
            user = request.user
            if user.is_authenticated():
                # Resolve both target objects from the view kwargs
                # (404s if either lookup fails).
                obj = _resolve_lookup(obj_lookup, kwargs)
                perm_obj = _resolve_lookup(perm_obj_lookup, kwargs)
                granted = access.has_perm_or_owns(user, perm, obj, perm_obj,
                                                  owner_attr)
                if granted or user.has_perm(perm):
                    return view_func(request, *args, **kwargs)
            # In all other cases, permission denied
            return HttpResponseForbidden()
        return wraps(view_func)(_wrapped_view)
    return decorator
def _resolve_lookup((model, lookup, arg_name), view_kwargs):
    """Return the object indicated by the lookup triple and the kwargs passed
    to the view.

    NOTE: relies on Python 2 tuple parameter unpacking (removed by PEP 3113)
    and `basestring`; Python 2 only.
    """
    value = view_kwargs.get(arg_name)
    if value is None:
        raise ValueError("Expected kwarg '%s' not found." % arg_name)
    if isinstance(model, basestring):
        # 'app_label.ModelName' import string -> model class
        model_class = get_model(*model.split('.'))
    else:
        model_class = model
    if model_class is None:
        raise ValueError("The given argument '%s' is not a valid model." %
                         model)
    if inspect.isclass(model_class) and not issubclass(model_class, Model):
        raise ValueError("The argument '%s' needs to be a model." % model)
    return get_object_or_404(model_class, **{lookup: value})
| {
"repo_name": "dbbhattacharya/kitsune",
"path": "kitsune/access/decorators.py",
"copies": "1",
"size": "6167",
"license": "bsd-3-clause",
"hash": -7837029995147165000,
"line_mean": 40.1133333333,
"line_max": 79,
"alpha_frac": 0.6173179828,
"autogenerated": false,
"ratio": 4.291579679888657,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5408897662688656,
"avg_score": null,
"num_lines": null
} |
from functools import wraps
import inspect
from peeringdb.resource import RESOURCES_BY_TAG
def reftag_to_cls(fn):
    """
    decorator that checks function arguments for `concrete` and `resource`
    and will properly set them to class references if a string (reftag) is
    passed as the value

    Only positional arguments are translated; keyword arguments pass
    through untouched.
    """
    spec = inspect.getfullargspec(fn)
    names = spec.args

    @wraps(fn)
    def wrapped(*args, **kwargs):
        # args[0] is the backend instance (`self`), which owns the
        # reftag -> class mappings.
        backend = args[0]
        # Fixed: args is a tuple, so item assignment raised TypeError;
        # copy to a list first.  Also fixed the off-by-one where args[i]
        # was read one position behind the parameter name being checked.
        args = list(args)
        for i, name in enumerate(names[1:], start=1):
            if i >= len(args):
                # remaining parameters were passed as kwargs or defaulted
                break
            value = args[i]
            if name == "concrete" and isinstance(value, str):
                args[i] = backend.REFTAG_CONCRETE[value]
            elif name == "resource" and isinstance(value, str):
                args[i] = backend.REFTAG_RESOURCE[value]
        return fn(*args, **kwargs)

    return wrapped
class Field:
    """
    We use this to provide field instances to backends that
    don't use classes to describe their fields
    """

    def __init__(self, name):
        self.name = name
        # Fixed: previously assigned the undefined name `column`, which
        # raised NameError on every instantiation; the column mirrors
        # the field name.
        self.column = name
class EmptyContext:
    """
    No-op context manager, used wherever a real context is optional.
    """

    def __enter__(self):
        return None

    def __exit__(self, *exc_info):
        return None
class Base:
    """
    Backend base class.

    Do NOT extend this directly when implementing a new backend, instead
    extend Interface below.
    """

    # Handleref tag to resource class mapping
    REFTAG_RESOURCE = RESOURCES_BY_TAG

    @property
    def CONCRETE_MAP(self):
        # Lazily build and memoize the inverse of RESOURCE_MAP.
        try:
            return self._CONCRETE_MAP
        except AttributeError:
            self._CONCRETE_MAP = {
                concrete: res for (res, concrete) in self.RESOURCE_MAP.items()
            }
            return self._CONCRETE_MAP

    def get_concrete(self, res):
        """
        returns the concrete class for the resource

        Returns:
            - concrete class
        """
        return self.RESOURCE_MAP[res]

    def is_concrete(self, cls):
        """
        check if concrete class exists in the resource -> concrete mapping

        Returns:
            - bool: True if class exists in the resource -> concrete mapping
        """
        return cls in self.CONCRETE_MAP

    def get_resource(self, cls):
        """
        returns the resource class for the concrete class

        Returns:
            - resource class
        """
        return self.CONCRETE_MAP[cls]
class Interface(Base):
    """
    backend adapter interface

    extend this when making a new backend
    """

    # Resource class to concrete class mapping
    # should go in here
    RESOURCE_MAP = {}

    # Handleref tag to concrete class mapping
    # should go in here
    REFTAG_CONCRETE = {}

    @classmethod
    def validation_error(cls, concrete=None):
        """
        should return the exception class that will
        be raised when an object fails validation

        Arguments:
            - concrete: if your backend has class specific validation
              errors and this is set, return the exception class that would be
              raised for this concrete class.

        Returns:
            - Exception class
        """
        return Exception

    @classmethod
    def object_missing_error(cls, concrete=None):
        """
        should return the exception class that will
        be raised when an object cannot be found

        Arguments:
            - concrete: if your backend has class specific object missing
              errors and this is set, return the exception class that would be
              raised for this concrete class.

        Returns:
            - Exception class
        """
        return Exception

    @classmethod
    def atomic_transaction(cls):
        """
        Allows you to return an atomic transaction context
        if your backend supports it, if it does not, leave as is

        This should never return None

        Returns:
            - python context instance
        """
        return EmptyContext()

    @classmethod
    def setup(cls):
        """
        operations that need to be done ONCE during runtime
        to prepare usage for the backend
        """
        pass

    # INTERFACE (REQUIRED)
    # The following methods are required to be overwritten in
    # your backend and will raise a NotImplementedError if
    # they are not.
    #
    # when overriding make sure you also apply the `reftag_to_cls`
    # decorator on the methods that need it

    @reftag_to_cls
    def create_object(self, concrete, **data):
        """
        should create object from dict and return it

        Arguments:
            - concrete: concrete class

        Keyword Arguments:
            - object field names -> values
        """
        raise NotImplementedError()

    # TODO:
    def delete_all(self):
        """
        Delete all objects, essentially empty the database
        """
        raise NotImplementedError()

    def detect_missing_relations(self, obj, exc):
        """
        Should parse error messages and collect the missing relationship
        errors as a dict of Resource -> {id set} and return it

        Arguments:
            - obj: concrete object instance
            - exc: exception instance

        Returns:
            - dict: {Resource : [ids]}
        """
        raise NotImplementedError()

    def detect_uniqueness_error(self, exc):
        """
        Should parse error message and collect any that describe violations
        of a uniqueness constraint.

        return the corresponding fields, else None

        Arguments:
            - exc: exception instance

        Returns:
            - list: list of fields
            - None: if no uniqueness errors
        """
        raise NotImplementedError()

    @reftag_to_cls
    def get_field_names(self, concrete):
        """
        Should return a list of field names for the concrete class

        Arguments:
            - concrete: concrete class

        Returns:
            - list: [<str>,...]
        """
        raise NotImplementedError()

    @reftag_to_cls
    def get_field_concrete(self, concrete, field_name):
        """
        Return concrete class for relationship by field name

        Arguments:
            - concrete: concrete class
            - field_name

        Returns:
            - concrete class
        """
        raise NotImplementedError()

    @reftag_to_cls
    def get_object(self, concrete, id):
        """
        should return instance of object with matching id

        Arguments:
            - concrete: concrete class
            - id: object primary key value

        Returns:
            - concrete instance
        """
        raise NotImplementedError()

    @reftag_to_cls
    def get_object_by(self, concrete, field_name, value):
        """
        very simply search function that should return
        collection of objects where field matches value

        Arguments:
            - concrete: concrete class
            - field_name: query this field for a match
            - value: match this value (simple equal matching)

        Returns:
            - concrete instance
        """
        raise NotImplementedError()

    @reftag_to_cls
    def get_objects(self, concrete, ids=None):
        """
        should return collection of objects

        Arguments:
            - concrete: concrete class
            - ids: if specified should be a list of primary
              key values and only objects matching those
              values should be returned

        Returns:
            - collection of concrete instances
        """
        raise NotImplementedError()

    @reftag_to_cls
    def get_objects_by(self, concrete, field, value):
        """
        very simple search function that should return
        collection of objects where field matches value

        Arguments:
            - concrete: concrete class
            - field_name: query this field for a match
            - value: match this value (simple equal matching)

        Returns:
            - collection of concrete instances
        """
        raise NotImplementedError()

    @reftag_to_cls
    def is_field_related(self, concrete, field_name):
        """
        Should return a tuple containing bools on whether
        a field signifies a relationship and if it's a single
        relationship or a relationship to multiple objects

        Arguments:
            - concrete: concrete class
            - field_name: query this field for a match

        Returns:
            - tuple: (bool related, bool many)
        """
        raise NotImplementedError()

    @reftag_to_cls
    def last_change(self, concrete):
        """
        should return unix epoch timestamp of the `updated` field
        of the most recently updated object

        Arguments:
            - concrete: concrete class

        Returns:
            - int
        """
        raise NotImplementedError()

    def save(self, obj):
        """
        Save the object instance

        Arguments:
            - obj: concrete object instance
        """
        raise NotImplementedError()

    def set_relation_many_to_many(self, obj, field_name, objs):
        """
        Setup a many to many relationship

        Arguments:
            - obj: concrete object instance
            - field_name: name of the field that holds the relationship
            - objs: collection of concrete objects to setup relationships with
        """
        raise NotImplementedError()

    def update(self, obj, field_name, value):
        """
        update field on a concrete instance to value

        this does not have to commit to the database, which will be
        handled separately via the `save` method.

        Arguments:
            - obj: concrete object instance
            - field_name
            - value
        """
        setattr(obj, field_name, value)

    ## INTERFACE (OPTIONAL / SITUATIONAL)

    @reftag_to_cls
    def get_field(self, concrete, field_name):
        """
        Should retrun a field instance, if your backend does not use
        classes to describe fields, leave this as is

        Arguments:
            - concrete: concrete class
            - field_name

        Returns:
            - field instance
        """
        return Field(field_name)

    @reftag_to_cls
    def get_fields(self, concrete):
        """
        Should return a collection of fields, if your backend does not
        use classes to describe fields, leave this as is

        Arguments:
            - concrete: concrete class

        Returns:
            - collection of field instances
        """
        # Fixed: previously called the nonexistent `self.field_names`,
        # which raised AttributeError; the accessor defined above is
        # `get_field_names`.
        return [Field(name) for name in self.get_field_names(concrete)]

    def clean(self, obj):
        """
        Should take an object instance and validate / clean it

        Arguments:
            - obj: concrete object instance
        """
        pass

    @reftag_to_cls
    def convert_field(self, concrete, field_name, value):
        """
        Should take a value and a field definition and do a value
        conversion if needed.

        should return the new value.

        Arguments:
            - concrete: concrete class
            - field_name
            - value
        """
        pass

    def migrate_database(self, verbosity=0):
        """
        Do database migrations

        Arguments:
            - verbosity <int>: arbitrary verbosity setting, 0 = be silent,
              1 = show some info about migrations.
        """
        pass

    def is_database_migrated(self, **kwargs):
        """
        Should return whether the database is fully migrated

        Returns:
            - bool
        """
        return True
| {
"repo_name": "peeringdb/peeringdb-py",
"path": "peeringdb/backend.py",
"copies": "1",
"size": "11779",
"license": "apache-2.0",
"hash": 5302282113664410000,
"line_mean": 22.7002012072,
"line_max": 80,
"alpha_frac": 0.5731386366,
"autogenerated": false,
"ratio": 5.020886615515772,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.00009758728739278706,
"num_lines": 497
} |
from functools import wraps
import inspect
class PreconditionError(TypeError):
    """Raised when a precondition is malformed or fails at call time."""
def preconditions(*precs):
    """Decorator factory: guard a function with predicate `precs`.

    Each precondition is a callable whose non-defaulted parameter names
    must match parameters of the decorated function; defaulted parameters
    are "closure args" that must NOT collide with the function's own
    parameters.  At call time every precondition is evaluated against the
    bound call arguments and a PreconditionError is raised on failure.

    NOTE(review): uses `inspect.getargspec` (and its `.keywords` attr),
    which was removed in Python 3.11 — this module targets Python 2.
    """
    stripped_source = lambda obj: inspect.getsource(obj).strip()
    if not precs:
        # This edge case makes ``@preconditions()`` efficiently delegate
        # to the wrapped function, which I anticipate will be useful
        # for stubbing and code consistency in applications:
        def null_decorator(f):
            f.nopre = f # Meet the .nopre interface requirement.
            return f
        return null_decorator
    precinfo = []
    for p in precs:
        spec = inspect.getargspec(p)
        if spec.varargs or spec.keywords:
            raise PreconditionError(
                ('Invalid precondition must not accept * nor ** args:\n' +
                 ' {!s}\n')
                .format(stripped_source(p)))
        # Split the precondition's params: the trailing defaulted ones are
        # closure args (values captured via defaults), the rest must name
        # parameters of the decorated function.
        i = -len(spec.defaults or ())
        if i == 0:
            appargs, closureargs = spec.args, []
        else:
            appargs, closureargs = spec.args[:i], spec.args[i:]
        precinfo.append( (appargs, closureargs, p) )
    def decorate(f):
        fspec = inspect.getargspec(f)
        # Validate each precondition's parameters against f's signature
        # once, at decoration time, so misuse fails early.
        for (appargs, closureargs, p) in precinfo:
            for apparg in appargs:
                if apparg not in fspec.args:
                    raise PreconditionError(
                        ('Invalid precondition refers to unknown parameter {!r}:\n' +
                         ' {!s}\n' +
                         'Known parameters: {!r}\n')
                        .format(
                            apparg,
                            stripped_source(p),
                            fspec.args))
            for carg in closureargs:
                if carg in fspec.args:
                    raise PreconditionError(
                        ('Invalid precondition masks parameter {!r}:\n' +
                         ' {!s}\n' +
                         'Known parameters: {!r}\n')
                        .format(
                            carg,
                            stripped_source(p),
                            fspec.args))
        @wraps(f)
        def g(*a, **kw):
            # Bind the actual call args to parameter names, then feed each
            # precondition the subset of values it declares.
            args = inspect.getcallargs(f, *a, **kw)
            for (appargs, _, p) in precinfo:
                if not p(*[args[aa] for aa in appargs]):
                    raise PreconditionError(
                        'Precondition failed in call {!r}{}:\n {!s}\n'
                        .format(
                            g,
                            inspect.formatargvalues(
                                fspec.args,
                                fspec.varargs,
                                fspec.keywords,
                                args),
                            stripped_source(p)))
            return f(*a, **kw)
        g.nopre = f
        return g
    return decorate
| {
"repo_name": "nejucomo/preconditions",
"path": "preconditions.py",
"copies": "1",
"size": "2859",
"license": "mit",
"hash": -3726849062343543000,
"line_mean": 33.8658536585,
"line_max": 85,
"alpha_frac": 0.4470094439,
"autogenerated": false,
"ratio": 4.6792144026186575,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5626223846518659,
"avg_score": null,
"num_lines": null
} |
from functools import wraps
import itertools
import os
import datetime
import logging
from beaker.cache import cache_region
from rezoirclogs.utils import LogLine
log = logging.getLogger(__name__)
class Base(object):
    """Base resource object, useful for debugging traversal trees."""

    # Pyramid traversal location attributes; overridden per instance.
    __name__ = ''
    __parent__ = None

    def __repr__(self):
        return u'<%s: %s>' % (type(self).__name__, self.__name__)
def jailed(f):
    """Method decorator: only run `f` when the path is inside the
    filesystem's jail root; otherwise fall through and return None."""
    @wraps(f)
    def jailed_f(self, path):
        if not path.startswith(self.root_path):
            return None
        return f(self, path)
    return jailed_f
class Filesystem(object):
    """Jailed wrapper around os.path: path predicates refuse to operate
    outside root_path (the @jailed decorator returns None instead)."""

    def __init__(self, root_path):
        self.root_path = os.path.abspath(os.path.normpath(root_path))

    join = staticmethod(os.path.join)

    @cache_region('short_term')
    @jailed
    def listdir(self, path):
        """List directory entries (cached, jailed)."""
        log.debug('listdir %s', path)
        return os.listdir(path)

    @cache_region('short_term')
    @jailed
    def isrealfile(self, path):
        """True for a regular file that is not a symlink (cached, jailed)."""
        log.debug('isrealfile %s', path)
        return os.path.isfile(path) and not os.path.islink(path)

    @cache_region('short_term')
    @jailed
    def isrealdir(self, path):
        """True for a directory that is not a symlink (cached, jailed)."""
        log.debug('isrealdir %s', path)
        return os.path.isdir(path) and not os.path.islink(path)

    def open(self, path):
        # NOTE(review): open() is not @jailed — callers appear to pass
        # paths already vetted by listdir/isrealfile; confirm.
        log.debug('open %s', path)
        return open(path)

    def __str__(self):
        # Fixed: closing '>' was missing from the representation string.
        return '<Filesystem jailed in %s>' % self.root_path
class LogFile(Base):
    """One day's worth of log lines for a single channel."""

    def __init__(self, filesystem, path, date):
        self.fs = filesystem
        self.path = os.path.abspath(os.path.normpath(path))
        # `date` is a YYYYMMDD string.
        year, month, day = int(date[0:4]), int(date[4:6]), int(date[6:8])
        self.date = datetime.date(year, month, day)

    def __iter__(self):
        """Yield each raw line wrapped as a LogLine."""
        for raw_line in self.fs.open(self.path):
            yield LogLine(raw_line)

    def neighbour(self, n):
        """Return the log file `n` days away, or None if it doesn't exist."""
        target = (self.date + datetime.timedelta(days=n)).strftime('%Y%m%d')
        try:
            return self.__parent__[target]
        except KeyError:
            return None

    @property
    def previous(self):
        return self.neighbour(-1)

    @property
    def next(self):
        return self.neighbour(1)

    def search(self, query, after_date=None):
        """Yield (logfile, line_number, line) for every line containing
        `query`; skipped entirely when this file predates `after_date`."""
        if after_date and after_date > self.date:
            return
        for num, line in enumerate(self):
            if query in line:
                yield (self, num, line)
class Chan(Base):
    """An IRC channel: a directory of dated log files sharing one prefix."""

    def __init__(self, filesystem, path, name):
        self.fs = filesystem
        self.path = os.path.abspath(os.path.normpath(path))
        self.name = name

    def _make_logfile(self, path, date):
        logfile = LogFile(self.fs, path, date)
        logfile.__name__ = date
        logfile.__parent__ = self
        return logfile

    def __getitem__(self, date):
        """Traverse to the log for a YYYYMMDD date; KeyError if absent."""
        candidate = self.fs.join(self.path, '%s.%s.log' % (self.name, date))
        if not self.fs.isrealfile(candidate):
            raise KeyError(date)
        return self._make_logfile(candidate, date)

    def __iter__(self):
        """Yield this channel's log files in chronological order."""
        for filename in sorted(self.fs.listdir(self.path)):
            if not filename.startswith(self.name):
                continue
            path = self.fs.join(self.path, filename)
            if self.fs.isrealfile(path):
                # filename is <chan>.<YYYYMMDD>.log
                yield self._make_logfile(path, filename.rsplit('.', 2)[1])

    def last(self, n):
        """Return the n most recent log files, newest first."""
        return list(self)[:-n-1:-1]

    def search(self, query, after_date=None):
        for logfile in self:
            for hit in logfile.search(query, after_date):
                yield hit
class Directory(Base):
    """A directory resource containing sub-directories and channels."""

    def __init__(self, filesystem, path):
        self.fs = filesystem
        self.path = os.path.abspath(os.path.normpath(path))

    def _make_dir(self, name):
        child = Directory(self.fs, self.fs.join(self.path, name))
        child.__name__ = name
        child.__parent__ = self
        return child

    def _make_chan(self, chan):
        child = Chan(self.fs, self.path, chan)
        child.__name__ = chan
        child.__parent__ = self
        return child

    def __getitem__(self, name):
        """Traverse to a sub-directory or a channel named `name`."""
        candidate = self.fs.join(self.path, name)
        if self.fs.isrealdir(candidate):
            return self._make_dir(name)
        for entry in self.fs.listdir(self.path):
            if (entry.startswith(name) and entry.endswith('.log')
                    and self.fs.isrealfile(self.fs.join(self.path, entry))):
                return self._make_chan(name)
        raise KeyError(name)

    @property
    def dirs(self):
        for entry in sorted(self.fs.listdir(self.path)):
            if self.fs.isrealdir(self.fs.join(self.path, entry)):
                yield self._make_dir(entry)

    @property
    def chans(self):
        log_names = (entry for entry in sorted(self.fs.listdir(self.path))
                     if self.fs.isrealfile(self.fs.join(self.path, entry))
                     and entry.endswith('.log'))
        # Channel name is everything before the ".<date>.log" suffix.
        for chan in set(entry.rsplit('.', 2)[0] for entry in log_names):
            yield self._make_chan(chan)

    def __iter__(self):
        return self.dirs

    def search(self, query, after_date=None):
        for child in itertools.chain(self.dirs, self.chans):
            for hit in child.search(query, after_date):
                yield hit
| {
"repo_name": "supelec-rezo/rezoirclogs",
"path": "rezoirclogs/resources.py",
"copies": "1",
"size": "5193",
"license": "isc",
"hash": 6911226804130822000,
"line_mean": 28.1741573034,
"line_max": 159,
"alpha_frac": 0.5748122473,
"autogenerated": false,
"ratio": 3.5447098976109217,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9616140515036917,
"avg_score": 0.0006763259748006677,
"num_lines": 178
} |
from functools import wraps
import json as json_reader
def get_objects(json):
    """Return the decoded JSON payload, mapping None to False."""
    try:
        if json is None:
            return False
        return json
    except AttributeError:
        raise Exception('Invalid JSON = "%s"' % json)
# Decorators
def disable_for_loaddata(signal_handler):
    """
    Decorator that turns off signal handlers when loading fixture data.
    """
    @wraps(signal_handler)
    def wrapper(*args, **kwargs):
        # Django passes raw=True when saving fixture objects; skip then.
        if kwargs.get('raw'):
            return None
        signal_handler(*args, **kwargs)
    return wrapper
def verify_json(fn):
    """
    Decorator ensuring the first argument is an already-decoded JSON
    container (dict or list subclass) before calling `fn`.

    Raises:
        Exception: when the first argument is not a dict or list.
    """
    # Fixed: the wrapper previously dropped fn's metadata; use wraps
    # like the other decorators in this module.
    @wraps(fn)
    def wrapped(json, *args, **kwargs):
        if not issubclass(json.__class__, (dict, list)):
            raise Exception('''Verify JSON could not find JSON object. Found:\n\n %s''' % (json,))
        else:
            return fn(json, *args, **kwargs)
    return wrapped
def get_data(fn):
    """
    Decorator that json-decodes the (string) first argument and passes
    both the decoded value and the payload from get_objects() to `fn`.

    Returns None without calling `fn` when the payload is falsy
    (e.g. JSON null, empty dict/list).
    """
    # Fixed: the wrapper previously dropped fn's metadata; use wraps
    # like the other decorators in this module.
    @wraps(fn)
    def wrapped(json, *args, **kwargs):
        json = json_reader.loads(json)
        objects = get_objects(json)
        if objects:
            return fn(json, objects, *args, **kwargs)
    return wrapped
def capture_exception(fn):
    """Decorator for (obj, against) comparison functions: re-raises any
    failure with a diagnostic listing the two inputs and their key diffs.

    * missing -- keys in `against` absent from `obj`
    * extra --- keys in `obj` absent from `against`
    """
    def wrapper(obj, against):
        try:
            return fn(obj, against)
        except Exception as e:
            missing = [key for key in against.keys() if not key in obj.keys()]
            extra = [key for key in obj.keys() if not key in against.keys()]
            # NOTE(review): `obj` is printed in full but only
            # `against.keys()` is shown — presumably because `against`
            # maps to lambdas with useless reprs; confirm.
            raise Exception('''%s
            When comparing:
            %s
            ---- against ---
            %s
            -------
            Extra: %s
            Missing: %s
            ''' % (e, obj, against.keys(), extra, missing))
    return wrapper
# Test Functions
def is_none_or(fn):
    """Lift predicate `fn` so that None (or the string 'None') also passes."""
    def checker(x):
        if x is None or x == 'None':
            return True
        return fn(x)
    return checker
def is_array(verify_fn):
    """Build a predicate that is true when every element passes verify_fn."""
    def checker(data):
        return all(verify_fn(item) for item in data)
    return checker
def is_str(x):
    """True for str instances."""
    return isinstance(x, str)

def is_int(x):
    """True for ints, or strings made only of digits."""
    return isinstance(x, int) or (isinstance(x, str) and x.isdigit())

def is_bool(x):
    """True for bools, or the literal strings 'true'/'false'."""
    return isinstance(x, bool) or x == 'true' or x == 'false'

def is_float(x):
    """True for float instances."""
    return isinstance(x, float)

def is_date(x):
    """Dates are represented as integer timestamps."""
    return is_int(x)
# Functions to read datasets
@get_data
@verify_json
def obj_is(json, data, verify_fn):
    # After decoration this is called as obj_is(json_string, verify_fn):
    # get_data decodes the string and supplies `data`, verify_json checks
    # the decoded value is a dict/list.
    return verify_fn(data)
@capture_exception
@verify_json
def verify_json_object(obj, against):
    """
    Validate `obj` against `against`, a dict mapping each expected key to
    a predicate for that key's value.

    Returns:
        True when the key sets match and every predicate passes.

    Raises:
        Exception: on mismatched keys or a failed predicate (re-wrapped
        with diagnostics by capture_exception).
    """
    obj_keys = set(obj.keys())
    against_keys = set(against.keys())
    if obj_keys == against_keys:
        for key in obj.keys():
            lam = against[key]
            val = obj[key]
            if not lam(val):
                raise Exception('''Key error for "%s". Value was "%s"''' % (key, val))
    else:
        # Fixed: these two sets were swapped, so the report labeled extra
        # keys as missing and vice versa (inconsistent with
        # capture_exception's missing/extra convention).
        missing_keys = against_keys - obj_keys
        extra_keys = obj_keys - against_keys
        raise Exception('''Keys were mismatched.
            Missing Keys: %s
            Extra Keys: %s
            ''' % (', '.join(missing_keys), ', '.join(extra_keys)))
    return True
| {
"repo_name": "cdelguercio/slothauth",
"path": "slothauth/tests/utils.py",
"copies": "1",
"size": "3048",
"license": "apache-2.0",
"hash": -510687038420975700,
"line_mean": 25.0512820513,
"line_max": 99,
"alpha_frac": 0.5567585302,
"autogenerated": false,
"ratio": 3.795765877957659,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.48525244081576585,
"avg_score": null,
"num_lines": null
} |
from functools import wraps
import json
from corehq import privileges
from corehq.apps.accounting.models import DefaultProductPlan
from django.http import HttpResponse
from corehq.const import USER_DATE_FORMAT
from django_prbac.decorators import requires_privilege
from django_prbac.exceptions import PermissionDenied
def requires_privilege_with_fallback(slug, **assignment):
    """
    A version of the requires_privilege decorator which falls back
    to the insufficient privileges page with an HTTP Status Code
    of 402 that means "Payment Required"
    """
    def decorate(fn):
        @wraps(fn)
        def wrapped(request, *args, **kwargs):
            try:
                # For trial subscriptions with an end date, attach banner
                # info to the request so templates can prompt an upgrade.
                if (hasattr(request, 'subscription')
                    and request.subscription is not None
                    and request.subscription.is_trial
                    and request.subscription.date_end is not None
                ):
                    edition_req = DefaultProductPlan.get_lowest_edition([slug])
                    plan_name = request.subscription.plan_version.user_facing_description['name']
                    feature_name = privileges.Titles.get_name_from_privilege(slug)
                    request.show_trial_notice = True
                    request.trial_info = {
                        'current_plan': plan_name,
                        'feature_name': feature_name,
                        'required_plan': edition_req,
                        'date_end': request.subscription.date_end.strftime(USER_DATE_FORMAT)
                    }
                    request.is_domain_admin = (hasattr(request, 'couch_user') and
                                               request.couch_user.is_domain_admin(request.domain))
                # Delegate the actual privilege check to django-prbac.
                return requires_privilege(slug, **assignment)(fn)(
                    request, *args, **kwargs
                )
            except PermissionDenied:
                request.show_trial_notice = False
                # Imported here to avoid a circular import at module load.
                from corehq.apps.domain.views import SubscriptionUpgradeRequiredView
                return SubscriptionUpgradeRequiredView().get(
                    request, request.domain, slug
                )
        return wrapped
    return decorate
def requires_privilege_plaintext_response(slug,
                                          http_status_code=None, **assignment):
    """
    A version of the requires_privilege decorator which returns an
    HttpResponse object with HTTP Status Code of 412 by default and
    content_type of tex/plain if the privilege is not found.
    """
    def decorate(fn):
        @wraps(fn)
        def wrapped(request, *args, **kwargs):
            try:
                guarded = requires_privilege(slug, **assignment)(fn)
                return guarded(request, *args, **kwargs)
            except PermissionDenied:
                status = http_status_code or 412
                return HttpResponse(
                    "You have lost access to this feature.",
                    status=status, content_type="text/plain",
                )
        return wrapped
    return decorate
def requires_privilege_json_response(slug, http_status_code=None,
                                     get_response=None, **assignment):
    """
    A version of the requires privilege decorator which returns an
    HttpResponse object with an HTTP Status Code of 401 by default
    and content_type application/json if the privilege is not found.
    `get_response` is an optional parameter where you can specify the
    format of response given an error message and status code.
    The default response is:
    ```
    {
        'code': http_status_Code,
        'message': error_message
    }
    ```
    """
    http_status_code = http_status_code or 401
    if get_response is None:
        get_response = lambda msg, code: {'code': code, 'message': msg}
    def decorate(fn):
        @wraps(fn)
        def wrapped(request, *args, **kwargs):
            try:
                return requires_privilege(slug, **assignment)(fn)(
                    request, *args, **kwargs)
            except PermissionDenied:
                error_message = "You have lost access to this feature."
                response = get_response(error_message, http_status_code)
                # Fixed: the status was hard-coded to 401 here, silently
                # ignoring the caller-supplied http_status_code.
                return HttpResponse(json.dumps(response),
                                    content_type="application/json",
                                    status=http_status_code)
        return wrapped
    return decorate
def requires_privilege_for_commcare_user(slug, **assignment):
    """
    A version of the requires_privilege decorator which requires
    the specified privilege only for CommCareUsers.
    """
    def decorate(fn):
        @wraps(fn)
        def wrapped(request, *args, **kwargs):
            # Web users skip the privilege check entirely.
            if (hasattr(request, 'couch_user')
                    and request.couch_user.is_web_user()):
                return fn(request, *args, **kwargs)
            guarded = requires_privilege_with_fallback(slug, **assignment)(fn)
            return guarded(request, *args, **kwargs)
        return wrapped
    return decorate
| {
"repo_name": "qedsoftware/commcare-hq",
"path": "corehq/apps/accounting/decorators.py",
"copies": "1",
"size": "5026",
"license": "bsd-3-clause",
"hash": 2744651253475444000,
"line_mean": 38.8888888889,
"line_max": 98,
"alpha_frac": 0.5829685635,
"autogenerated": false,
"ratio": 4.777566539923955,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.0011581900584044919,
"num_lines": 126
} |
from functools import wraps
import json
from django.contrib import messages
from django.utils.encoding import force_unicode
from django.utils.safestring import mark_safe
from django.utils.translation import ugettext
from corehq import privileges
from corehq.apps.accounting.models import DefaultProductPlan
from django.http import Http404, HttpResponse
from corehq.const import USER_DATE_FORMAT
from django_prbac.decorators import requires_privilege
from django_prbac.exceptions import PermissionDenied
def requires_privilege_with_fallback(slug, **assignment):
    """
    A version of the requires_privilege decorator which falls back
    to the insufficient privileges page with an HTTP Status Code
    of 402 that means "Payment Required"
    """
    def decorate(fn):
        @wraps(fn)
        def wrapped(request, *args, **kwargs):
            try:
                # For trial subscriptions with an end date, attach banner
                # info to the request so templates can prompt an upgrade.
                if (hasattr(request, 'subscription')
                    and request.subscription is not None
                    and request.subscription.is_trial
                    and request.subscription.date_end is not None
                ):
                    # Unlike the older variant, the lowest qualifying
                    # edition here is resolved per-domain.
                    edition_req = DefaultProductPlan.get_lowest_edition_by_domain(
                        request.domain, [slug]
                    )
                    plan_name = request.subscription.plan_version.user_facing_description['name']
                    feature_name = privileges.Titles.get_name_from_privilege(slug)
                    request.show_trial_notice = True
                    request.trial_info = {
                        'current_plan': plan_name,
                        'feature_name': feature_name,
                        'required_plan': edition_req,
                        'date_end': request.subscription.date_end.strftime(USER_DATE_FORMAT)
                    }
                    request.is_domain_admin = (hasattr(request, 'couch_user') and
                                               request.couch_user.is_domain_admin(request.domain))
                # Delegate the actual privilege check to django-prbac.
                return requires_privilege(slug, **assignment)(fn)(
                    request, *args, **kwargs
                )
            except PermissionDenied:
                request.show_trial_notice = False
                # Imported here to avoid a circular import at module load.
                from corehq.apps.domain.views import SubscriptionUpgradeRequiredView
                return SubscriptionUpgradeRequiredView().get(
                    request, request.domain, slug
                )
        return wrapped
    return decorate
def requires_privilege_plaintext_response(slug,
        http_status_code=None, **assignment):
    """
    A version of the requires_privilege decorator which returns an
    HttpResponse object with HTTP Status Code of 412 by default and
    content_type of text/plain if the privilege is not found.
    """
    def decorate(fn):
        @wraps(fn)
        def wrapped(request, *args, **kwargs):
            try:
                # Delegate enforcement to django_prbac's requires_privilege.
                return requires_privilege(slug, **assignment)(fn)(
                    request, *args, **kwargs
                )
            except PermissionDenied:
                # Plain-text error; 412 unless the caller chose a status.
                return HttpResponse(
                    "You have lost access to this feature.",
                    status=http_status_code or 412, content_type="text/plain",
                )
        return wrapped
    return decorate
def requires_privilege_json_response(slug, http_status_code=None,
                                     get_response=None, **assignment):
    """
    A version of the requires privilege decorator which returns an
    HttpResponse object with an HTTP Status Code of 405 by default
    and content_type application/json if the privilege is not found.
    `get_response` is an optional parameter where you can specify the
    format of response given an error message and status code.
    The default response is:
    ```
    {
        'code': http_status_code,
        'message': error_message
    }
    ```
    todo accounting for API requests
    """
    http_status_code = http_status_code or 405
    if get_response is None:
        get_response = lambda msg, code: {'code': code, 'message': msg}

    def decorate(fn):
        @wraps(fn)
        def wrapped(request, *args, **kwargs):
            try:
                return requires_privilege(slug, **assignment)(fn)(
                    request, *args, **kwargs)
            except PermissionDenied:
                error_message = "You have lost access to this feature."
                response = get_response(error_message, http_status_code)
                # Bugfix: the status code was computed but never applied,
                # so the error response went out as HTTP 200 despite the
                # documented default of 405.
                return HttpResponse(json.dumps(response),
                                    content_type="application/json",
                                    status=http_status_code)
        return wrapped
    return decorate
def requires_privilege_for_commcare_user(slug, **assignment):
    """Require the ``slug`` privilege for CommCare users only.

    A web user is let straight through to the view; any other user goes
    through the fallback-enabled privilege check.
    """
    def decorate(fn):
        @wraps(fn)
        def wrapped(request, *args, **kwargs):
            couch_user = getattr(request, 'couch_user', None)
            if couch_user is not None and couch_user.is_web_user():
                return fn(request, *args, **kwargs)
            checked_view = requires_privilege_with_fallback(slug, **assignment)(fn)
            return checked_view(request, *args, **kwargs)
        return wrapped
    return decorate
| {
"repo_name": "puttarajubr/commcare-hq",
"path": "corehq/apps/accounting/decorators.py",
"copies": "1",
"size": "5308",
"license": "bsd-3-clause",
"hash": -3320427301677467600,
"line_mean": 38.9097744361,
"line_max": 98,
"alpha_frac": 0.5892991711,
"autogenerated": false,
"ratio": 4.79494128274616,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.588424045384616,
"avg_score": null,
"num_lines": null
} |
from functools import wraps
import json
from django.db.models.query import ValuesQuerySet
from django.http import HttpResponseBadRequest, HttpResponse, HttpResponseForbidden
from devilry.defaults.encoding import CHARSET
class SerializableResult(object):
    """Holds a Python object destined for serialization by
    :class:`devilry.simplified.serializers.SerializerRegistry`, together
    with the response class and encoding to use when rendering it."""
    def __init__(self, result, httpresponsecls=HttpResponse, encoding=CHARSET):
        self.encoding = encoding
        self.result = result
        self.httpresponsecls = httpresponsecls
class ErrorMsgSerializableResult(SerializableResult):
    """Serializable wrapper around a single error message, rendered as
    ``{"errormessages": [message]}``."""
    def __init__(self, errormessage, httpresponsecls):
        payload = dict(errormessages=[errormessage])
        super(ErrorMsgSerializableResult, self).__init__(
            payload, httpresponsecls=httpresponsecls)
class ForbiddenSerializableResult(ErrorMsgSerializableResult):
    """403 Forbidden result; uses the exception's message when one is
    provided (Python 2 ``exception.message``), otherwise 'Forbidden'."""
    def __init__(self, exception=None):
        errormessage = 'Forbidden'
        if exception and exception.message:
            errormessage = exception.message
        super(ForbiddenSerializableResult, self).__init__(
            errormessage, HttpResponseForbidden)
class InvalidUsernameSerializableResult(ErrorMsgSerializableResult):
    """400 Bad Request result reporting an invalid username."""
    def __init__(self, username):
        message = 'Invalid username: {0}'.format(username)
        super(InvalidUsernameSerializableResult, self).__init__(
            message, HttpResponseBadRequest)
class SerializerRegistryItem(object):
    """Pair of callables for one content type: ``serializer`` renders a
    result, ``deserializer`` parses incoming request data."""
    def __init__(self, serializer, deserializer):
        self.deserializer = deserializer
        self.serializer = serializer
class SerializerRegistry(dict):
    """dict mapping communication formats (mime types) to
    :class:`SerializerRegistryItem` entries."""

    def create_response(self, result, comformat, content_type=None):
        """Serialize ``result`` and wrap it in its HTTP response class.

        ``content_type`` overrides the header's type while serialization
        still follows ``comformat``.
        """
        item = self[comformat]
        effective_type = content_type if content_type else comformat
        body = item.serializer(result.result)
        header_value = '{0}; encoding={1}'.format(effective_type, result.encoding)
        return result.httpresponsecls(body, content_type=header_value)

    def deserialize(self, comformat, data):
        """Parse ``data`` with the deserializer registered for ``comformat``."""
        return self[comformat].deserializer(data)
def json_serialize_handler(obj):
    """``default`` hook for json.dumps: flattens Django ValuesQuerySets to
    lists and formats date/datetime-like objects as ISO-8601 strings."""
    if isinstance(obj, ValuesQuerySet):
        return list(obj)
    if hasattr(obj, 'isoformat'):
        # Date/datetime values: fixed format without microseconds/timezone.
        return obj.strftime('%Y-%m-%dT%H:%M:%S')
    raise TypeError('Object of type %s with value of %s is not JSON serializable' % (
        type(obj), repr(obj)))
def json_serialize(s):
    """Serialize ``s`` to pretty-printed JSON, delegating querysets and
    dates to :func:`json_serialize_handler`."""
    return json.dumps(s, indent=2, default=json_serialize_handler)
# Global registry of supported communication formats.
# Currently only JSON is registered.
serializers = SerializerRegistry()
serializers['application/json'] = SerializerRegistryItem(json_serialize, json.loads)
def _serialize(content_type_override=None):
    """Decorator factory that serializes a view's SerializableResult using
    the communication format requested by the client.

    :param content_type_override: if given, used as the response's
        content type instead of the negotiated format.
    """
    def decorator(f):
        @wraps(f)
        def wrapper(self, request, *args, **kwargs):
            # NOTE(review): Django exposes the Accept header via
            # request.META['HTTP_ACCEPT']; the bare 'Accept' key likely
            # never matches, so this always falls back to
            # application/json -- confirm before changing.
            comformat = request.META.get('Accept', 'application/json')
            if comformat not in serializers:
                # Bugfix: HttpResponseBadRequest has no ``format`` kwarg
                # (it would raise TypeError); the intended keyword is
                # ``content_type``.
                return HttpResponseBadRequest(
                    "Bad request: %s" % comformat,
                    content_type='text/plain; encoding={0}'.format(CHARSET))
            self.comformat = comformat
            result = f(self, request, *args, **kwargs)  # returns a SerializableResult object
            return serializers.create_response(result, comformat, content_type_override)
        return wrapper
    return decorator
def serialize(f=None, content_type_override=None):
    """Decorator to serialize a view's response.

    Usable both bare (``@serialize``) and with arguments
    (``@serialize(content_type_override=...)``).

    :param content_type_override: Override content type of response.
        Serialization still uses the communication format from the
        Accept header; only the content-type header is replaced. Mainly
        useful when browsers need a text/html response to behave, such
        as with file upload.
    """
    decorator = _serialize(content_type_override=content_type_override)
    return decorator(f) if f else decorator
| {
"repo_name": "vegarang/devilry-django",
"path": "devilry/restful/serializers.py",
"copies": "1",
"size": "4059",
"license": "bsd-3-clause",
"hash": 1314081677700295700,
"line_mean": 40,
"line_max": 116,
"alpha_frac": 0.6644493718,
"autogenerated": false,
"ratio": 4.6020408163265305,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.004590491287522981,
"num_lines": 99
} |
from functools import wraps
import json
from functools import update_wrapper
from flask import Response
from flask import make_response, request, current_app
from ifsApprover import db
def check_auth(username, password):
    """Return whether the username/password pair is valid, as reported by
    the database layer."""
    return db.check_login(username, password)
def authenticate():
    """Build the 401 response that makes the browser prompt for HTTP
    basic-auth credentials."""
    body = ('Could not verify your access level for that URL.\n'
            'You have to login with proper credentials')
    headers = {'WWW-Authenticate': 'Basic realm="Login Required"'}
    return Response(body, 401, headers)
def requires_auth(f):
    """View decorator enforcing HTTP basic auth via ``check_auth``."""
    @wraps(f)
    def decorated(*args, **kwargs):
        credentials = request.authorization
        if credentials and check_auth(credentials.username, credentials.password):
            return f(*args, **kwargs)
        # Missing or invalid credentials: challenge the client.
        return authenticate()
    return decorated
def copy_fields(db_row, fields):
    """Return a new dict containing only the keys listed in ``fields``,
    with values taken from ``db_row``."""
    return {key: db_row[key] for key in fields}
def make_json_response(data=None, status="ok"):
    """Wrap ``data`` in the standard ``{"status": ..., "data": ...}``
    envelope and return it as a JSON response.

    :param data: JSON-serializable payload; defaults to an empty dict.
        (Previously a mutable ``{}`` default argument -- replaced with
        the ``None`` sentinel idiom; behavior for callers is unchanged.)
    :param status: status string reported to the client.
    """
    payload = json.dumps({
        "status": status,
        "data": {} if data is None else data,
    })
    return Response(payload, mimetype='application/json')
# some parts from http://flask.pocoo.org/snippets/56/
def crossdomain(methods=None,
                attach_to_all=True,
                automatic_options=True):
    """Decorator factory adding CORS headers to a Flask view.

    :param methods: optional iterable of allowed HTTP methods; when None,
        the allowed methods are taken from Flask's default OPTIONS response.
    :param attach_to_all: when False, headers are only added to OPTIONS
        responses.
    :param automatic_options: when True, OPTIONS requests are answered with
        Flask's default options response instead of calling the view.
    """
    if methods is not None:
        # Normalize once to the comma-separated header form.
        methods = ', '.join(sorted(x.upper() for x in methods))
    def get_methods():
        if methods is not None:
            return methods
        # Fall back to whatever Flask would advertise for this route.
        options_resp = current_app.make_default_options_response()
        return options_resp.headers['allow']
    def decorator(f):
        def wrapped_function(*args, **kwargs):
            if automatic_options and request.method == 'OPTIONS':
                resp = current_app.make_default_options_response()
            else:
                resp = make_response(f(*args, **kwargs))
            if not attach_to_all and request.method != 'OPTIONS':
                return resp
            h = resp.headers
            h['Access-Control-Allow-Credentials'] = "true"
            # Echo the caller's Origin when present; '*' otherwise.
            h['Access-Control-Allow-Origin'] = request.headers.get("Origin", "*")
            h['Access-Control-Allow-Methods'] = get_methods()
            # allow every header that was requested
            if "Access-Control-Request-Headers" in request.headers:
                h['Access-Control-Allow-Headers'] = request.headers["Access-Control-Request-Headers"]
            return resp
        # Disable Flask's automatic OPTIONS so this wrapper sees them.
        f.provide_automatic_options = False
        return update_wrapper(wrapped_function, f)
return decorator | {
"repo_name": "ktt-ol/ifs-approver",
"path": "backend/ifsApprover/web/helper.py",
"copies": "1",
"size": "2645",
"license": "mit",
"hash": 6319484061007921000,
"line_mean": 29.4137931034,
"line_max": 101,
"alpha_frac": 0.6223062382,
"autogenerated": false,
"ratio": 4.252411575562701,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5374717813762702,
"avg_score": null,
"num_lines": null
} |
from functools import wraps
import json
from werkzeug.wrappers import Response
from flask import request
import trafaret as t
def check_data(trafaret):
    """Decorator factory: validate ``request`` with the given trafaret
    checker and merge the validated data into the view's kwargs.

    On validation failure, returns ``(error_dict, 400)``.
    """
    def outer(method):
        @wraps(method)
        def inner(self, *a, **kw):
            try:
                kw.update(trafaret.check(request))
                return method(self, *a, **kw)
            except t.DataError as error:
                return error.as_dict(), 400
        return inner
    return outer
def encode_result(encoders):
    """Content-negotiation decorator.

    Picks an encoder from ``encoders`` whose mime type matches the
    request's Accept header; with no Accept header at all, the first
    encoder wins. When nothing matches, the view is not called and a
    406 response listing the supported mime types is returned.
    """
    def get_encoder(encoders):
        # No Accept header: default to the first encoder's first mime.
        if not request.accept_mimetypes:
            encoder = encoders[0]
            return encoder, encoder.mimes()[0]
        for encoder in encoders:
            for mime in encoder.mimes():
                if mime in request.accept_mimetypes:
                    return encoder, mime
        # Bugfix: the original comprehension had its for-clauses in the
        # wrong order ([mime for mime in encoder.mimes() for encoder in
        # encoders]), so it read ``encoder`` as a leaked loop variable and
        # listed only one encoder's mimes. List every supported mime.
        return None, repr([mime for encoder in encoders for mime in encoder.mimes()])
    def wrapper(view):
        @wraps(view)
        def inner(*a, **kw):
            encoder, mime = get_encoder(encoders)
            if encoder is None:
                # 406 Not Acceptable; body carries the supported mimes.
                return Response(mime, 406)
            res = view(*a, **kw)
            # Views may return either a bare result or a (result, status,
            # ...) tuple; only the first element is encoded.
            if isinstance(res, tuple):
                res = (encoder.encode(res[0], mime=mime),) + res[1:]
            else:
                res = (encoder.encode(res, mime=mime), )
            return Response(*res, content_type=mime)
        return inner
    return wrapper
class JSONEncoder(object):
    """Sample JSON encoder usable with the ``encode_result`` decorator."""

    def mimes(self):
        """Mime types this encoder can produce."""
        return ('application/json', )

    def encode(self, data, mime=None):
        """Serialize ``data`` to a JSON string; ``mime`` is ignored."""
        return json.dumps(data)
| {
"repo_name": "Deepwalker/Flask-Bundle",
"path": "flask_bundle/utils.py",
"copies": "1",
"size": "1976",
"license": "bsd-3-clause",
"hash": -4657894230963616000,
"line_mean": 29.875,
"line_max": 85,
"alpha_frac": 0.5774291498,
"autogenerated": false,
"ratio": 4.240343347639485,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5317772497439486,
"avg_score": null,
"num_lines": null
} |
from functools import wraps
import json
import base64
from urllib import unquote_plus
from oauth2client.client import OAuth2WebServerFlow
from oauth2client.client import AccessTokenRefreshError
from oauth2client.django_orm import Storage
from oauth2client import xsrfutil
from django.http import HttpResponseRedirect
from django.http import HttpResponseBadRequest
from django.conf import settings
import httplib2
import apiclient.discovery
from models import CredentialsModel
class GApi(object):
    """Google OAuth2 web-flow helper bound to one client id/secret/scope.

    Wraps an ``OAuth2WebServerFlow`` and provides view decorators that run
    the two-step authorization dance, persisting per-user credentials in
    ``CredentialsModel``.
    """
    def __init__(self, client_id='', client_secret='', scope='', redirect_uri=None):
        # offline access + forced approval prompt so a refresh token is
        # always issued.
        self.flow = OAuth2WebServerFlow(client_id,
                                        client_secret,
                                        scope,
                                        redirect_uri=redirect_uri,
                                        access_type='offline',
                                        approval_prompt='force')
    def oauth2_required(self, view_function):
        """
        Decorator function that will initiate OAUTH2 WEB flow with google services
        if the request's user has no (valid) stored credential; otherwise
        refreshes the credential when expired and calls the view.
        :param view_function: view to guard
        :return: wrapped view
        """
        @wraps(view_function)
        def wrapper(request, *args, **kwargs):
            def oauth2_step1():
                # Redirect the user to Google's consent page, carrying a
                # CSRF token (and any extra state) in the 'state' param.
                state = {
                    # token to check on redirect
                    'token': xsrfutil.generate_token(settings.SECRET_KEY, request.user)
                }
                # extra params that need to be kept over the auth process
                if 'oauth2_state' in kwargs:
                    state['oauth2_state'] = kwargs['oauth2_state']
                # encode the whole stuff
                base64_state = base64.urlsafe_b64encode(str(json.dumps(state)))
                # set the oauth2 state param
                self.flow.params['state'] = base64_state
                authorize_url = self.flow.step1_get_authorize_url()
                return HttpResponseRedirect(authorize_url)
            storage = Storage(CredentialsModel, 'id', request.user, 'credential')
            credential = storage.get()
            if credential is None or credential.invalid is True:
                return oauth2_step1()
            else:
                # refresh credential if needed
                if credential.access_token_expired:
                    try:
                        credential.refresh(httplib2.Http())
                    except AccessTokenRefreshError:
                        # Refresh token rejected: restart the flow.
                        return oauth2_step1()
                # remove existing oauth2_state params
                if 'oauth2_state' in kwargs:
                    del kwargs['oauth2_state']
                return view_function(request, *args, **kwargs)
        return wrapper
    def oauth2_redirect(self, view_function):
        """
        Decorator function to handle the redirect after the OAUTH2 WEB process:
        validates the CSRF token from the state param, exchanges the auth
        code for a credential, stores it, and calls the view.
        :param view_function: view handling the post-auth redirect
        :return: wrapped view
        """
        @wraps(view_function)
        def wrapper(request, *args, **kwargs):
            # decode the oauth2 state param
            # NOTE(review): request.REQUEST is the old combined GET/POST
            # dict from early Django versions -- confirm target version.
            state_str = str(request.REQUEST['state'])
            # fix here state might be urlencoded twice along the way and sucks if that happens
            while '%' in state_str:
                state_str = unquote_plus(state_str)
            state = json.loads(base64.urlsafe_b64decode(state_str))
            # validate token
            if not 'token' in state or not xsrfutil.validate_token(settings.SECRET_KEY, str(state['token']),
                                                                   request.user):
                return HttpResponseBadRequest()
            # save oauth2 credential in db
            credential = self.flow.step2_exchange(request.REQUEST)
            storage = Storage(CredentialsModel, 'id', request.user, 'credential')
            storage.put(credential)
            # put oauth2_state params in kwargs
            if 'oauth2_state' in state:
                kwargs['oauth2_state'] = state['oauth2_state']
            return view_function(request, *args, **kwargs)
        return wrapper
    @classmethod
    def get_gservice(cls, request, api_name, version):
        """
        Get a google api service built from the request user's stored
        credential. Assumes a valid credential exists (see oauth2_required).
        :param request: the request to check oauth credential
        :param api_name: Google api name ex 'drive'
        :param version: Google api version name ex 'v2'
        :return: the service object
        """
        storage = Storage(CredentialsModel, 'id', request.user, 'credential')
        credential = storage.get()
        http = httplib2.Http()
        http = credential.authorize(http)
        return apiclient.discovery.build(api_name, version, http=http)
| {
"repo_name": "Fl0r14n/django_googleapi",
"path": "gdrive/gapi.py",
"copies": "1",
"size": "4667",
"license": "mit",
"hash": 4157803400491723300,
"line_mean": 39.9385964912,
"line_max": 108,
"alpha_frac": 0.5731733448,
"autogenerated": false,
"ratio": 4.796505652620761,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.000882779302749271,
"num_lines": 114
} |
from functools import wraps
import json
import flask
from flask import session
from dataactbroker.handlers.aws.session import LoginSession
from dataactbroker.handlers.userHandler import UserHandler
from dataactbroker.handlers.interfaceHolder import InterfaceHolder
def permissions_check(f=None,permissionList=[]):
    """Decorator (usable bare or with arguments) gating a Flask view.

    Logged-in users must hold every permission in ``permissionList``.
    The special entries "check_email_token" / "check_password_token"
    additionally allow registration / password-reset sessions. On
    failure a 401 JSON response with a message is returned.

    NOTE(review): ``permissionList=[]`` is a mutable default argument; it
    is never mutated here, but converting it to a tuple default would be
    safer.
    """
    def actual_decorator(f):
        @wraps(f)
        def decorated_function(*args, **kwargs):
            errorMessage = "Login Required"
            if LoginSession.isLogin(session):
                userDb = UserHandler()
                try:
                    user = userDb.getUserByUID(session["name"])
                    validUser = True
                    # All listed permissions must be held.
                    for permission in permissionList :
                        if(not userDb.hasPermission(user, permission)) :
                            validUser = False
                finally:
                    # Always release the DB handle; an exception from the
                    # lookup propagates past this function after cleanup.
                    InterfaceHolder.closeOne(userDb)
                if(validUser) :
                    return f(*args, **kwargs)
                errorMessage = "Wrong User Type"
            elif "check_email_token" in permissionList:
                # Not logged in, but a registration session may proceed.
                if(LoginSession.isRegistering(session)) :
                    return f(*args, **kwargs)
                else :
                    errorMessage = "unauthorized"
            elif "check_password_token" in permissionList :
                # Not logged in, but a password-reset session may proceed.
                if(LoginSession.isResetingPassword(session)) :
                    return f(*args, **kwargs)
                else :
                    errorMessage = "unauthorized"
            # Fall-through: build the 401 JSON error response.
            returnResponse = flask.Response()
            returnResponse.headers["Content-Type"] = "application/json"
            returnResponse.status_code = 401 # Error code
            responseDict = {}
            responseDict["message"] = errorMessage
            returnResponse.set_data(json.dumps(responseDict))
            return returnResponse
        return decorated_function
    if not f:
        # Called with arguments: @permissions_check(permissionList=[...])
        def waiting_for_func(f):
            return actual_decorator(f)
        return waiting_for_func
    else:
        # Called bare: @permissions_check
        return actual_decorator(f)
| {
"repo_name": "fedspendingtransparency/data-act-broker",
"path": "dataactbroker/permissions.py",
"copies": "1",
"size": "2073",
"license": "cc0-1.0",
"hash": -4166909762013207600,
"line_mean": 38.8653846154,
"line_max": 72,
"alpha_frac": 0.5701881331,
"autogenerated": false,
"ratio": 4.923990498812351,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.006520969657729529,
"num_lines": 52
} |
from functools import wraps
import json
import inspect
from django.db.models import Q
from django.http import Http404
from django.http.response import (
HttpResponse,
HttpResponseBadRequest,
)
from django.shortcuts import (
get_object_or_404,
render,
)
from django.views.decorators.http import require_http_methods
from django.utils.translation import ugettext as _i
from django.utils.timezone import now
from kirppu.app.models import (
Item,
Receipt,
Clerk,
Counter,
ReceiptItem,
Vendor,
)
from kirppu.kirppuauth.models import User
class AjaxError(Exception):
    """Error raised by AJAX views; rendered as a plain-text HTTP response
    carrying the given status code."""
    def __init__(self, status, message='AJAX request failed'):
        self.message = message
        self.status = status

    def render(self):
        """Build the plain-text HTTP response for this error."""
        return HttpResponse(
            self.message,
            status=self.status,
            content_type='text/plain',
        )
# Some HTTP Status codes that are used here.
# Passed as the ``status`` argument of AjaxError throughout this module.
RET_BAD_REQUEST = 400  # Bad request
RET_UNAUTHORIZED = 401  # Unauthorized, though, not expecting Basic Auth...
RET_CONFLICT = 409  # Conflict
RET_AUTH_FAILED = 419  # Authentication timeout
RET_LOCKED = 423  # Locked resource
RET_OK = 200  # OK
def raise_if_item_not_available(item):
    """Raise appropriate AjaxError if item is not in buyable state."""
    state = item.state
    if state == Item.STAGED:
        # Staged somewhere other?
        raise AjaxError(RET_LOCKED, 'Item is already staged to be sold.')
    if state == Item.ADVERTISED:
        raise AjaxError(RET_CONFLICT, 'Item has not been brought to event.')
    if state in (Item.SOLD, Item.COMPENSATED):
        raise AjaxError(RET_CONFLICT, 'Item has already been sold.')
    if state == Item.RETURNED:
        raise AjaxError(RET_CONFLICT, 'Item has already been returned to owner.')
class AjaxFunc(object):
    """Metadata record for one registered AJAX view function."""
    def __init__(self, func, url, method):
        self.url = url                            # url for url config
        self.method = method                      # http method for templates
        self.name = func.func_name                # name of the view function
        self.view_name = 'api_' + self.name       # view name for url config
        self.view = 'kirppu:' + self.view_name    # view name for templates
# Registry for ajax functions. Maps function names to AjaxFuncs.
# Populated as a side effect of the ajax_func decorator below.
AJAX_FUNCTIONS = {}
def checkout_js(request):
    """
    Render the JavaScript file that defines the AJAX API functions.
    """
    context = {'funcs': AJAX_FUNCTIONS}
    return render(request, "app_checkout_api.js", context)
def _get_item_or_404(code):
    """Return the Item with barcode ``code``; raise Http404 when the
    lookup raises DoesNotExist or yields None."""
    found = None
    try:
        found = Item.get_item_by_barcode(code)
    except Item.DoesNotExist:
        pass
    if found is not None:
        return found
    raise Http404(_i(u"No item found matching '{0}'").format(code))
def get_clerk(request):
    """
    Get the Clerk object associated with a request.
    Raise AjaxError if session is invalid or clerk is not found.
    """
    # All three session keys must exist for a valid clerk login.
    for key in ["clerk", "clerk_token", "counter"]:
        if key not in request.session:
            raise AjaxError(RET_UNAUTHORIZED, _i(u"Not logged in."))
    clerk_id = request.session["clerk"]
    clerk_token = request.session["clerk_token"]
    try:
        clerk_object = Clerk.objects.get(pk=clerk_id)
    except Clerk.DoesNotExist:
        raise AjaxError(RET_UNAUTHORIZED, _i(u"Clerk not found."))
    if clerk_object.access_key != clerk_token:
        # Bugfix: this was ``return AjaxError(...)``, which handed the
        # exception object back to callers instead of raising it, so a
        # clerk with a stale/revoked access key slipped through.
        raise AjaxError(RET_UNAUTHORIZED, _i(u"Bye."))
    return clerk_object
def get_counter(request):
    """
    Get the Counter object associated with a request.
    Raise AjaxError if session is invalid or counter is not found.
    """
    if "counter" not in request.session:
        raise AjaxError(RET_UNAUTHORIZED, _i(u"Not logged in."))
    counter_id = request.session["counter"]
    try:
        return Counter.objects.get(pk=counter_id)
    except Counter.DoesNotExist:
        raise AjaxError(
            RET_UNAUTHORIZED,
            _i(u"Counter has gone missing."),
        )
def ajax_func(url, method='POST', counter=True, clerk=True):
    """
    Decorate the view function properly and register it.
    The decorated view will not be called if
    1. the request is not an AJAX request,
    2. the request method does not match the given method,
    3. counter is True but counter has not been validated,
    4. clerk is True but a clerk has not logged in,
    OR
    5. the parameters after the request are not filled by the request
    data.
    """
    def decorator(func):
        # Get argspec before any decoration.
        # NOTE(review): ``defaults`` is captured but unused -- missing
        # request parameters always yield 400 even if a default exists.
        (args, _, _, defaults) = inspect.getargspec(func)
        # Register the function.
        name = func.func_name
        AJAX_FUNCTIONS[name] = AjaxFunc(func, url, method)
        # Decorate func.
        func = require_http_methods([method])(func)
        @wraps(func)
        def wrapper(request, **kwargs):
            if not request.is_ajax():
                return HttpResponseBadRequest("Invalid requester")
            # Pass request params to the view as keyword arguments.
            # The first argument is skipped since it is the request.
            request_data = request.GET if method == 'GET' else request.POST
            for arg in args[1:]:
                try:
                    kwargs[arg] = request_data[arg]
                except KeyError:
                    return HttpResponseBadRequest()
            try:
                # Check session counter and clerk.
                if counter:
                    get_counter(request)
                if clerk:
                    get_clerk(request)
                result = func(request, **kwargs)
            except AjaxError as ae:
                # Convert AjaxErrors (from checks or the view) to responses.
                return ae.render()
            if isinstance(result, HttpResponse):
                return result
            else:
                # Plain Python results are JSON-encoded.
                return HttpResponse(
                    json.dumps(result),
                    status=200,
                    content_type='application/json',
                )
        return wrapper
    return decorator
def item_mode_change(code, from_, to):
    """Move the item identified by ``code`` from state ``from_`` to ``to``
    and return its dict form; 409 when the item is in any other state."""
    item = _get_item_or_404(code)
    if item.state != from_:
        # Item not in expected state.
        raise AjaxError(
            RET_CONFLICT,
            _i(u"Unexpected item state: {state}").format(state=item.state),
        )
    item.state = to
    item.save()
    return item.as_dict()
@ajax_func('^clerk/login$', clerk=False, counter=False)
def clerk_login(request, code, counter):
    """Log a clerk in at a counter and bind both to the session.

    Returns the clerk's dict, augmented with any pending receipt(s):
    a single pending receipt is resumed into the session; multiple
    pending receipts are reported as "MULTIPLE" for the UI to resolve.
    """
    try:
        counter_obj = Counter.objects.get(identifier=counter)
    except Counter.DoesNotExist:
        raise AjaxError(RET_AUTH_FAILED, _i(u"Counter has gone missing."))
    try:
        clerk = Clerk.by_code(code)
    except ValueError:
        clerk = None
    if clerk is None:
        raise AjaxError(RET_AUTH_FAILED, _i(u"Unauthorized."))
    clerk_data = clerk.as_dict()
    # Resume any receipt the clerk left pending.
    active_receipts = Receipt.objects.filter(clerk=clerk, status=Receipt.PENDING)
    if active_receipts:
        if len(active_receipts) > 1:
            clerk_data["receipts"] = [receipt.as_dict() for receipt in active_receipts]
            clerk_data["receipt"] = "MULTIPLE"
        else:
            receipt = active_receipts[0]
            request.session["receipt"] = receipt.pk
            clerk_data["receipt"] = receipt.as_dict()
    # Bind clerk + counter to this session (checked by get_clerk/get_counter).
    request.session["clerk"] = clerk.pk
    request.session["clerk_token"] = clerk.access_key
    request.session["counter"] = counter_obj.pk
    return clerk_data
@ajax_func('^clerk/logout$', clerk=False, counter=False)
def clerk_logout(request):
    """
    Logout currently logged in clerk.
    """
    # Delegate to the reusable helper; an empty 200 response suffices.
    clerk_logout_fn(request)
    return HttpResponse()
def clerk_logout_fn(request):
    """Clear clerk-related keys from the session.

    :param request: Active request, for session access. Reusable from
        other views, not only the logout endpoint.
    """
    for session_key in ("clerk", "clerk_token", "counter"):
        request.session.pop(session_key, None)
@ajax_func('^counter/validate$', clerk=False, counter=False)
def counter_validate(request, code):
    """
    Validates the counter identifier and returns its exact form, if it is
    valid.
    """
    try:
        # Case-insensitive lookup; the canonical identifier is returned.
        counter = Counter.objects.get(identifier__iexact=code)
    except Counter.DoesNotExist:
        raise AjaxError(RET_AUTH_FAILED)
    return {
        "counter": counter.identifier,
        "name": counter.name,
    }
@ajax_func('^item/find$', method='GET')
def item_find(request, code):
    """Look up an item by barcode; with ?available, also require that it
    is in a buyable state."""
    found = _get_item_or_404(code)
    if "available" in request.GET:
        raise_if_item_not_available(found)
    return found.as_dict()
@ajax_func('^item/list$', method='GET')
def item_list(request, vendor):
    """Return all of the given vendor's items as a list of dicts."""
    return [item.as_dict() for item in Item.objects.filter(vendor__id=vendor)]
@ajax_func('^item/checkin$')
def item_checkin(request, code):
    """Mark an advertised item as physically brought to the event."""
    return item_mode_change(code, Item.ADVERTISED, Item.BROUGHT)
@ajax_func('^item/checkout$')
def item_checkout(request, code):
    """Return an unsold brought item back to its owner."""
    return item_mode_change(code, Item.BROUGHT, Item.RETURNED)
@ajax_func('^item/compensate$')
def item_compensate(request, code):
    """Mark a sold item as compensated (proceeds paid to the vendor)."""
    return item_mode_change(code, Item.SOLD, Item.COMPENSATED)
@ajax_func('^vendor/get$', method='GET')
def vendor_get(request, id):
    """Fetch a single vendor by numeric id; 400 on bad id or no match."""
    try:
        vendor = Vendor.objects.get(pk=int(id))
    except (ValueError, Vendor.DoesNotExist):
        raise AjaxError(RET_BAD_REQUEST, _i(u"Invalid vendor id"))
    return vendor.as_dict()
@ajax_func('^vendor/find$', method='GET')
def vendor_find(request, q):
    """Search vendors: every whitespace-separated term in ``q`` must match
    phone, username, name, email, or (if numeric) the vendor id."""
    clauses = [Q(vendor__isnull=False)]
    for term in q.split():
        clause = (
            Q(phone=term)
            | Q(username__icontains=term)
            | Q(first_name__icontains=term)
            | Q(last_name__icontains=term)
            | Q(email__icontains=term)
        )
        try:
            clause |= Q(vendor__id=int(term))
        except ValueError:
            # Non-numeric term: skip the id match.
            pass
        clauses.append(clause)
    return [u.vendor.as_dict() for u in User.objects.filter(*clauses).all()]
@ajax_func('^receipt/start$')
def receipt_start(request):
    """Open a new pending receipt for the session's clerk and counter."""
    new_receipt = Receipt()
    new_receipt.clerk = get_clerk(request)
    new_receipt.counter = get_counter(request)
    new_receipt.save()
    # Remember the active receipt in the session for subsequent calls.
    request.session["receipt"] = new_receipt.pk
    return new_receipt.as_dict()
@ajax_func('^item/reserve$')
def item_reserve(request, code):
    """Stage an item onto the session's active receipt.

    Moves a BROUGHT/MISSING item to STAGED, records a ReceiptItem row and
    refreshes the receipt total; 409 for any other item state.
    """
    item = _get_item_or_404(code)
    receipt_id = request.session["receipt"]
    receipt = get_object_or_404(Receipt, pk=receipt_id)
    raise_if_item_not_available(item)
    if item.state in (Item.BROUGHT, Item.MISSING):
        item.state = Item.STAGED
        item.save()
        ReceiptItem.objects.create(item=item, receipt=receipt)
        # receipt.items.create(item=item)
        receipt.calculate_total()
        receipt.save()
        # Include the running total so the UI can update in one round-trip.
        ret = item.as_dict()
        ret.update(total=receipt.total_cents)
        return ret
    else:
        # Not in expected state.
        raise AjaxError(RET_CONFLICT)
@ajax_func('^item/release$')
def item_release(request, code):
    """Remove an item from the session's active receipt.

    Marks the newest ADD row for the item as REMOVED_LATER, appends a
    REMOVE row, refreshes the total, and puts the item back to BROUGHT.
    """
    item = _get_item_or_404(code)
    receipt_id = request.session["receipt"]
    receipt = get_object_or_404(Receipt, pk=receipt_id)
    # The most recent ADD entry is the one being undone.
    last_added_item = ReceiptItem.objects\
        .filter(receipt=receipt, item=item, action=ReceiptItem.ADD)\
        .order_by("-add_time")
    if len(last_added_item) == 0:
        raise AjaxError(RET_CONFLICT, _i(u"Item is not added to receipt."))
    assert len(last_added_item) == 1
    last_added_item = last_added_item[0]
    last_added_item.action = ReceiptItem.REMOVED_LATER
    last_added_item.save()
    # Keep an explicit REMOVE row for the audit trail.
    removal_entry = ReceiptItem(item=item, receipt=receipt, action=ReceiptItem.REMOVE)
    removal_entry.save()
    receipt.calculate_total()
    receipt.save()
    item.state = Item.BROUGHT
    item.save()
    return removal_entry.as_dict()
@ajax_func('^receipt/finish$')
def receipt_finish(request):
    """Close the session's pending receipt as sold and mark its added
    items SOLD; 409 if the receipt is no longer pending."""
    active_id = request.session["receipt"]
    receipt = get_object_or_404(Receipt, pk=active_id)
    if receipt.status != Receipt.PENDING:
        raise AjaxError(RET_CONFLICT)
    receipt.sell_time = now()
    receipt.status = Receipt.FINISHED
    receipt.save()
    # Bulk-update every item that was ADDed on this receipt.
    Item.objects.filter(receipt=receipt, receiptitem__action=ReceiptItem.ADD).update(state=Item.SOLD)
    del request.session["receipt"]
    return receipt.as_dict()
@ajax_func('^receipt/abort$')
def receipt_abort(request):
    """Abort the session's pending receipt, releasing its items back to
    BROUGHT; 409 if the receipt is no longer pending."""
    receipt_id = request.session["receipt"]
    receipt = get_object_or_404(Receipt, pk=receipt_id)
    if receipt.status != Receipt.PENDING:
        raise AjaxError(RET_CONFLICT)

    # For all ADDed items, add REMOVE-entries and return the real Item's back to available.
    added_items = ReceiptItem.objects.filter(receipt_id=receipt_id, action=ReceiptItem.ADD)
    for receipt_item in added_items.only("item"):
        item = receipt_item.item
        ReceiptItem(item=item, receipt=receipt, action=ReceiptItem.REMOVE).save()
        item.state = Item.BROUGHT
        item.save()
    # Update ADDed items to be REMOVED_LATER. This must be done after the real Items have
    # been updated, and the REMOVE-entries added, as this will change the result set of
    # the original added_items -query (to always return zero entries).
    added_items.update(action=ReceiptItem.REMOVED_LATER)

    # End the receipt. (Must be done after previous updates, so calculate_total calculates
    # correct sum.)
    receipt.sell_time = now()
    receipt.status = Receipt.ABORTED
    receipt.calculate_total()
    receipt.save()
    del request.session["receipt"]
    return receipt.as_dict()
def _get_receipt_data_with_items(**kwargs):
    """Fetch one receipt by the given lookup kwargs (404 on miss) and
    return its dict form with an "items" list in add-time order."""
    receipt = get_object_or_404(Receipt, **kwargs)
    data = receipt.as_dict()
    rows = ReceiptItem.objects.filter(receipt_id=receipt.pk).order_by("add_time")
    data["items"] = [row.as_dict() for row in rows]
    return data
@ajax_func('^receipt$', method='GET')
def receipt_get(request):
    """
    Find receipt by receipt id or one item in the receipt.
    """
    if "id" in request.GET:
        receipt_id = int(request.GET.get("id"))
    elif "item" in request.GET:
        # Resolve via the item's ADD row on some receipt.
        barcode = request.GET.get("item")
        row = get_object_or_404(ReceiptItem, item__code=barcode,
                                action=ReceiptItem.ADD)
        receipt_id = row.receipt_id
    else:
        raise AjaxError(RET_BAD_REQUEST)
    return _get_receipt_data_with_items(pk=receipt_id)
@ajax_func('^receipt/activate$')
def receipt_activate(request):
    """
    Activate previously started pending receipt.
    """
    clerk_id = request.session["clerk"]
    receipt_id = int(request.POST.get("id"))
    # 404 unless the receipt is pending and belongs to this clerk.
    data = _get_receipt_data_with_items(pk=receipt_id, clerk__id=clerk_id,
                                        status=Receipt.PENDING)
    request.session["receipt"] = receipt_id
    return data
| {
"repo_name": "mniemela/kirppu",
"path": "kirppu/app/checkout/api.py",
"copies": "1",
"size": "14825",
"license": "mit",
"hash": 8813514621929916000,
"line_mean": 28.4731610338,
"line_max": 108,
"alpha_frac": 0.6328499157,
"autogenerated": false,
"ratio": 3.6044249939217115,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.47372749096217115,
"avg_score": null,
"num_lines": null
} |
from functools import wraps
import json
import logging
import os
import shutil
import sys
import click
import yaml
from . import clipboard, completion, config, checkers, importers
from .crypt import create_keys, encrypt, decrypt
from .database import Database
from .table import Table
from .utils import genpass, ensure_dependencies
from .history import clone
from .validators import validate_config, validate_cols, validate_remote
__version__ = "1.6.0"
# Click decorator that injects the Database from the context (no auto-create).
pass_db = click.make_pass_decorator(Database, ensure=False)
# Root logger format: "[LEVEL:passpie.module]: message".
logging.basicConfig(format="[%(levelname)s:passpie.%(module)s]: %(message)s")
def ensure_passphrase(passphrase, config):
    """Verify ``passphrase`` by an encrypt/decrypt round-trip.

    Encrypts a known token for the configured recipient and checks the
    passphrase can decrypt it back; raises click.ClickException otherwise.
    """
    token = encrypt('OK', recipient=config['recipient'], homedir=config['homedir'])
    roundtrip = decrypt(token,
                        recipient=config['recipient'],
                        passphrase=passphrase,
                        homedir=config['homedir'])
    if roundtrip != 'OK':
        # Full detail only at debug level; the user sees a short message.
        logging.debug(u"Wrong passphrase for recipient: {} in homedir: {}".format(
            config['recipient'],
            config['homedir'],
        ))
        raise click.ClickException(click.style("Wrong passphrase", fg='red'))
def logging_exception(exceptions=(Exception,)):
    """Decorator factory routing uncaught errors through logging.

    click exceptions always propagate. For the listed exception types the
    behavior depends on the root logger level: DEBUG re-raises (full
    traceback), CRITICAL swallows silently, anything else logs the error
    and exits with status 1.

    :param exceptions: iterable of exception classes to intercept.
        (Previously a mutable ``[Exception]`` default argument; replaced
        with a tuple -- callers passing lists are unaffected.)
    """
    def decorator(func):
        @wraps(func)
        def wrapper(*args, **kwargs):
            try:
                return func(*args, **kwargs)
            except (click.ClickException, click.Abort):
                raise
            except tuple(exceptions) as e:
                logging_level = logging.getLogger().getEffectiveLevel()
                if logging_level == logging.DEBUG:
                    raise
                elif logging_level == logging.CRITICAL:
                    pass
                else:
                    logging.error(str(e))
                    sys.exit(1)
        return wrapper
    return decorator
class AliasGroup(click.Group):
    """Click group that resolves command aliases from the loaded configuration."""

    def get_command(self, ctx, name):
        """Look *name* up directly, then fall back to configured aliases."""
        found = super(AliasGroup, self).get_command(ctx, name)
        if found:
            return found
        alias_map = ctx.params.get('configuration', {}).get('aliases', {})
        if name in alias_map:
            return super(AliasGroup, self).get_command(ctx, alias_map[name])
@click.group(cls=AliasGroup, invoke_without_command=True)
@click.option('-D', '--database', 'path', help='Database path or url to remote repository',
              envvar="PASSPIE_DATABASE")
@click.option('--autopull', help='Autopull changes from remote pository',
              callback=validate_remote, envvar="PASSPIE_AUTOPULL")
@click.option('--autopush', help='Autopush changes to remote pository',
              callback=validate_remote, envvar="PASSPIE_AUTOPUSH")
@click.option('--config', 'configuration', help='Path to configuration file',
              callback=validate_config, type=click.Path(readable=True, exists=True),
              envvar="PASSPIE_CONFIG")
@click.option('-v', '--verbose', help='Activate verbose output', count=True,
              envvar="PASSPIE_VERBOSE")
@click.version_option(version=__version__)
@click.pass_context
def cli(ctx, path, autopull, autopush, configuration, verbose):
    """Root command group: build the Database, set verbosity, default to `list`.

    path/autopull/autopush are not used directly in this body -- presumably
    they are consumed by the option callbacks and the validated configuration;
    confirm against validate_config/validate_remote.
    """
    try:
        ensure_dependencies()
    except RuntimeError as e:
        raise click.ClickException(click.style(str(e), fg='red'))
    # Setup database
    db = Database(configuration)
    ctx.obj = db
    # Verbose: -v -> INFO, -vv (or more) -> DEBUG, default -> CRITICAL only.
    if verbose == 1:
        logging.getLogger().setLevel(logging.INFO)
    elif verbose > 1:
        click.secho("Verbose level set to debug, sensitive data might be logged", fg="yellow")
        logging.getLogger().setLevel(logging.DEBUG)
    else:
        logging.getLogger().setLevel(logging.CRITICAL)
    # Running bare `passpie` behaves like `passpie list`.
    if ctx.invoked_subcommand is None:
        ctx.invoke(cli.commands['list'])
@cli.command(help='Generate completion scripts for shells')
@click.argument('shell_name', type=click.Choice(completion.SHELLS),
                default=None, required=False)
@logging_exception()
@pass_db
@click.pass_context
def complete(ctx, db, shell_name):
    """Emit a shell-completion script for *shell_name* on stdout."""
    available = cli.commands.keys()
    click.echo(completion.script(shell_name, db.path, available))
@cli.command(name='list')
@logging_exception()
@pass_db
def list_database(db):
    """Print credential as a table"""
    creds = db.credentials()
    if not creds:
        return
    cfg = db.config
    table = Table(
        cfg['headers'],
        table_format=cfg['table_format'],
        colors=cfg['colors'],
        hidden=cfg['hidden'],
        hidden_string=cfg['hidden_string'],
    )
    click.echo(table.render(creds))
@cli.command(name="config")
@click.argument('level', type=click.Choice(['global', 'local', 'current']),
default='current', required=False)
@logging_exception()
@pass_db
def check_config(db, level):
"""Show current configuration for shell"""
if level == 'global':
configuration = config.read(config.HOMEDIR, '.passpierc')
elif level == 'local':
configuration = config.read(os.path.join(db.path))
elif level == 'current':
configuration = db.config
if configuration:
click.echo(yaml.safe_dump(configuration, default_flow_style=False))
@cli.command(help="Initialize new passpie database")
@click.option('-f', '--force', is_flag=True, help="Force overwrite database")
@click.option('-r', '--recipient', help="Keyring default recipient")
@click.option('-c', '--clone', 'clone_repo', help="Clone a remote repository")
@click.option('--no-git', is_flag=True, help="Don't create a git repository")
@click.option('--passphrase', help="Database passphrase")
@logging_exception()
@pass_db
def init(db, force, clone_repo, recipient, no_git, passphrase):
if force:
if os.path.isdir(db.path):
shutil.rmtree(db.path)
logging.info('removed directory %s' % db.path)
elif os.path.isfile(db.path):
os.remove(db.path)
logging.info('removed file %s' % db.path)
try:
if clone_repo and not config.is_repo_url(db.path):
if config.is_repo_url(clone_repo):
clone(clone_repo, db.path)
else:
message = u"url is not a remote repo: {}".format(clone_repo)
raise click.ClickException(click.style(message, fg='red'))
else:
os.makedirs(db.path)
except (SystemError, OSError):
message = u"Path exists '{}'. `--force` to overwrite".format(db.path)
raise click.ClickException(click.style(message, fg='red'))
if recipient:
logging.info('create .passpierc file at %s' % db.path)
config.create(db.path, defaults=dict(recipient=recipient))
else:
logging.info('create .passpierc file at %s' % db.path)
config.create(db.path, defaults={})
if not passphrase:
passphrase = click.prompt('Passphrase',
hide_input=True,
confirmation_prompt=True)
keys_filepath = os.path.join(db.config['path'], '.keys')
create_keys(passphrase, keys_filepath, key_length=db.config['key_length'])
if not no_git:
logging.info('init git repository in %s' % db.path)
db.repo.init()
db.repo.commit(message='Initialized git repository', add=True)
click.echo("Initialized database in {}".format(db.path))
@cli.command(help='Add new credential to database')
@click.argument("fullname")
@click.option('-p', '--password', help="Credential password")
@click.option('-r', '--random', is_flag=True, help="Randonly generate password")
@click.option('-P', '--pattern', help="Random password regex pattern")
@click.option('-c', '--comment', default="", help="Credential comment")
@click.option('-f', '--force', is_flag=True, help="Force overwriting")
@click.option('-i', '--interactive', is_flag=True, help="Interactively edit credential")
@click.option('-C', '--copy', is_flag=True, help="Copy password to clipboard")
@logging_exception()
@pass_db
def add(db, fullname, password, random, pattern, interactive, comment, force, copy):
    """Add a credential, generating or prompting for its password as needed."""
    if random or pattern:
        password = genpass(pattern=pattern or db.config['genpass_pattern'])
    elif not password:
        password = click.prompt('Password [empty]',
                                hide_input=True,
                                confirmation_prompt=True,
                                show_default=False,
                                default="")
    if db.credential(fullname=fullname) and not force:
        message = u"Credential {} already exists. --force to overwrite".format(
            fullname)
        raise click.ClickException(click.style(message, fg='yellow'))
    # Passwords are stored encrypted only.
    secured = encrypt(password, recipient=db.config['recipient'], homedir=db.config['homedir'])
    db.add(fullname=fullname, password=secured, comment=comment)
    if interactive:
        click.edit(filename=db.filename(fullname))
    if copy:
        clipboard.copy(password)
        click.secho('Password copied to clipboard', fg='yellow')
    db.repo.commit(message=u'Added {}{}'.format(fullname, ' [--force]' if force else ''))
@cli.command(help="Copy credential password to clipboard/stdout")
@click.argument("fullname")
@click.option("--passphrase", prompt="Passphrase", hide_input=True)
@click.option("--to", default='clipboard',
type=click.Choice(['stdout', 'clipboard']),
help="Copy password destination")
@click.option("--clear", default=0, help="Automatically clear password from clipboard")
@logging_exception()
@pass_db
def copy(db, fullname, passphrase, to, clear):
ensure_passphrase(passphrase, db.config)
clear = clear if clear else db.config['copy_timeout']
credential = db.credential(fullname)
if not credential:
message = u"Credential '{}' not found".format(fullname)
raise click.ClickException(click.style(message, fg='red'))
encrypted = credential["password"]
decrypted = decrypt(encrypted,
recipient=db.config['recipient'],
passphrase=passphrase,
homedir=db.config['homedir'])
if to == 'clipboard':
clipboard.copy(decrypted, clear)
if not clear:
click.secho('Password copied to clipboard', fg='yellow')
elif to == 'stdout':
click.echo(decrypted)
@cli.command(help="Update credential")
@click.argument("fullname")
@click.option("--name", help="Credential new name")
@click.option("--login", help="Credential new login")
@click.option("--comment", help="Credential new comment")
@click.option("--password", help="Credential new password")
@click.option('--random', is_flag=True, help="Credential new randomly generated password")
@click.option('-i', '--interactive', is_flag=True, help="Interactively edit credential")
@click.option('-P', '--pattern', help="Random password regex pattern")
@logging_exception()
@pass_db
def update(db, fullname, name, login, password, random, interactive, pattern, comment):
credential = db.credential(fullname)
if not credential:
message = u"Credential '{}' not found".format(fullname)
raise click.ClickException(click.style(message, fg='red'))
if random or pattern:
pattern = pattern if pattern else db.config['genpass_pattern']
password = genpass(pattern=pattern)
values = credential.copy()
if any([name, login, password, random, comment]):
values["name"] = name if name else credential["name"]
values["login"] = login if login else credential["login"]
values["password"] = password if password else credential["password"]
values["comment"] = comment if comment else credential["comment"]
else:
values["name"] = click.prompt("Name", default=credential["name"])
values["login"] = click.prompt("Login", default=credential["login"])
values["password"] = click.prompt("Password",
hide_input=True,
default=credential["password"],
confirmation_prompt=True,
show_default=False,
prompt_suffix=" [*****]: ")
values["comment"] = click.prompt("Comment",
default=credential["comment"])
if values != credential:
if values["password"] != credential["password"]:
encrypted = encrypt(values["password"],
recipient=db.config['recipient'],
homedir=db.config['homedir'])
values['password'] = encrypted
db.update(fullname=fullname, values=values)
if interactive:
click.edit(filename=db.filename(fullname))
db.repo.commit(u'Updated {}'.format(credential['fullname']))
@cli.command(help="Remove credential")
@click.argument("fullname")
@click.option("-y", "--yes", is_flag=True, help="Skip confirmation prompt")
@logging_exception()
@pass_db
def remove(db, fullname, yes):
credentials = db.credentials(fullname=fullname)
if credentials:
if not yes:
creds = ', '.join([c['fullname'] for c in credentials])
click.confirm(
u'Remove credentials: ({})'.format(
click.style(creds, 'yellow')),
abort=True
)
for credential in credentials:
db.remove(credential['fullname'])
fullnames = ', '.join(c['fullname'] for c in credentials)
db.repo.commit(u'Removed {}'.format(fullnames))
@cli.command(help="Search credentials by regular expressions")
@click.argument("regex")
@logging_exception()
@pass_db
def search(db, regex):
credentials = db.matches(regex)
if credentials:
table = Table(
db.config['headers'],
table_format=db.config['table_format'],
colors=db.config['colors'],
hidden=['password']
)
click.echo(table.render(credentials))
@cli.command(help="Diagnose database for improvements")
@click.option("--full", is_flag=True, help="Show all entries")
@click.option("--days", default=90, type=int, help="Elapsed days")
@click.option("--passphrase", prompt="Passphrase", hide_input=True)
@logging_exception()
@pass_db
def status(db, full, days, passphrase):
ensure_passphrase(passphrase, db.config)
credentials = db.credentials()
for cred in credentials:
decrypted = decrypt(cred['password'],
recipient=db.config['recipient'],
passphrase=passphrase,
homedir=db.config['homedir'])
cred["password"] = decrypted
if credentials:
limit = db.config['status_repeated_passwords_limit']
credentials = checkers.repeated(credentials, limit)
credentials = checkers.modified(credentials, days)
for c in credentials:
if c['repeated']:
c['repeated'] = click.style(str(c['repeated']), 'red')
if c['modified']:
c['modified'] = click.style(str(c['modified']), 'red')
table = Table(['fullname', 'repeated', 'modified'],
table_format=db.config['table_format'],
missing=click.style('OK', 'green'))
click.echo(table.render(credentials))
@cli.command(name="import", help="Import credentials from path")
@click.argument("filepath", type=click.Path(readable=True, exists=True))
@click.option("-I", "--importer", type=click.Choice(importers.get_names()),
help="Specify an importer")
@click.option("--cols", help="CSV expected columns", callback=validate_cols)
@pass_db
def import_database(db, filepath, importer, cols):
if cols:
importer = importers.get(name='csv')
kwargs = {'cols': cols}
else:
importer = importers.find_importer(filepath)
kwargs = {}
if importer:
credentials = importer.handle(filepath, **kwargs)
for cred in credentials:
encrypted = encrypt(cred['password'],
recipient=db.config['recipient'],
homedir=db.config['homedir'])
cred['password'] = encrypted
db.insert_multiple(credentials)
db.repo.commit(message=u'Imported credentials from {}'.format(filepath))
@cli.command(name="export", help="Export credentials in plain text")
@click.argument("filepath", type=click.File("w"))
@click.option("--json", "as_json", is_flag=True, help="Export as JSON")
@click.option("--passphrase", prompt="Passphrase", hide_input=True)
@logging_exception()
@pass_db
def export_database(db, filepath, as_json, passphrase):
ensure_passphrase(passphrase, db.config)
credentials = db.all()
for cred in credentials:
decrypted = decrypt(cred['password'],
recipient=db.config['recipient'],
passphrase=passphrase,
homedir=db.config['homedir'])
cred["password"] = decrypted
if as_json:
for cred in credentials:
cred["modified"] = str(cred["modified"])
dict_content = {
'handler': 'passpie',
'version': 1.0,
'credentials': [dict(x) for x in credentials],
}
content = json.dumps(dict_content, indent=2)
else:
dict_content = {
'handler': 'passpie',
'version': 1.0,
'credentials': [dict(x) for x in credentials],
}
content = yaml.dump(dict_content, default_flow_style=False)
filepath.write(content)
@cli.command(help='Renew passpie database and re-encrypt credentials')
@click.option("--passphrase", prompt="Passphrase", hide_input=True)
@logging_exception()
@pass_db
def reset(db, passphrase):
    """Re-encrypt every credential, optionally under freshly created keys."""
    ensure_passphrase(passphrase, db.config)
    credentials = db.credentials()
    if credentials:
        # decrypt all credentials
        for cred in credentials:
            decrypted = decrypt(cred['password'],
                                recipient=db.config['recipient'],
                                passphrase=passphrase,
                                homedir=db.config['homedir'])
            cred["password"] = decrypted
        # recreate keys if exists
        if db.has_keys():
            new_passphrase = click.prompt('New passphrase',
                                          hide_input=True,
                                          confirmation_prompt=True)
            # NOTE(review): init calls create_keys(passphrase, path, key_length=...)
            # but only the passphrase is passed here -- confirm the new keys
            # end up in the database's .keys path.
            create_keys(new_passphrase)
        # encrypt passwords
        for cred in credentials:
            cred['password'] = encrypt(cred['password'],
                                       recipient=db.config['recipient'],
                                       homedir=db.config['homedir'])
        # remove old and insert re-encrypted credentials
        db.purge()
        db.insert_multiple(credentials)
        # commit
        db.repo.commit(message='Reset database')
@cli.command(help='Remove all credentials from database')
@click.option("-y", "--yes", is_flag=True, help="Skip confirmation prompt")
@logging_exception()
@pass_db
def purge(db, yes):
    """Wipe every credential, asking for confirmation unless -y was given."""
    if not db.credentials():
        return
    if not yes:
        alert = u"Purge '{}' credentials".format(len(db.credentials()))
        # abort=True raises click.Abort on "no", so execution continues only on "yes".
        yes = click.confirm(click.style(alert, 'yellow'), abort=True)
    if yes:
        db.purge()
        db.repo.commit(message='Purged database')
@cli.command(help='Shows passpie database changes history')
@click.option("--init", is_flag=True, help="Enable history tracking")
@click.option("--reset-to", default=-1, help="Undo changes in database")
@logging_exception()
@pass_db
def log(db, reset_to, init):
    """Show, rewind, or initialize the database's git history."""
    if reset_to >= 0:
        logging.info('reset database to index %s', reset_to)
        db.repo.reset(reset_to)
    elif init:
        db.repo.init()
        db.repo.commit(message='Initialized git repository', add=True)
    else:
        commits = []
        for number, message in enumerate(db.repo.commit_list()):
            number = click.style(str(number), fg='magenta')
            commits.append(u"[{}] {}".format(number, message.strip()))
        # click.echo (not print) for encoding-safe output consistent with
        # every other command in this CLI.
        for commit in reversed(commits):
            click.echo(commit)
| {
"repo_name": "scorphus/passpie",
"path": "passpie/cli.py",
"copies": "1",
"size": "20612",
"license": "mit",
"hash": -2365399118738421000,
"line_mean": 37.241187384,
"line_max": 97,
"alpha_frac": 0.607364642,
"autogenerated": false,
"ratio": 4.1758508914100485,
"config_test": true,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.00031785810259191307,
"num_lines": 539
} |
from functools import wraps
import json
import logging
import traceback
from django.core.urlresolvers import reverse as _reverse
from django.utils.http import urlencode
from dimagi.utils.web import get_url_base
from django import http
from django.conf import settings
from django.core.exceptions import PermissionDenied
from corehq.util import global_request
# Content type used by every JSON error response built in this module.
JSON = 'application/json'
# Same logger name django itself uses for request errors.
logger = logging.getLogger('django.request')
def set_file_download(response, filename):
    """Mark *response* as an attachment download named *filename*.

    NOTE(review): filename is interpolated unescaped; a name containing '"'
    would break the header -- confirm callers only pass safe names.
    """
    response["Content-Disposition"] = 'attachment; filename="%s"' % filename
class BadRequest(Exception):
    """Error to be used with @json_error to signal a bad request
    Inspired by https://github.com/jsocol/django-jsonview ::
    HTTP does not have a great status code for "you submitted a form that didn't
    validate," and so Django doesn't support it very well. Most examples just
    return 200 OK.
    Normally, this is fine. But if you're submitting a form via Ajax, it's nice
    to have a distinct status for "OK" and "Nope." The HTTP 400 Bad Request
    response is the fallback for issues with a request not-otherwise-specified,
    so let's do that.
    To cause @json_error to return a 400, just raise this exception with
    whatever appropriate error message.

    The exception's message becomes the 'message' field of the JSON body.
    """
def json_error(f):
    """A decorator for request handlers that returns structured error responses
    Inspired by (and some parts shamelessly copied from)
    https://github.com/jsocol/django-jsonview

    Python 2 code: uses the `unicode` builtin when stringifying exceptions.
    Maps Http404 -> 404, PermissionDenied -> 403, BadRequest -> 400 and any
    other exception -> 500, always with a JSON body {'error': code, 'message': ...}.
    """
    @wraps(f)
    def inner(request, *args, **kwargs):
        try:
            response = f(request, *args, **kwargs)
            # Some errors are not exceptions. :\
            if isinstance(response, http.HttpResponseNotAllowed):
                blob = json.dumps({
                    'error': 405,
                    'message': 'HTTP method not allowed.'
                })
                return http.HttpResponse(blob, status=405, content_type=JSON)
            return response
        except http.Http404 as e:
            blob = json.dumps({
                'error': 404,
                'message': unicode(e),
            })
            logger.warning('Not found: %s', request.path,
                           extra={
                               'status_code': 404,
                               'request': request,
                           })
            return http.HttpResponseNotFound(blob, content_type=JSON)
        except PermissionDenied as e:
            logger.warning(
                'Forbidden (Permission denied): %s', request.path,
                extra={
                    'status_code': 403,
                    'request': request,
                })
            blob = json.dumps({
                'error': 403,
                'message': unicode(e),
            })
            return http.HttpResponseForbidden(blob, content_type=JSON)
        except BadRequest as e:
            # Raised deliberately by views to signal validation failures.
            blob = json.dumps({
                'error': 400,
                'message': unicode(e),
            })
            return http.HttpResponseBadRequest(blob, content_type=JSON)
        except Exception as e:
            # Catch-all boundary: include the traceback only in DEBUG.
            data = {
                'error': 500,
                'message': unicode(e)
            }
            if settings.DEBUG:
                data['traceback'] = traceback.format_exc()
            return http.HttpResponse(
                status=500,
                content=json.dumps(data),
                content_type=JSON
            )
    return inner
def get_request():
    """Return the current request as tracked by corehq.util.global_request."""
    return global_request.get_request()
def reverse(viewname, params=None, absolute=False, **kwargs):
    """
    >>> reverse('create_location', args=["test"], params={"selected": "foo"})
    '/a/test/settings/locations/new/?selected=foo'
    """
    # Delegate to django's reverse, then optionally decorate the result.
    url = _reverse(viewname, **kwargs)
    if absolute:
        url = get_url_base() + url
    if params:
        url = url + '?' + urlencode(params)
    return url
def absolute_reverse(*args, **kwargs):
    """Shortcut for reverse(..., absolute=True): prefix with the site base URL."""
    return reverse(*args, absolute=True, **kwargs)
| {
"repo_name": "puttarajubr/commcare-hq",
"path": "corehq/util/view_utils.py",
"copies": "2",
"size": "4030",
"license": "bsd-3-clause",
"hash": -4565501469507325000,
"line_mean": 31.5,
"line_max": 80,
"alpha_frac": 0.5771712159,
"autogenerated": false,
"ratio": 4.5027932960893855,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.6079964511989385,
"avg_score": null,
"num_lines": null
} |
from functools import wraps
import json
import logging
from django.contrib.auth import (
authenticate,
get_user_model,
)
from django.db import transaction
from channels import Group
from core.models import (
Device,
DeviceGroup,
Zone,
)
from django.utils import timezone
class CommandError(Exception):
    """Top-level exception for client errors in commands.

    Raised by command handlers; handle_replies converts it into an
    {'command': 'error', ...} reply on the sending websocket.
    """
def handle_replies(f):
    """Decorator for commands to make it easy to return responses to the sending websocket."""
    @wraps(f)
    def wrapper(message):
        # Rehydrate the authenticated user from the channel session, if any.
        user_id = message.channel_session.get('user_id')
        message.user = get_user_model().objects.get(id=user_id) if user_id else None
        try:
            response_data = f(message)
        except CommandError as e:
            # Client errors become an error reply instead of propagating.
            message.reply_channel.send({
                'text': json.dumps({
                    'command': 'error',
                    'error_message': str(e)
                })
            })
        else:
            # A bare string return is shorthand for a generic success payload.
            if isinstance(response_data, str):
                response_data = {'command': 'success', 'data': response_data}
            if response_data:
                # Handlers may return one reply dict or a list of them.
                if isinstance(response_data, list):
                    for message_data in response_data:
                        message.reply_channel.send({
                            'text': json.dumps(message_data)
                        })
                else:
                    message.reply_channel.send({
                        'text': json.dumps(response_data)
                    })
    return wrapper
@transaction.atomic
def handle_login(message, data):
    """Authenticate the websocket session and bind it to a zone.

    Requires 'username', 'password' and 'zone' in *data*.  On success the
    user id and zone name are stored in the channel session and the zone is
    created if it does not exist yet.
    """
    if not data.get('username') or not data.get('password'):
        raise CommandError("Login requires username and password")
    # Previously data['zone'] raised a bare KeyError that bypassed the
    # error-reply machinery in handle_replies; report it as a CommandError.
    zone_name = data.get('zone')
    if not zone_name:
        raise CommandError("Login requires zone")
    user = authenticate(username=data['username'], password=data['password'])
    if user:
        message.channel_session['user_id'] = user.pk
        message.user = user
        # On logging in, make sure the bridge's zone exists (can't be done before login)
        Zone.objects.get_or_create(
            name=zone_name,
            user=user
        )
        message.channel_session['zone_name'] = zone_name
        return {'command': 'login_success'}
    else:
        raise CommandError("Invalid user or password")
def handle_logout(message, data):
    """Drop the session's authenticated user; *data* is ignored."""
    message.user = None
    message.channel_session['user_id'] = None
    return 'Successfully logged out.'
def handle_update_group(message, data):
    """Create or update a device group in the session's zone.

    Expects data['data'] with 'name' and 'friendly_name'.
    """
    command_data = data['data']
    if not command_data.get('name'):
        # The message previously said "device name" although this command
        # validates a group name.
        raise CommandError("update_group requires a group name!")
    DeviceGroup.objects.update_or_create(
        name=command_data['name'],
        zone=Zone.objects.get(name=message.channel_session['zone_name']),
        defaults={
            'friendly_name': command_data['friendly_name']
        }
    )
def handle_update_device(message, data):
    """ Update the state of a device """
    # Validate command
    command_data = data['data']
    if not command_data.get('name'):
        raise CommandError("update_device requires a device name!")
    zone = Zone.objects.get(name=message.channel_session['zone_name'])
    # Pop so the remaining command_data can be splatted into Device(**...) below.
    group_name = command_data.pop('device_group', '')
    try:
        # NOTE(review): lookup is by name+type only, not zone -- a same-named
        # device in another zone would be updated and re-zoned; confirm intended.
        device = Device.objects.get(
            name=command_data['name'],
            device_type=command_data['device_type']
        )
        if command_data.get('data'):
            device.data = command_data['data']
        device.last_seen = timezone.now()
        device.last_updated = timezone.now()
        device.zone = zone
        device.save(data_source='device')
    except Device.DoesNotExist:
        # To allow creating the device, we need to double-check that the device group exists
        device = Device(
            zone=zone,
            **command_data
        )
        device.save(data_source='device')
        # NOTE(review): the group is only attached on first creation (this
        # branch), and it is created without a zone unlike handle_update_group
        # -- confirm both are intended.
        if group_name:
            device_group, created = DeviceGroup.objects.get_or_create(
                name=group_name
            )
            if device_group not in device.groups.all():
                device.groups.add(device_group)
def handle_get_devices(message, data):
    """Return set_state replies for devices matching optional name/group/type filters."""
    criteria = data['data']
    query = {}
    # Map optional request keys onto ORM filter fields.
    if 'name' in criteria:
        query['name'] = criteria['name']
    if 'group' in criteria:
        query['device_group__name'] = criteria['group']
    if 'type' in criteria:
        query['device_type'] = criteria['type']
    return [{
        'command': 'set_state',
        'device_type': device.device_type,
        'name': device.name,
        'data': device.data
    } for device in Device.objects.filter(**query)]
def handle_keepalive(message, data):
    """Broadcast a keepalive message to every socket in the session's zone group."""
    zone = Zone.objects.get(name=message.channel_session['zone_name'])
    payload = json.dumps({
        'command': 'keepalive',
    })
    Group("user_{}_zone_{}".format(zone.user_id, zone.name)).send({'text': payload})
# Dispatch table: websocket command name -> handler(message, data).
# Trivial commands (echo, whoami) are inlined as lambdas.
available_commands = {
    'login': handle_login,
    'logout': handle_logout,
    'echo': lambda message, data: message['text'],
    'whoami': lambda message, data: {'command': 'whoami', 'username': str(message.user or 'anonymous')},
    'update_device': handle_update_device,
    'update_group': handle_update_group,
    'get_devices': handle_get_devices,
    'keepalive': handle_keepalive,
}
@handle_replies
def handle_command(message):
    """Main entry point for all commands coming in via websocket."""
    try:
        data = json.loads(message['text'])
    except json.decoder.JSONDecodeError as e:
        raise CommandError("Invalid json.{}".format(e))
    if 'command' not in data:
        raise CommandError("No command given")
    # .get(): 'user_id' is absent entirely on a fresh session, so a plain []
    # lookup raised KeyError here.  Raise CommandError (not bare Exception)
    # so handle_replies sends the client an error reply instead of crashing.
    user_id = message.channel_session.get('user_id')
    if data['command'] != 'login' and not user_id:
        raise CommandError("Login required")
    # Now, record add the socket to the appropriate groups
    if user_id:
        user = get_user_model().objects.get(id=user_id)
        zone = Zone.objects.get(name=message.channel_session['zone_name'])
        Group("user_{}".format(user.id)).add(message.reply_channel)
        Group("user_{}_zone_{}".format(user.id, zone.name)).add(message.reply_channel)
    command_handler = available_commands.get(data['command'])
    if not command_handler:
        raise CommandError("Unknown command {}".format(data['command']))
    return command_handler(message, data)
| {
"repo_name": "kirberich/phoebe",
"path": "phoebe/core/commands.py",
"copies": "1",
"size": "6483",
"license": "mit",
"hash": -4025875509762329000,
"line_mean": 29.7251184834,
"line_max": 104,
"alpha_frac": 0.604349838,
"autogenerated": false,
"ratio": 4.072236180904523,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.00030946790188351726,
"num_lines": 211
} |
from functools import wraps
import json
import os
import traceback
import validators
from jinja2 import Environment, PackageLoader
from notebook.utils import url_path_join
from notebook.base.handlers import IPythonHandler
import requests
from requests.auth import HTTPBasicAuth
# Jinja2 environment rendering templates bundled with the saagie package.
env = Environment(
    loader=PackageLoader('saagie', 'jinja2'),
)
# Connection state; URL patterns are (re)computed by define_globals() and the
# auth token is set by login_form() / cleared by clear_basic_auth_token().
SAAGIE_ROOT_URL = os.environ.get("SAAGIE_ROOT_URL", None)
SAAGIE_USERNAME = None
PLATFORMS_URL = None
SAAGIE_BASIC_AUTH_TOKEN = None
JOBS_URL_PATTERN = None
JOB_URL_PATTERN = None
JOB_UPGRADE_URL_PATTERN = None
SCRIPT_UPLOAD_URL_PATTERN = None
def get_absolute_saagie_url(saagie_url):
    """Prefix platform-relative paths with SAAGIE_ROOT_URL; pass full URLs through."""
    if not saagie_url.startswith('/'):
        return saagie_url
    return SAAGIE_ROOT_URL + saagie_url
class ResponseError(Exception):
    """Exception carrying an HTTP status code for the handler to emit."""

    def __init__(self, status_code):
        super(ResponseError, self).__init__(status_code)
        self.status_code = status_code
class SaagieHandler(IPythonHandler):
    """Tornado handler serving the Saagie plugin views over GET/POST."""

    def handle_request(self, method):
        """Dispatch to the named view and render its template, or an error page."""
        data = {k: v[0].decode() for k, v in self.request.arguments.items()}
        if 'view' not in data:
            self.send_error(404)
            return
        view_name = data.pop('view')
        notebook_path = data.pop('notebook_path', None)
        notebook_json = data.pop('notebook_json', None)
        notebook = Notebook(notebook_path, notebook_json)
        try:
            template_name, template_data = views.render(
                view_name, notebook=notebook, data=data, method=method)
        except ResponseError as e:
            self.send_error(e.status_code)
            return
        except Exception:
            # Was a bare ``except:`` which also trapped SystemExit and
            # KeyboardInterrupt; only genuine errors should reach the page.
            template_name = 'internal_error.html'
            template_data = {'error': traceback.format_exc()}
            self.set_status(500)
        template_data.update(
            notebook=notebook,
        )
        template = env.get_template(template_name)
        self.finish(template.render(template_data))

    def get(self):
        self.handle_request('GET')

    def post(self):
        self.handle_request('POST')

    def check_xsrf_cookie(self):
        # Deliberately disable XSRF checking for plugin requests.
        return
class SaagieCheckHandler(IPythonHandler):
    """Responds 200 with an empty body -- presumably a plugin-presence check."""
    def get(self):
        self.finish()
class SaagieJobRun:
    """One execution of a SaagieJob: id, status and captured output streams."""

    def __init__(self, job, run_data):
        self.job = job
        self.id = run_data['id']
        self.status = run_data['status']
        # Log streams default to empty strings when absent from the payload.
        self.stdout = run_data.get('logs_out', '')
        self.stderr = run_data.get('logs_err', '')
class SaagieJob:
    """A processing job on a Saagie platform, wrapping the jobs-API payload."""

    @classmethod
    def from_id(cls, notebook, platform_id, job_id):
        """Fetch a job by id and wrap the response payload."""
        response = requests.get(JOB_URL_PATTERN % (platform_id, job_id),
                                auth=SAAGIE_BASIC_AUTH_TOKEN)
        return SaagieJob(notebook, response.json())

    def __init__(self, notebook, job_data):
        self.notebook = notebook
        self.data = job_data
        self.platform_id = job_data['platform_id']
        self.capsule_type = job_data['capsule_code']
        self.id = job_data['id']
        self.name = job_data['name']
        # Filled in by fetch_logs() once the latest instance has finished.
        self.last_run = None

    def set_as_current(self):
        """Mark this job as the notebook's active job."""
        self.notebook.current_job = self

    @property
    def url(self):
        return (JOBS_URL_PATTERN + '/%s') % (self.platform_id, self.id)

    @property
    def admin_url(self):
        return get_absolute_saagie_url('/#/manager/%s/job/%s'
                                       % (self.platform_id, self.id))

    @property
    def logs_url(self):
        return self.admin_url + '/logs'

    @property
    def is_started(self):
        return self.last_run is not None

    def fetch_logs(self):
        """Populate last_run when the job's latest instance reached a final state."""
        job_data = requests.get(self.url, auth=SAAGIE_BASIC_AUTH_TOKEN).json()
        run_data = job_data.get('last_instance')
        if run_data is None or run_data['status'] not in ('SUCCESS', 'FAILED'):
            return
        run_data = requests.get(
            get_absolute_saagie_url('/api/v1/jobtask/%s'
                                    % run_data['id']), auth=SAAGIE_BASIC_AUTH_TOKEN).json()
        self.last_run = SaagieJobRun(self, run_data)

    @property
    def details_template_name(self):
        return 'include/python_job_details.html'

    def __str__(self):
        return self.name

    def __eq__(self, other):
        return (other is not None
                and self.platform_id == other.platform_id
                and self.id == other.id)

    def __lt__(self, other):
        return other is not None and self.id < other.id
class SaagiePlatform:
    """A Saagie platform and the capsule (job) types it offers."""

    SUPPORTED_CAPSULE_TYPES = {'python'}

    def __init__(self, notebook, platform_data):
        self.notebook = notebook
        self.id = platform_data['id']
        self.name = platform_data['name']
        self.capsule_types = {c['code'] for c in platform_data['capsules']}

    @property
    def is_supported(self):
        """True when the platform offers at least one supported capsule type."""
        return bool(self.capsule_types & self.SUPPORTED_CAPSULE_TYPES)

    def get_jobs(self):
        """List this platform's processing jobs of a supported capsule type."""
        if not self.is_supported:
            return []
        jobs_data = requests.get(JOBS_URL_PATTERN % self.id,
                                 auth=SAAGIE_BASIC_AUTH_TOKEN).json()
        return [SaagieJob(self.notebook, job_data)
                for job_data in jobs_data
                if job_data['category'] == 'processing'
                and job_data['capsule_code'] in self.SUPPORTED_CAPSULE_TYPES]

    def __eq__(self, other):
        return self.id == other.id
class Notebook:
    """A notebook file plus its parsed JSON, cached one instance per path."""

    # Maps notebook path -> Notebook so repeated requests share one instance.
    CACHE = {}

    def __new__(cls, path, json):
        cached = cls.CACHE.get(path)
        if cached is not None:
            return cached
        instance = super(Notebook, cls).__new__(cls)
        cls.CACHE[path] = instance
        return instance

    def __init__(self, path, json_data):
        if path is None:
            path = 'Untitled.ipynb'
        if json_data is None:
            json_data = json.dumps({
                'cells': [],
                'metadata': {'kernelspec': {'name': 'python3'}}})
        self.path = path
        self.json = json.loads(json_data)
        # In cached instances, current_job is already defined.
        if not hasattr(self, 'current_job'):
            self.current_job = None

    @property
    def name(self):
        """Filename without directory or extension."""
        return os.path.splitext(os.path.basename(self.path))[0]

    @property
    def kernel_name(self):
        return self.json['metadata']['kernelspec']['name']

    @property
    def kernel_display_name(self):
        return self.json['metadata']['kernelspec']['display_name']

    def get_code_cells(self):
        """Sources of all code cells, in document order."""
        return [cell['source'] for cell in self.json['cells']
                if cell['cell_type'] == 'code']

    def get_code(self, indices=None):
        """Join the selected code cells (all by default) with blank lines."""
        cells = self.get_code_cells()
        if indices is None:
            indices = list(range(len(cells)))
        return '\n\n\n'.join([cells[i] for i in indices])

    def get_platforms(self):
        response = requests.get(PLATFORMS_URL, auth=SAAGIE_BASIC_AUTH_TOKEN)
        return [SaagiePlatform(self, platform_data)
                for platform_data in response.json()]
class ViewsCollection(dict):
    """Registry of view functions keyed by function name."""

    def add(self, func):
        """Register *func* under its own name; usable as a decorator."""
        self[func.__name__] = func
        return func

    def render(self, view_name, notebook, data=None, method='GET', **kwargs):
        """Call the named view and return (template_name, template_data).

        Raises ResponseError(404) for unknown views.  A view may return a
        plain data dict (template name defaults to '<view name>.html') or an
        explicit (template_name, data) tuple.
        """
        if data is None:
            data = {}
        try:
            # Was ``views[view_name]`` -- a lookup through the module-level
            # instance instead of this one, which broke any other
            # ViewsCollection.  ``self`` is the registry being asked.
            view = self[view_name]
        except KeyError:
            raise ResponseError(404)
        template_data = view(method, notebook, data, **kwargs)
        if isinstance(template_data, tuple):
            template_name, template_data = template_data
        else:
            template_name = view.__name__ + '.html'
        return template_name, template_data
# Global view registry consulted by SaagieHandler.handle_request().
views = ViewsCollection()
@views.add
def modal(method, notebook, data):
    # Renders modal.html with no extra context.
    return {}
def clear_basic_auth_token():
    """Forget the stored Basic Auth token, forcing a fresh login."""
    global SAAGIE_BASIC_AUTH_TOKEN
    SAAGIE_BASIC_AUTH_TOKEN = None
# Init an empty Basic Auth token on first launch
clear_basic_auth_token()
def is_logged():
    """Check whether the stored Basic Auth token is still accepted by Saagie.

    Drops the token (so the login form shows again) when the check fails.
    """
    if SAAGIE_ROOT_URL is None or SAAGIE_BASIC_AUTH_TOKEN is None:
        return False
    # Check if Basic token is still valid
    ok = False
    try:
        response = requests.get(SAAGIE_ROOT_URL + '/api/v1/user-current',
                                auth=SAAGIE_BASIC_AUTH_TOKEN,
                                allow_redirects=False)
        ok = response.ok
    except (requests.ConnectionError, requests.RequestException,
            requests.HTTPError, requests.Timeout) as err:
        print ('Error while trying to connect to Saagie: ', err)
    if not ok:
        # Remove Basic Auth token from globals. It will force a new login phase.
        clear_basic_auth_token()
    return ok
def define_globals(saagie_root_url, saagie_username):
    """Initialise the module-level Saagie URL templates from the root URL.

    Does nothing when *saagie_root_url* is None.
    """
    if saagie_root_url is None:
        return
    global SAAGIE_ROOT_URL
    global SAAGIE_USERNAME
    global PLATFORMS_URL
    global JOBS_URL_PATTERN
    global JOB_URL_PATTERN
    global JOB_UPGRADE_URL_PATTERN
    global SCRIPT_UPLOAD_URL_PATTERN
    SAAGIE_USERNAME = saagie_username
    SAAGIE_ROOT_URL = saagie_root_url.strip("/")
    # Each pattern builds on the previous one; %s slots are filled with
    # platform ids / job ids by the callers.
    PLATFORMS_URL = SAAGIE_ROOT_URL + '/api/v1/platform'
    JOBS_URL_PATTERN = PLATFORMS_URL + '/%s/job'
    JOB_URL_PATTERN = JOBS_URL_PATTERN + '/%s'
    JOB_UPGRADE_URL_PATTERN = JOBS_URL_PATTERN + '/%s/version'
    SCRIPT_UPLOAD_URL_PATTERN = JOBS_URL_PATTERN + '/upload'
@views.add
def login_form(method, notebook, data):
    """Login view.

    GET: show the form (or skip straight to the capsule chooser when a
    valid token is already cached).  POST: validate the URL, probe the
    credentials against the ``user-current`` endpoint, and cache the Basic
    Auth token on success.
    """
    if method == 'POST':
        # check if the given Saagie URL is well formed
        if not validators.url(data['saagie_root_url']):
            return {'error': 'Invalid URL', 'saagie_root_url': data['saagie_root_url'] or '', 'username': data['username'] or ''}
        define_globals(data['saagie_root_url'], data['username'])
        try:
            # Probe the credentials before committing them.
            basic_token = HTTPBasicAuth(data['username'], data['password'])
            current_user_response = requests.get(SAAGIE_ROOT_URL + '/api/v1/user-current', auth=basic_token, allow_redirects=False)
            if current_user_response.ok:
                # Login succeeded, keep the basic token for future API calls
                global SAAGIE_BASIC_AUTH_TOKEN
                SAAGIE_BASIC_AUTH_TOKEN = basic_token
        except (requests.ConnectionError, requests.RequestException, requests.HTTPError, requests.Timeout) as err:
            print ('Error while trying to connect to Saagie: ', err)
            return {'error': 'Connection error', 'saagie_root_url': SAAGIE_ROOT_URL, 'username': SAAGIE_USERNAME or ''}
        if SAAGIE_BASIC_AUTH_TOKEN is not None:
            return views.render('capsule_type_chooser', notebook)
        return {'error': 'Invalid URL, username or password.', 'saagie_root_url': SAAGIE_ROOT_URL, 'username': SAAGIE_USERNAME or ''}
    # GET: already authenticated sessions skip the form entirely.
    if is_logged():
        return views.render('capsule_type_chooser', notebook)
    return {'error': None, 'saagie_root_url': SAAGIE_ROOT_URL or '', 'username': SAAGIE_USERNAME or ''}
def login_required(view):
    """Decorator sending unauthenticated users to the login form."""
    @wraps(view)
    def guarded(method, notebook, data, *args, **kwargs):
        if is_logged():
            return view(method, notebook, data, *args, **kwargs)
        return views.render('login_form', notebook)
    return guarded
@views.add
@login_required
def capsule_type_chooser(method, notebook, data):
    """Let the user pick which capsule (job) type to create."""
    return dict(username=SAAGIE_USERNAME)
def get_job_form(method, notebook, data):
    """Build the common template context for the job create/update forms."""
    context = {'platforms': notebook.get_platforms()}
    if notebook.current_job is None:
        # No job selected yet: start from an empty payload skeleton.
        context['values'] = {'current': {'options': {}}}
    else:
        context['values'] = notebook.current_job.data
    return context
def create_job_base_data(data):
    """Map submitted form fields to the Saagie job-creation payload skeleton."""
    current_version = {
        'cpu': data['cpu'],
        'disk': data['disk'],
        'memory': data['ram'],
        'isInternalSubDomain': False,
        'isInternalPort': False,
        'options': {},
    }
    return {
        'platform_id': data['saagie-platform'],
        'category': 'processing',
        'name': data['job-name'],
        'description': data['description'],
        'current': current_version,
    }
def upload_python_script(notebook, data):
    """Upload the selected notebook cells as a ``.py`` file.

    ``data['code-lines']`` is a '|'-separated list of code-cell indices.
    Returns the server-side file name of the uploaded script.

    Bug fix: when 'code-lines' was missing or empty, the original crashed
    with ``ValueError`` on ``int('')``; now all code cells are uploaded
    instead.
    """
    raw_indices = data.get('code-lines', '')
    indices = [int(i) for i in raw_indices.split('|')] if raw_indices else None
    code = notebook.get_code(indices)
    files = {'file': (data['job-name'] + '.py', code)}
    return requests.post(
        SCRIPT_UPLOAD_URL_PATTERN % data['saagie-platform'],
        files=files, auth=SAAGIE_BASIC_AUTH_TOKEN).json()['fileName']
@views.add
@login_required
def python_job_form(method, notebook, data):
    """Show the 'create Python job' form; on POST create and start the job."""
    if method == 'POST':
        platform_id = data['saagie-platform']
        job_data = create_job_base_data(data)
        # Python-capsule specific settings on top of the base payload.
        job_data['capsule_code'] = 'python'
        job_data['always_email'] = False
        job_data['manual'] = True
        job_data['retry'] = ''
        current = job_data['current']
        current['options']['language_version'] = data['language-version']
        current['releaseNote'] = data['release-note']
        current['template'] = data['shell-command']
        # Upload the notebook code first; the job references the stored file.
        current['file'] = upload_python_script(notebook, data)
        new_job_data = requests.post(JOBS_URL_PATTERN % platform_id,
                                     json=job_data, auth=SAAGIE_BASIC_AUTH_TOKEN).json()
        job = SaagieJob(notebook, new_job_data)
        job.set_as_current()
        return views.render('starting_job', notebook, {'job': job})
    # GET: render the creation form.
    context = get_job_form(method, notebook, data)
    context['action'] = '/saagie?view=python_job_form'
    context['username'] = SAAGIE_USERNAME
    return context
@views.add
@login_required
def update_python_job(method, notebook, data):
    """Show the 'update job' form; on POST push a new version of the current job."""
    if method == 'POST':
        job = notebook.current_job
        platform_id = job.platform_id
        # Reuse the creation payload builder; platform/name are pinned to the
        # existing job because they cannot change on upgrade.
        data['saagie-platform'] = platform_id
        data['job-name'] = job.name
        data['description'] = ''
        current = create_job_base_data(data)['current']
        current['options']['language_version'] = data['language-version']
        current['releaseNote'] = data['release-note']
        current['template'] = data['shell-command']
        current['file'] = upload_python_script(notebook, data)
        requests.post(JOB_UPGRADE_URL_PATTERN % (platform_id, job.id),
                      json={'current': current}, auth=SAAGIE_BASIC_AUTH_TOKEN)
        # Forget the previous run so the status page polls the new version.
        job.last_run = None
        return views.render('starting_job', notebook, {'job': job})
    # GET: render the update form pre-filled with the current job's values.
    context = get_job_form(method, notebook, data)
    context['action'] = '/saagie?view=update_python_job'
    context['username'] = SAAGIE_USERNAME
    return context
@views.add
@login_required
def select_python_job(method, notebook, data):
    """List existing jobs per platform; on POST make the chosen one current."""
    if method == 'POST':
        # The form encodes the selection as "<platform_id>-<job_id>".
        platform_id, job_id = data['job'].split('-')
        notebook.current_job = SaagieJob.from_id(notebook, platform_id, job_id)
        return views.render('update_python_job', notebook, data)
    jobs_by_platform = []
    for platform in notebook.get_platforms():
        jobs = platform.get_jobs()
        if jobs:
            # Fix: sorted() already returns a list, the list(...) wrapper
            # was redundant.  Newest job first.
            jobs_by_platform.append((platform, sorted(jobs, reverse=True)))
    return {'jobs_by_platform': jobs_by_platform,
            'action': '/saagie?view=select_python_job', 'username': SAAGIE_USERNAME}
@views.add
@login_required
def unsupported_kernel(method, notebook, data):
    """Tell the user this notebook's kernel cannot be deployed as a job."""
    return dict(username=SAAGIE_USERNAME)
@views.add
@login_required
def starting_job(method, notebook, data):
    """Poll the current job; hand off to the 'started' view once it runs."""
    job = notebook.current_job
    job.fetch_logs()
    if not job.is_started:
        return {'job': job, 'username': SAAGIE_USERNAME}
    return views.render('started_job', notebook, {'job': job})
@views.add
@login_required
def started_job(method, notebook, data):
    """Status page for the job that is currently running."""
    return {'username': SAAGIE_USERNAME, 'job': notebook.current_job}
@views.add
def logout(method, notebook, data):
    """Forget all cached Saagie credentials and connection settings."""
    global SAAGIE_BASIC_AUTH_TOKEN
    global SAAGIE_ROOT_URL
    global SAAGIE_USERNAME
    SAAGIE_BASIC_AUTH_TOKEN = SAAGIE_ROOT_URL = SAAGIE_USERNAME = None
    return {}
def load_jupyter_server_extension(nb_app):
    """Jupyter entry point: mount the /saagie handlers on the notebook server."""
    web_app = nb_app.web_app
    base_url = web_app.settings['base_url']
    route_pattern = url_path_join(base_url, '/saagie')
    web_app.add_handlers('.*$', [(route_pattern, SaagieHandler)])
    # Secondary endpoint — presumably a plugin-presence check used by the
    # front-end; confirm against SaagieCheckHandler's definition.
    route_pattern = url_path_join(base_url, '/saagie/check')
    web_app.add_handlers('.*$', [(route_pattern, SaagieCheckHandler)])
| {
"repo_name": "saagie/jupyter-saagie-plugin",
"path": "saagie/server_extension.py",
"copies": "1",
"size": "16090",
"license": "apache-2.0",
"hash": 8740069840236345000,
"line_mean": 31.9713114754,
"line_max": 133,
"alpha_frac": 0.6113735239,
"autogenerated": false,
"ratio": 3.5971383858707804,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9702118035592313,
"avg_score": 0.001278774835693522,
"num_lines": 488
} |
from functools import wraps
import json
import os
from django.shortcuts import render, render_to_response
from django import forms
from django import VERSION as DJANGO_VERSION
from django.template import RequestContext
from django.db.models import signals as signalmodule
from django.http import HttpResponse
from django.conf import settings
from django.core.serializers.json import DjangoJSONEncoder
from django.utils import six
__all__ = ['render_to', 'signals', 'ajax_request', 'autostrip']
def render_to(template=None, content_type=None):
    """
    Decorator for Django views that sends returned dict to render_to_response
    function.
    Template name can be decorator parameter or TEMPLATE item in returned
    dictionary. RequestContext always added as context instance.
    If view doesn't return dict then decorator simply returns output.
    Parameters:
    - template: template name to use
    - content_type: content type to send in response headers
    Examples:
    # 1. Template name in decorator parameters
    @render_to('template.html')
    def foo(request):
        bar = Bar.object.all()
        return {'bar': bar}
    # equals to
    def foo(request):
        bar = Bar.object.all()
        return render_to_response('template.html',
                                  {'bar': bar},
                                  context_instance=RequestContext(request))
    # 2. Template name as TEMPLATE item value in return dictionary.
    If TEMPLATE is given then its value will have higher priority
    than render_to argument.
    @render_to()
    def foo(request, category):
        template_name = '%s.html' % category
        return {'bar': bar, 'TEMPLATE': template_name}
    # equals to
    def foo(request, category):
        template_name = '%s.html' % category
        return render_to_response(template_name,
                                  {'bar': bar},
                                  context_instance=RequestContext(request))
    """
    def renderer(function):
        @wraps(function)
        def wrapper(request, *args, **kwargs):
            output = function(request, *args, **kwargs)
            if not isinstance(output, dict):
                return output
            tmpl = output.pop('TEMPLATE', template)
            if tmpl is None:
                # Default template: "<app package path>/<view name>.html".
                template_dir = os.path.join(*function.__module__.split('.')[:-1])
                # Bug fix: ``func_name`` is Python-2-only and raises
                # AttributeError under Python 3; ``__name__`` works on both.
                tmpl = os.path.join(template_dir, function.__name__ + ".html")
            # Explicit version check to avoid swallowing other exceptions
            if DJANGO_VERSION >= (1, 9):
                return render(request, tmpl, output,
                              content_type=content_type)
            else:
                return render_to_response(tmpl, output,
                                          context_instance=RequestContext(request),
                                          content_type=content_type)
        return wrapper
    return renderer
class Signals(object):
    '''
    Convenient wrapper for working with Django's signals (or any other
    implementation using same API).
    Example of usage::
        # connect to registered signal
        @signals.post_save(sender=YourModel)
        def sighandler(instance, **kwargs):
            pass
        # connect to any signal
        signals.register_signal(siginstance, signame) # and then as in example above
        or
        @signals(siginstance, sender=YourModel)
        def sighandler(instance, **kwargs):
            pass
    In any case defined function will remain as is, without any changes.
    (c) 2008 Alexander Solovyov, new BSD License
    '''
    def __init__(self):
        self._signals = {}
        # register all Django's default signals
        for k, v in signalmodule.__dict__.items():
            # that's hardcode, but IMHO it's better than isinstance
            if not k.startswith('__') and k != 'Signal':
                self.register_signal(v, k)

    def __getattr__(self, name):
        # Bug fix: a missing attribute must raise AttributeError (not
        # KeyError) so hasattr()/getattr() follow the Python data model.
        try:
            signal = self._signals[name]
        except KeyError:
            raise AttributeError(name)
        return self._connect(signal)

    def __call__(self, signal, **kwargs):
        """Decorator connecting the decorated function to *signal*."""
        def inner(func):
            signal.connect(func, **kwargs)
            return func
        return inner

    def _connect(self, signal):
        # Curried form: signals.post_save(sender=Model) returns a decorator.
        def wrapper(**kwargs):
            return self(signal, **kwargs)
        return wrapper

    def register_signal(self, signal, name):
        """Expose *signal* as an attribute called *name*."""
        self._signals[name] = signal
signals = Signals()
# Serializers keyed by MIME type; the YAML entries are only added below
# when PyYAML is importable.
FORMAT_TYPES = {
    'application/json': lambda response: json.dumps(response, cls=DjangoJSONEncoder),
    'text/json': lambda response: json.dumps(response, cls=DjangoJSONEncoder),
}
try:
    import yaml
except ImportError:
    # PyYAML not installed: only the JSON output formats are available.
    pass
else:
    FORMAT_TYPES.update({
        'application/yaml': yaml.dump,
        'text/yaml': yaml.dump,
    })
def ajax_request(func):
    """
    If view returned serializable dict, returns response in a format requested
    by HTTP_ACCEPT header. Defaults to JSON if none requested or match.
    Currently supports JSON or YAML (if installed), but can easily be extended.
    example:
        @ajax_request
        def my_view(request):
            news = News.objects.all()
            news_titles = [entry.title for entry in news]
            return {'news_titles': news_titles}
    """
    @wraps(func)
    def wrapper(request, *args, **kwargs):
        # Pick the first accepted MIME type we can serialize; otherwise JSON.
        for accepted_type in request.META.get('HTTP_ACCEPT', '').split(','):
            # Fix: membership tests the dict directly; .keys() was redundant.
            if accepted_type in FORMAT_TYPES:
                format_type = accepted_type
                break
        else:
            format_type = 'application/json'
        response = func(request, *args, **kwargs)
        if not isinstance(response, HttpResponse):
            if hasattr(settings, 'FORMAT_TYPES'):
                # Project-level override: the handler may be a callable or a
                # dotted path to one.  NOTE(review): a handler of any other
                # type leaves ``data`` unbound — confirm settings contract.
                format_type_handler = settings.FORMAT_TYPES[format_type]
                if hasattr(format_type_handler, '__call__'):
                    data = format_type_handler(response)
                elif isinstance(format_type_handler, six.string_types):
                    mod_name, func_name = format_type_handler.rsplit('.', 1)
                    module = __import__(mod_name, fromlist=[func_name])
                    function = getattr(module, func_name)
                    data = function(response)
            else:
                data = FORMAT_TYPES[format_type](response)
            response = HttpResponse(data, content_type=format_type)
            # NOTE(review): len(data) counts characters, not bytes — for
            # non-ASCII payloads Content-Length may be wrong; confirm.
            response['content-length'] = len(data)
        return response
    return wrapper
def autostrip(cls):
    """
    strip text fields before validation
    example:
        class PersonForm(forms.Form):
            name = forms.CharField(min_length=2, max_length=10)
            email = forms.EmailField()
        PersonForm = autostrip(PersonForm)
        #or you can use @autostrip in python >= 2.6
    Author: nail.xx
    """
    def _stripping(original_clean):
        # Bind original_clean per field now, avoiding the late-binding
        # closure pitfall.
        return lambda value: original_clean(value and value.strip())

    for name, field in cls.base_fields.items():
        if isinstance(field, forms.CharField):
            field.clean = _stripping(field.clean)
    return cls
| {
"repo_name": "kabakchey/django-annoying",
"path": "annoying/decorators.py",
"copies": "1",
"size": "7182",
"license": "bsd-3-clause",
"hash": 7466009084729810000,
"line_mean": 31.4977375566,
"line_max": 107,
"alpha_frac": 0.6069340017,
"autogenerated": false,
"ratio": 4.43059839605182,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.553753239775182,
"avg_score": null,
"num_lines": null
} |
from functools import wraps
import json
import os
from dotenv import Dotenv
from flask import Flask
from flask import redirect
from flask import render_template
from flask import request, Response
from flask import send_from_directory
from flask import session
import requests
import constants
import subprocess
# Prefer a local .env file for configuration; fall back to the process
# environment when the file is absent.
env = None
try:
    env = Dotenv('./.env')
except IOError:
    env = os.environ
app = Flask(__name__, static_url_path='')
app.secret_key = constants.SECRET_KEY
# NOTE(review): debug=True is unsafe outside local development.
app.debug = True
# Requires authentication annotation
def requires_auth(f):
    """Redirect to the landing page unless an Auth0 profile is in session."""
    @wraps(f)
    def decorated(*args, **kwargs):
        if constants.PROFILE_KEY in session:
            return f(*args, **kwargs)
        return redirect('/')
    return decorated
@app.after_request
def apply_caching(response):
    """Stamp every response with headers disabling client/proxy caching."""
    no_cache = 'no-store, no-cache, must-revalidate, post-check=0, pre-check=0'
    response.headers.add('Cache-Control', no_cache)
    return response
# Controllers API
@app.route('/')
def home():
    """Landing page; passes the environment mapping to the template."""
    return render_template('home.html', env=env)
@app.route('/upload_file')
@requires_auth
def uploadFile():
    """Render the file-upload form for the logged-in user."""
    current_user = session[constants.PROFILE_KEY]["name"]
    return render_template('uploadFile.html', user=current_user)
@app.route('/upload_success', methods=['GET', 'POST'])
@requires_auth
def uploadSuccess():
    """Accept an uploaded source file and launch the matching analyzer.

    ``.c``/``.cpp`` files are analyzed with flawfinder, ``.py`` with pylint;
    anything else is rejected.  Output is written asynchronously to
    ``/home/ubuntu/<user>_data/output.txt``.
    """
    file_to_analyze = request.files['file']
    user = session[constants.PROFILE_KEY]["name"]
    if file_to_analyze is not None:
        file_name = str(file_to_analyze.filename)
        data_path = "/home/ubuntu/" + user + "_data"
        if not os.path.exists(data_path):
            os.makedirs(data_path)
        os.chdir(data_path)
        if file_name.endswith(".c") or file_name.endswith(".cpp"):
            analyzer = "flawfinder"
        elif file_name.endswith(".py"):
            analyzer = "pylint"
        else:
            return render_template("notSuccessful.html")
        # REMOVE THE OLD FILE AND RESULTS BEFORE ADDING THE NEW ONE
        delete_old_files(data_path, file_name, file_to_analyze)
        # Security fix: the original interpolated the user-supplied file
        # name into a shell=True command string (command injection).  Run
        # the analyzer with an argument list and redirect stdout ourselves.
        output = open(os.path.join(data_path, "output.txt"), "w")
        subprocess.Popen([analyzer, os.path.join(data_path, file_name)],
                         stdout=output)
        output.close()
        return render_template("uploadSuccess.html")
    return render_template("notSuccessful.html")
def delete_old_files(data_path, file_name, file_to_analyze):
    """Empty *data_path*, then store the uploaded file there as *file_name*."""
    for stale in os.listdir(data_path):
        os.remove(os.path.join(data_path, stale))
    file_to_analyze.save(os.path.join(data_path, file_name))
""" AUTH0 was used to provide login functionality. The following code has been taken from
https://auth0.com/docs/quickstart/backend/python"""
""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""
@app.route('/view_result')
@requires_auth
def viewResult():
    """Show the analyzer output for the current user, if any exists."""
    user = session[constants.PROFILE_KEY]["name"]
    result_path = "/home/ubuntu/" + user + "_data/output.txt"
    output_text = "No results to show!!"
    if os.path.exists(result_path):
        with open(result_path) as output:
            output_text = output.read()
    return render_template('viewResult.html', user=session[constants.PROFILE_KEY]["name"], output=output_text)
@app.route('/public/<path:filename>')
def static_files(filename):
    """Serve static assets from the local ./public directory."""
    public_dir = './public'
    return send_from_directory(public_dir, filename)
@app.route('/callback', methods=['GET'])
def callback_handling():
    """Auth0 OAuth callback: exchange the authorization code for a token,
    load the user profile into the session, then continue to upload."""
    code = request.args.get(constants.CODE_KEY)
    json_header = {constants.CONTENT_TYPE_KEY: constants.APP_JSON_KEY}
    token_url = 'https://{auth0_domain}/oauth/token'.format(auth0_domain=env[constants.AUTH0_DOMAIN])
    # Authorization-code grant: trade the code for an access token.
    token_payload = {
        constants.CLIENT_ID_KEY: env[constants.AUTH0_CLIENT_ID],
        constants.CLIENT_SECRET_KEY: env[constants.AUTH0_CLIENT_SECRET],
        constants.REDIRECT_URI_KEY: env[constants.AUTH0_CALLBACK_URL],
        constants.CODE_KEY: code,
        constants.GRANT_TYPE_KEY: constants.AUTHORIZATION_CODE_KEY
    }
    token_info = requests.post(token_url, data=json.dumps(token_payload),
                               headers=json_header).json()
    # Fetch the user's profile with the token and cache it in the session.
    user_url = 'https://{auth0_domain}/userinfo?access_token={access_token}'.format(
        auth0_domain=env[constants.AUTH0_DOMAIN], access_token=token_info[constants.ACCESS_TOKEN_KEY])
    user_info = requests.get(user_url).json()
    session[constants.PROFILE_KEY] = user_info
    return redirect('/upload_file')
""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""
@app.route('/logout')
@requires_auth
def logout():
    """Clear the local session and end the Auth0 session remotely."""
    print("logging out...")
    session.pop(constants.PROFILE_KEY, None)
    # Bug fix: the response was previously bound to a local named ``logout``,
    # shadowing this view function; the result is unused, so don't bind it.
    requests.get("https://sanikasudhalkar.auth0.com/v2/logout")
    return render_template('logout.html')
if __name__ == '__main__':
    # Dev server bound to the EC2 public hostname; debug=True is unsafe in
    # production deployments.
    app.run(host='ec2-35-164-166-43.us-west-2.compute.amazonaws.com', debug=True)
| {
"repo_name": "sanikasudhalkar/Projects",
"path": "StaticAnalysisTools/main.py",
"copies": "1",
"size": "5274",
"license": "mit",
"hash": -2485740649010356000,
"line_mean": 33.6973684211,
"line_max": 113,
"alpha_frac": 0.6338642397,
"autogenerated": false,
"ratio": 3.4493132766514063,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4583177516351406,
"avg_score": null,
"num_lines": null
} |
from functools import wraps
import json
import pdb
from core.db.manager import DataHubManager
from django.db import connection
from util import pick
def get_cache(username):
    """Ensure the user's ``_dbwipes_cache`` table exists (idempotent)."""
    try:
        manager = DataHubManager(user=username)
        manager.execute_sql(
            "create table _dbwipes_cache(key varchar, val text)")
    # Fix: a bare ``except:`` also swallowed SystemExit/KeyboardInterrupt.
    # Failures are still ignored on purpose — the table usually exists.
    except Exception:
        pass
def make_cache(f):
    """Memoize a Summary method's JSON-serializable result in _dbwipes_cache.

    The cache key combines the method name, database, table, filter, bucket
    count and the stringified positional arguments.
    """
    @wraps(f)
    def _f(self, *args, **kwargs):
        # Fix: the key is computed before the guarded lookup so the write
        # below can never hit an unbound name (the original could raise
        # NameError on ``key`` when key construction itself failed).
        key = str(map(str, (f.__name__, self.dbname, self.tablename,
                            self.where, self.nbuckets, map(str, args))))
        try:
            manager = DataHubManager(user=self.username)
            vals = manager.execute_sql(
                'select val from _dbwipes_cache where key = %s',
                params=(key,))['tuples']
            if len(vals):
                return json.loads(vals[0][0])
        except Exception as e:
            # Cache-read failures fall through to recomputing the value.
            # (Fix: removed a stray pdb.set_trace() left in from debugging.)
            print(e)
        res = f(self, *args, **kwargs)
        manager = DataHubManager(user=self.username)
        manager.execute_sql(
            'insert into _dbwipes_cache values(%s, %s)',
            params=(key, json.dumps(res, default=json_handler)))
        return res
    return _f
def json_handler(o):
    """json.dumps fallback: ISO-format date/datetime objects, None otherwise."""
    if hasattr(o, 'isoformat'):
        return o.isoformat()
    return None
class Summary(object):
    """Computes per-column distribution summaries for a DataHub table.

    Expensive queries are memoized in the ``_dbwipes_cache`` table via the
    ``make_cache`` decorator.
    """

    def __init__(self, dbname, tablename, username, nbuckets=50, where=''):
        self.username = username
        self.dbname = dbname
        self.tablename = tablename
        self.nbuckets = nbuckets
        self.where = ''
        where = where.strip()
        if where:
            self.where = 'WHERE %s' % where
        # make sure cache exists
        get_cache(username)
        self.nrows = self.get_num_rows()
        self.col_types = self.get_columns_and_types()

    def __call__(self):
        """Return [(column, type, stats), ...] for every summarizable column."""
        stats = []
        for col, typ in self.col_types:
            col_stats = self.get_col_stats(col, typ)
            if col_stats is None:
                # Column type we don't know how to bucket; skip it.
                continue
            stats.append((col, typ, col_stats))
        return stats

    def close(self):
        pass

    def reset_cache(self):
        """Drop this table's memoized summaries from the cache table.

        Bug fix: the original referenced a nonexistent ``self.engine``
        attribute (guaranteed AttributeError) and a ``cache`` table that is
        never created.  Keys are built from dbname/tablename by make_cache —
        presumably matching on those is the intent; confirm key format.
        """
        q = """delete from _dbwipes_cache where key like '%%%%%s%%%%%s%%%%'""" % (
            self.dbname, self.tablename)
        manager = DataHubManager(user=self.username)
        manager.execute_sql(q)

    def query(self, q, *args):
        """
        Summaries using other engines only need to override this method
        """
        manager = DataHubManager(user=self.username)
        return manager.execute_sql(q, params=args)['tuples']

    @make_cache
    def get_num_rows(self):
        """Total row count of the table (unfiltered)."""
        q = "SELECT count(*) from %s" % self.tablename
        return self.query(q)[0][0]

    @make_cache
    def get_distinct_count(self, col):
        """Distinct-value count of *col* under the current filter."""
        q = "SELECT count(distinct %s) FROM %s %s" % (col, self.tablename, self.where)
        return self.query(q)[0][0]

    @make_cache
    def get_column_counts(self, cols):
        """Map each column in *cols* to its distinct-value count."""
        q = """SELECT %s FROM %s"""
        select = ["count(distinct %s)" % col for col in cols]
        select = ", ".join(select)
        q = q % (select, self.tablename)
        counts = tuple(self.query(q)[0])
        return dict(zip(cols, counts))

    @make_cache
    def get_columns_and_types(self):
        """Return [(column, simplified_type), ...].

        Postgres types collapse to 'str', 'num' or 'time'; anything else is
        passed through verbatim.
        """
        manager = DataHubManager(user=self.username)
        tokens = self.tablename.split('.')
        repo = tokens[0]
        table = tokens[1]
        rows = manager.get_schema(repo, table)
        ret = []
        for col, typ in rows:
            if typ == 'text':
                typ = 'str'
            if 'double' in typ:
                typ = 'num'
            if 'int' in typ:
                typ = 'num'
            if 'date' in typ or 'time' in typ:
                typ = 'time'
            ret.append((str(col), str(typ)))
        return ret

    @make_cache
    def get_columns(self):
        """
        engine specific way to get table columns
        """
        return pick(self.get_columns_and_types(), 0)

    @make_cache
    def get_type(self, col_name):
        """Simplified type of *col_name*, or None when unknown."""
        return dict(self.get_columns_and_types()).get(col_name, None)

    def get_col_groupby(self, col_name, col_type):
        """SQL group-by expression for temporal columns, else None."""
        if col_type is None:  # idiom fix: was ``== None``
            return None
        groupby = None
        if 'time' == col_type:
            groupby = self.get_time_stats(col_name)
        if 'date' in col_type or 'timestamp' in col_type:
            groupby = self.get_date_stats(col_name)
        return groupby

    @make_cache
    def get_col_stats(self, col_name, col_type=None):
        """Distribution stats for one column, dispatched on its type."""
        if col_type is None:
            col_type = self.get_type(col_name)
        numerics = ['int', 'float', 'double', 'numeric', 'num']
        is_numeric = any([s in col_type for s in numerics])
        if is_numeric:
            stats = self.get_numeric_stats(col_name)
            return stats
        if any([s in col_type for s in ['char', 'text', 'str']]):
            return self.get_char_stats(col_name)
        groupby = self.get_col_groupby(col_name, col_type)
        if groupby:
            stats = self.get_group_stats(col_name, groupby)
            return stats
        return None

    def get_group_stats(self, col_name, groupby):
        """Per-group min/max/count using *groupby* as the bucketing expression."""
        q = """select %s as GRP, min(%s), max(%s), count(*)
        from %s %s group by GRP
        order by GRP limit %d"""
        q = q % (groupby, col_name, col_name, self.tablename, self.where, self.nbuckets)
        rows = [{'val': x, 'count': count, 'range': [minv, maxv]}
                for (x, minv, maxv, count) in self.query(q)]
        return rows

    def get_numeric_stats(self, c):
        """Equi-width histogram with buckets sized from the column's stddev."""
        ndistinct = self.get_distinct_count(c)
        if ndistinct == 0:
            return []
        if ndistinct == 1:
            # Single value: one bucket covering every (filtered) row.
            if self.where:
                q = "SELECT %s from %s %s AND %s is not null"
                args = (c, self.tablename, self.where, c)
            else:
                q = "SELECT %s from %s WHERE %s is not null"
                args = (c, self.tablename, c)
            val = self.query(q % args)[0][0]
            return [{'val': val, 'count': self.nrows, 'range': [val, val]}]
        # Dead code removed: a width_bucket-based query was built here and
        # immediately overwritten by the query below.
        q = """
        with TMP as (
            SELECT 2.5 * stddev(%s) / %d as block FROM %s %s
        )
        SELECT (%s/block)::int*block as bucket,
               min(%s) as min,
               max(%s) as max,
               count(*) as count
        FROM %s, TMP
        %s
        GROUP BY bucket
        ORDER BY bucket
        """
        q = q % (c, self.nbuckets, self.tablename, self.where,
                 c, c, c, self.tablename, self.where)
        stats = []
        for (val, minv, maxv, count) in self.query(q):
            if val is None:
                stats.append({
                    'val': None,
                    'count': count,
                    'range': [minv, maxv]
                })
            else:
                # Report the bucket's midpoint as its representative value.
                stats.append({
                    'val': (maxv + minv) / 2.,
                    'count': count,
                    'range': [minv, maxv]
                })
        return stats

    def get_char_stats(self, col_name):
        """Top-N most frequent values for a text column."""
        q = """select %s as GRP, min(%s), max(%s), count(*)
        FROM %s
        %s
        GROUP BY GRP
        ORDER BY count(*) desc
        LIMIT %d"""
        q = q % (col_name, col_name, col_name, self.tablename, self.where, self.nbuckets)
        rows = [{'val': x, 'count': count, 'range': [minv, maxv]}
                for (x, minv, maxv, count) in self.query(q)]
        # (Unreachable statements that followed this return were removed.)
        return rows

    def get_time_stats(self, col_name):
        """Group-by expression bucketing a time-of-day column by hour."""
        return "date_trunc('hour', %s)::time" % col_name

    def get_date_stats(self, col_name):
        """Pick a date_trunc granularity appropriate to the column's span."""
        q = "select max(%s)::date, min(%s)::date, EXTRACT(EPOCH FROM (max(%s::timestamp) - min(%s::timestamp)))/60 as minutes from %s"
        q = q % (col_name, col_name, col_name, col_name, self.tablename)
        (maxv, minv, nminutes) = self.query(q)[0]
        if maxv is None or minv is None or nminutes is None:
            return None
        ndays = nminutes / 60 / 24
        var = "%s::timestamp" % col_name
        if ndays == 0:
            groupby = "date_trunc('hour', %s)" % var
        elif ndays <= 30:
            groupby = "date_trunc('day', %s)" % var
        elif ndays <= 50 * 7:
            groupby = "date_trunc('week', %s)" % var
        elif ndays <= 365 * 12:
            groupby = "date_trunc('month', %s)" % var
        else:
            groupby = "date_trunc('year', %s)" % var
        return groupby
| {
"repo_name": "RogerTangos/datahub-stub",
"path": "src/apps/dbwipes/summary.py",
"copies": "1",
"size": "8173",
"license": "mit",
"hash": 3274024851033349600,
"line_mean": 25.4498381877,
"line_max": 130,
"alpha_frac": 0.5771442555,
"autogenerated": false,
"ratio": 3.1938257131692067,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9172878508840434,
"avg_score": 0.01961829196575457,
"num_lines": 309
} |
from functools import wraps
import json
import sys
import traceback
from copy import deepcopy
from flask.ext import restful
from flask import make_response, request, Markup, g, current_app
from werkzeug.exceptions import HTTPException
from hoops.status import library as status_library
from hoops.response import APIResponse
from hoops.exc import APIException, APIValidationException
from hoops.status import APIStatus
from hoops.utils import Struct
import logging
# Canned API statuses for common HTTP codes; any other code gets a status
# synthesized on the fly (see handle_error / prepare_output).
error_map = {
    200: status_library.API_OK,
    403: status_library.API_FORBIDDEN,
    404: status_library.API_RESOURCE_NOT_FOUND,
    405: status_library.API_INVALID_REQUEST_METHOD,
    500: status_library.API_UNHANDLED_EXCEPTION,
    501: status_library.API_CODE_NOT_IMPLEMENTED,
}
# Separate loggers so deployments can route info/request/error streams apart.
api_logger = logging.getLogger('api.info')
request_logger = logging.getLogger('api.request')
api_error_logger = logging.getLogger('api.error')
error_logger = logging.getLogger('error')
class Resource(restful.Resource):
    """Base resource class; all API resources should inherit from this."""
    # applies to all inherited resources; OauthAPI will append 'require_oauth' on init
    method_decorators = []
class API(restful.Api):
    """flask-restful Api wrapper that wraps all output in APIResponse envelopes."""

    def __init__(self, *args, **kwargs):
        super(API, self).__init__(*args, **kwargs)
        self.representations = {
            #'application/xml': output_xml,
            #'text/xml': output_xml,
            'application/json': output_json,
        }

    def make_response(self, *args, **kwargs):
        """Build the response, logging it (and any 5xx as an exception)."""
        response = restful.Api.make_response(self, *args, **kwargs)
        try:
            message = getattr(args[0], 'response', None).get('status_message', None)
        # Bug fix: was a bare ``except:``, which also swallowed
        # SystemExit/KeyboardInterrupt; Exception covers the intended cases
        # (missing attribute -> None.get raising AttributeError, etc.).
        except Exception:
            message = args[0]
        request_logger.info('%s: %s', response.data, message)
        if response.status_code >= 500:
            error_logger.exception('%s: %s', response.data, message)
        return response

    def handle_error(self, e):
        """Convert any raised exception into a uniform APIResponse."""
        if isinstance(e, HTTPException):
            return self.make_response(
                APIResponse(None, status=error_map.get(e.code, APIStatus(http_status=e.code, status_code=e.code * 10, message=e.description))),
                e.code)
        elif isinstance(e, APIValidationException):
            return self.make_response(
                APIResponse(None, status=e.status, extra=e.extra),
                e.status.http_status)
        elif isinstance(e, APIException):
            return self.make_response(
                APIResponse(None, status=e.status),
                e.status.http_status)
        status = status_library.API_UNHANDLED_EXCEPTION
        if current_app.config.get('DEBUG'):
            # In debug mode expose the exception and traceback in the payload.
            tb_info = sys.exc_info()
            return self.make_response(
                APIResponse(None, status=status, extra={
                    'exception': traceback.format_exception_only(tb_info[0], tb_info[1])[0],
                    'traceback': traceback.extract_tb(tb_info[2])
                }), status.http_status)
        # We don't use the default error handler
        # e.g.: return super(API, self).handle_error(e)
        return self.make_response(
            APIResponse(None, status=status), status.http_status)

    def _should_use_fr_error_handler(self):
        """ Determine if error should be handled with FR or default Flask
        Return True since we need all errors handled in above handler.
        """
        return True

    def mediatypes(self):
        """Replaces the acceptable media types with application/json if the request came from a browser.
        Also looks for output_type parameter.
        """
        if request.args.get('output_format', '') == 'xml' or request.form.get('output_format', '') == 'xml':
            return ['application/xml']
        elif request.args.get('output_format', '') == 'json' or request.form.get('output_format', '') == 'json':
            return ['application/json']
        if (('text/html' in request.accept_mimetypes or
                'application/xhtml+xml' in request.accept_mimetypes)
                and 'Mozilla' in request.user_agent.string):
            return ['application/json']
        return super(API, self).mediatypes()

    def register(self, cls):
        """Add a resource class at its declared route(s)."""
        routes = [cls.route] if cls.route else []
        object_route = getattr(cls, 'object_route', None)
        if object_route:
            routes.append(object_route)
        if routes:
            [api_logger.debug('Adding route %s' % route) for route in routes]
            self.add_resource(cls, *routes, endpoint=cls.route)
class OAuthAPI(API):
    '''Only a single API at a time can be supported. Using OAuthAPI causes all resources to require OAuth.'''
    def __init__(self, *args, **kwargs):
        # TODO:
        # - make oauth app specific (e.g. extra params, diff Resource for Oauth inheritence, etc?)
        # - allow adhok usage of oauth on some Resource objects
        # - define alternate oauth arg handling methods besides static creds
        oauth_args = kwargs['oauth_args']
        del(kwargs['oauth_args'])
        # NOTE(review): super(API, ...) skips API.__init__, so OAuthAPI keeps
        # flask-restful's default representations instead of output_json —
        # confirm this is intended rather than super(OAuthAPI, ...).
        super(API, self).__init__(*args, **kwargs)
        # Class-level mutation: every Resource now authenticates via OAuth.
        Resource.method_decorators = [require_oauth]
        Resource.oauth_args = Struct(**oauth_args)
def require_oauth(func):
    '''Auth wrapper from http://flask-restful.readthedocs.org/en/latest/extending.html?highlight=authentication'''
    @wraps(func)
    def wrapper(*args, **kwargs):
        from hoops.oauth_provider import oauth_authentication
        # TODO: read server_oauth_creds from args/func
        server_oauth_creds = {}
        if not oauth_authentication(server_oauth_creds):
            # This is highly unlikely to occur, as oauth raises exceptions on problems
            restful.abort(401)  # pragma: no cover
        return func(*args, **kwargs)
    return wrapper
def prepare_output(data, code, headers=None):
    """Normalize view output to (serializable dict, http status code).

    Non-APIResponse payloads are wrapped in an APIResponse using the
    error_map status for *code* (or a synthesized one).
    """
    if not isinstance(data, APIResponse):
        data = APIResponse(data, status=error_map.get(code, APIStatus(
            http_status=code, status_code=code * 10, message=data
        )))
    out = data.to_json()
    code = data.status.http_status
    # Log a short preview at INFO and the full body at DEBUG.
    return_string = unicode(data.response)
    response_data = unicode(data.response.get('response_data')) if data.response.get('response_data') else return_string
    request_logger.info('Response %d chars: %s...', len(return_string), unicode(response_data[:50]))
    request_logger.debug('Response body: %s', return_string)
    return out, code
def output_json(data, code, headers=None):
    """Makes a Flask response with a JSON encoded body"""
    out, code = prepare_output(data, code, headers)
    body = json.dumps(out,
                      sort_keys=True,
                      indent=4,
                      separators=(',', ': '))
    resp = make_response(body, code)
    resp.headers.extend(headers or {})
    return resp
#def output_xml(data, code, headers=None):
# """Makes a Flask response with a XML encoded body"""
# out, code = prepare_output(data, code, headers)
# resp = xmlify(out)
# resp.code = code
# resp.headers.extend(headers or {})
#
# return resp
#
#
#def xmlify(output):
# """
# xmlfy takes a dictionary and converts it to xml.
# """
# XML_DECLARATION = '<?xml version="1.0" encoding="UTF-8"?>'
# nodes = serialize_xml({'jetlaunch': output})
#
# r = make_response(Markup(XML_DECLARATION + ''.join(etree.tostring(node) for node in nodes)))
# r.mimetype = 'text/xml'
#
# return r
#
#
#def serialize_xml(root):
# node = None
# node_stack = []
# for key in root.keys():
# node = etree.Element(key)
# if isinstance(root[key], dict):
# inner_node_stack = serialize(root[key])
# for inner_node in inner_node_stack:
# node.append(inner_node)
# elif isinstance(root[key], list):
# for item in root[key]:
# itemnode = etree.Element('item') # magic string
# inner_node_stack = serialize(item)
# for inner_node in inner_node_stack:
# itemnode.append(inner_node)
# node.append(itemnode)
# else:
# if root[key] is not None:
# node.text = unicode(root[key])
# node_stack.append(node)
#
# return node_stack
| {
"repo_name": "jfillmore/hoops",
"path": "hoops/restful.py",
"copies": "1",
"size": "8252",
"license": "mit",
"hash": 2706278128881264000,
"line_mean": 37.0276497696,
"line_max": 143,
"alpha_frac": 0.6177896268,
"autogenerated": false,
"ratio": 3.825683820120538,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9935228151323434,
"avg_score": 0.0016490591194207027,
"num_lines": 217
} |
from functools import wraps
import json
from core.db.manager import DataHubManager
from util import pick
# Repo and table names used to store DBWipes' per-table metadata cache.
dbwipes_repo = 'dbwipes_cache'
dbwipes_table = 'dbwipes_cache'
def does_cache_exist(repo_base):
    """Check whether the DBWipes cache repo and table exist for `repo_base`
    and have the expected (key varchar, val text) schema."""
    manager = DataHubManager(repo_base)
    # check for repo
    if dbwipes_repo not in manager.list_repos():
        return False
    # check for table
    if dbwipes_table not in manager.list_tables(dbwipes_repo):
        return False
    # check for schema
    schema = manager.get_schema(dbwipes_repo, dbwipes_table)
    return schema == [('key', 'character varying'), ('val', 'text')]
def create_cache(username):
    """ DBWipes stores some metadata about the table in a schema in the owner's
    database. Note that this is not necessarily the current user's DB

    Returns True on success, False (after printing the error) on failure.
    """
    ddl = ('create table if not exists %s.dbwipes_cache'
           '(key varchar, val text)') % dbwipes_repo
    try:
        manager = DataHubManager(user=username)
        manager.create_repo(dbwipes_repo)
        manager.execute_sql(ddl)
    except Exception as e:
        print(e)
        return False
    return True
def insert_into_cache(f):
    """Decorator: memoize the wrapped Summary method's result in the DBWipes
    cache table, keyed on the method name, table identity and arguments.

    Cache reads are best-effort: any failure falls through to calling the
    wrapped method directly. Results are JSON-encoded on insert.
    """
    @wraps(f)
    def _f(self, *args, **kwargs):
        # Initialize up front: previously a failure while building the key
        # raised NameError at the `if key:` check below instead of skipping
        # the cache write.
        key = None
        try:
            # list(...) keeps the key deterministic on both py2 and py3
            # (py3's lazy map repr embeds a memory address, which would
            # defeat the cache; on py2 map already returns a list, so the
            # key format is unchanged).
            key = str(list(map(str, (f.__name__, self.repo, self.tablename,
                                     self.where, self.nbuckets,
                                     list(map(str, args))))))
            query = 'select val from {}.dbwipes_cache where key = %s'.format(
                dbwipes_repo)
            manager = DataHubManager(user=self.repo_base)
            vals = manager.execute_sql(query, (key,))['tuples']
            if len(vals) > 0:
                return json.loads(vals[0][0])
        except Exception as e:
            print(e)
        res = f(self, *args, **kwargs)
        if key:
            # Only write back when the cache key was built successfully.
            value = json.dumps(res, default=json_handler)
            params = (key, value)
            q = 'insert into ' + dbwipes_repo + '.dbwipes_cache values(%s, %s)'
            manager = DataHubManager(user=self.repo_base)
            manager.execute_sql(q, params)
        return res
    return _f
def json_handler(o):
    # json.dumps(default=...) hook: serialize datetime/date objects via
    # isoformat(). NOTE(review): falls through to returning None for any
    # other type, which silently encodes unserializable objects as JSON
    # null instead of raising TypeError -- confirm intended before changing.
    if hasattr(o, 'isoformat'):
        return o.isoformat()
class Summary(object):
    """Computes per-column summary statistics (bucketed histograms) for a
    DataHub table, memoizing expensive queries via @insert_into_cache.

    NOTE(review): SQL below is assembled with %-interpolation of repo,
    table and column identifiers -- callers must pass trusted names only.
    """

    def __init__(self, repo, tablename, username, repo_base=None,
                 nbuckets=50, where=''):
        # tablename is expected in 'repo.table' form (see
        # get_columns_and_types, which splits on '.').
        self.username = username
        self.repo_base = repo_base
        self.repo = repo
        self.tablename = tablename
        self.nbuckets = nbuckets
        self.where = ''
        where = where.strip()
        if where:
            # Pre-render the optional filter as a complete WHERE clause so
            # queries can splice it in directly.
            self.where = 'WHERE %s' % where
        # make sure cache exists
        # create_cache(username)
        self.nrows = self.get_num_rows()
        self.col_types = self.get_columns_and_types()

    def __call__(self):
        # Collect (column, type, stats) triples, skipping columns for which
        # no stats could be computed.
        stats = []
        for col, typ in self.col_types:
            # print "stats for: %s\t%s" % (col, typ)
            col_stats = self.get_col_stats(col, typ)
            if col_stats is None:
                # print "\tgot None"
                continue
            # print "\tgot %d" % (len(col_stats))
            stats.append((col, typ, col_stats))
        return stats

    def close(self):
        # No resources to release; kept for API compatibility.
        pass

    def reset_cache(self):
        # NOTE(review): `self.engine` is never assigned in __init__, so this
        # raises AttributeError if called; it also targets a table named
        # 'cache' rather than the dbwipes_cache table used elsewhere --
        # confirm whether this method is dead code inherited from upstream.
        q = "delete from cache where key like '%%%%%s%%%%%s%%%%'" % (
            str(self.engine), self.tablename)
        manager = DataHubManager(user=self.username, repo_base=self.username)
        manager.execute_sql(q)

    def query(self, q, *args):
        """
        Summaries using other engines only need to override this method
        """
        manager = DataHubManager(user=self.username, repo_base=self.repo_base)
        return manager.execute_sql(q, params=args)['tuples']

    @insert_into_cache
    def get_num_rows(self):
        # Total row count, ignoring the WHERE filter.
        q = "SELECT count(*) from %s" % self.tablename
        return self.query(q)[0][0]

    @insert_into_cache
    def get_distinct_count(self, col):
        # Distinct values of a single column, honoring the WHERE filter.
        q = "SELECT count(distinct %s) FROM %s %s" % (
            col, self.tablename, self.where)
        return self.query(q)[0][0]

    @insert_into_cache
    def get_column_counts(self, cols):
        # Distinct-value counts for several columns in one round trip;
        # returns {column: count}.
        q = 'SELECT %s FROM %s'
        select = ["count(distinct %s)" % col for col in cols]
        select = ", ".join(select)
        q = q % (select, self.tablename)
        counts = tuple(self.query(q)[0])
        return dict(zip(cols, counts))

    @insert_into_cache
    def get_columns_and_types(self):
        # Map the DB schema to DBWipes' coarse type names:
        # 'str', 'num' or 'time'; anything unrecognized keeps its raw name.
        manager = DataHubManager(user=self.username, repo_base=self.repo_base)
        tokens = self.tablename.split('.')
        repo = tokens[0]
        table = tokens[1]
        rows = manager.get_schema(repo, table)
        ret = []
        for col, typ in rows:
            if typ == 'text':
                typ = 'str'
            if 'double' in typ:
                typ = 'num'
            if 'int' in typ:
                typ = 'num'
            if 'date' in typ or 'time' in typ:
                typ = 'time'
            ret.append((str(col), str(typ)))
        return ret

    @insert_into_cache
    def get_columns(self):
        """
        engine specific way to get table columns
        """
        return pick(self.get_columns_and_types(), 0)

    @insert_into_cache
    def get_type(self, col_name):
        # Coarse type for one column, or None if the column is unknown.
        return dict(self.get_columns_and_types()).get(col_name, None)

    def get_col_groupby(self, col_name, col_type):
        # Produce a SQL group-by expression for temporal columns.
        # NOTE(review): get_columns_and_types normalizes temporal types to
        # 'time', so the 'date'/'timestamp' branch presumably exists for
        # subclasses that keep raw type names -- confirm.
        if col_type is None:
            return None
        groupby = None
        if 'time' == col_type:
            groupby = self.get_time_stats(col_name)
        if 'date' in col_type or 'timestamp' in col_type:
            groupby = self.get_date_stats(col_name)
        return groupby

    @insert_into_cache
    def get_col_stats(self, col_name, col_type=None):
        # Dispatch to the appropriate stats routine for the column's type;
        # returns None when no strategy applies.
        if col_type is None:
            col_type = self.get_type(col_name)
        # if col_type.startswith('_'):
        #     return None
        numerics = ['int', 'float', 'double', 'numeric', 'num']
        chars = ['char', 'text', 'str']
        is_numeric = col_type in numerics
        is_char = col_type in chars
        if is_numeric:
            return self.get_numeric_stats(col_name)
        elif is_char:
            return self.get_char_stats(col_name)
        groupby = self.get_col_groupby(col_name, col_type)
        if groupby:
            stats = self.get_group_stats(col_name, groupby)
            return stats
        return None

    def get_group_stats(self, col_name, groupby):
        # Bucketed stats using a precomputed SQL group-by expression.
        q = ('select %s as GRP, min(%s), max(%s), count(*) '
             'from %s %s group by GRP '
             'order by GRP limit %d')
        q = q % (groupby, col_name, col_name,
                 self.tablename, self.where, self.nbuckets)
        rows = [{'val': x, 'count': count, 'range': [minv, maxv]}
                for (x, minv, maxv, count) in self.query(q)]
        return rows

    def get_numeric_stats(self, c):
        # Histogram for a numeric column. Bucket width is derived from the
        # column's spread: 2.5 * stddev / nbuckets.
        ndistinct = self.get_distinct_count(c)
        if ndistinct == 0:
            return []
        if ndistinct == 1:
            # Single distinct value: return one bucket covering all rows.
            if self.where:
                q = "SELECT %s from %s %s AND %s is not null"
                args = (c, self.tablename, self.where, c)
            else:
                q = "SELECT %s from %s WHERE %s is not null"
                args = (c, self.tablename, c)
            val = self.query(q % args)[0][0]
            return [{'val': val, 'count': self.nrows, 'range': [val, val]}]
        q = """
        with TMP as (
            SELECT 2.5 * stddev(%s) / %d as block FROM %s %s
        )
        SELECT (%s/block)::int*block as bucket,
               min(%s) as min,
               max(%s) as max,
               count(*) as count
        FROM %s, TMP
        %s
        GROUP BY bucket
        ORDER BY bucket
        """
        q = q % (c, self.nbuckets, self.tablename, self.where,
                 c, c, c, self.tablename, self.where)
        stats = []
        for (val, minv, maxv, count) in self.query(q):
            if val is None:
                # NULL bucket: keep it but with no representative value.
                stats.append({
                    'val': None,
                    'count': count,
                    'range': [minv, maxv]
                })
            else:
                # Represent each bucket by its midpoint.
                stats.append({
                    'val': (maxv + minv) / 2.,
                    'count': count,
                    'range': [minv, maxv]
                })
        return stats

    def get_char_stats(self, col_name):
        # Top-N most frequent values for a string column.
        q = """
        select %s as GRP, min(%s), max(%s), count(*)
        FROM %s
        %s
        GROUP BY GRP
        ORDER BY count(*) desc
        LIMIT %d
        """
        q = q % (col_name, col_name, col_name,
                 self.tablename, self.where, self.nbuckets)
        rows = [{'val': x, 'count': count, 'range': [minv, maxv]}
                for (x, minv, maxv, count) in self.query(q)]
        return rows

    def get_time_stats(self, col_name):
        # Returns a SQL group-by *expression* (hour-truncated time of day),
        # not statistics -- consumed by get_group_stats.
        return "date_trunc('hour', %s)::time" % col_name

    def get_date_stats(self, col_name):
        # Pick a date-truncation granularity (hour/day/week/month/year)
        # proportional to the column's total span; returns the SQL
        # group-by expression, or None if the column is all NULL.
        q = ('select max(%s)::date, min(%s)::date, '
             'EXTRACT(EPOCH FROM (max(%s::timestamp) - min(%s::timestamp)))/60'
             ' as minutes from %s')
        q = q % (col_name, col_name, col_name, col_name, self.tablename)
        (maxv, minv, nminutes) = self.query(q)[0]
        if maxv is None or minv is None or nminutes is None:
            return None
        ndays = nminutes / 60 / 24
        var = "%s::timestamp" % col_name
        if ndays == 0:
            groupby = "date_trunc('hour', %s)" % var
        elif ndays <= 30:
            groupby = "date_trunc('day', %s)" % var
        elif ndays <= 50 * 7:
            groupby = "date_trunc('week', %s)" % var
        elif ndays <= 365 * 12:
            groupby = "date_trunc('month', %s)" % var
        else:
            groupby = "date_trunc('year', %s)" % var
        return groupby
| {
"repo_name": "anantb/datahub",
"path": "src/apps/dbwipes/summary.py",
"copies": "2",
"size": "10196",
"license": "mit",
"hash": 6970746650246567000,
"line_mean": 30.0853658537,
"line_max": 79,
"alpha_frac": 0.5201059239,
"autogenerated": false,
"ratio": 3.659727207465901,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0,
"num_lines": 328
} |
from functools import wraps
import json
from django.contrib.auth.models import (AnonymousUser,
User,
check_password)
from django.core.urlresolvers import reverse
from django.http import HttpResponse
from django.shortcuts import render
from django.template.loader import render_to_string
from django.views.decorators.csrf import csrf_exempt
from django.utils.html import mark_safe
from django.views.decorators.cache import never_cache
from forms.cached_uploads import review_cached_upload_view
from models import (api_get_available_tools,
get_available_packages,
get_available_tools,
)
### Authorization ###
def require_nothing(f):
    """Wrap the view function to require nothing.

    Marks the request so later checks know authorization was considered,
    then delegates straight to the wrapped view.
    """
    @wraps(f)
    def inner(request, *args, **kwds):
        request.agda_authorization_checked = True
        return f(request, *args, **kwds)
    return inner
### Basic views ###
@never_cache
@require_nothing
def top(request):
    """
    Handle the top level (welcome) page.

    Renders the agda/top.html template; no authorization required.
    Django view.
    """
    return render(request, 'agda/top.html')
@never_cache
@require_nothing
def contact(request):
    """
    Handle the contact page.

    Renders the agda/contact.html template; no authorization required.
    Django view.
    """
    return render(request, 'agda/contact.html')
@never_cache
@require_nothing
def new_user(request):
    """
    Handle the new user page.

    Renders the agda/new_user.html template; no authorization required.
    Django view.
    """
    return render(request, 'agda/new_user.html')
### HTTP BASIC AUTHENTICATION ###
def basic_challenge():
    """Basic http AUTH for API wrappers

    Builds a 401 response that asks the client for Basic credentials.

    Returns
    -------
    HttpResponse : with WWW-Auth
    """
    resp = HttpResponse('Please authenticate\n', mimetype="text/plain")
    resp.status_code = 401
    resp['WWW-Authenticate'] = 'Basic realm=API'
    return resp
def get_basic_authorization(request):
    """Gets basic auth username and password

    Parameters
    ----------
    request : A django request obj

    Returns
    -------
    auth_username : base64 decoded
    auth_password : base64 decoded

    Raises
    ------
    ValueError : if the header is missing, not Basic, or undecodable.
    """
    http_auth = request.META.get('HTTP_AUTHORIZATION')
    if not http_auth:
        raise ValueError('http authorization missing')
    (auth_method, auth) = http_auth.split(None, 1)
    if auth_method.lower() != "basic":
        raise ValueError('bad http authorization method')
    try:
        # RFC 7617: the user-id cannot contain ':' but the password may,
        # so split only on the first colon (was split(':'), which broke
        # any password containing a colon).
        auth_username, auth_password = auth.strip().decode('base64').split(':', 1)
    except Exception:
        raise ValueError('bad authorization encoding')
    return auth_username, auth_password
def get_basic_authenticated_api_user(request):
    """Get the agda user from basic auth

    Get the Agda user from the basic http auth information and check the
    provided HTTP auth password against the user's API password.

    Parameters
    ----------
    request : A django request obj

    Returns
    -------
    user : Authenticated Django agda user obj

    Raises
    ------
    ValueError()
        | If no such user.
        | If User has no api password set
        | If wrong api password
    """
    auth_username, auth_password = get_basic_authorization(request)
    try:
        user = User.objects.get(username=auth_username)
    except Exception:
        # Narrowed from a bare except (which also caught KeyboardInterrupt).
        raise ValueError('no such user')
    # The profile *is* the user object here; the old try/except around a
    # plain assignment could never fail and has been removed.
    profile = user
    if not profile.api_password or profile.api_password == '!':
        raise ValueError('user has no api password set')
    if not check_password(auth_password, profile.api_password):
        raise ValueError('wrong api password')
    # Authorized!
    return user
def api_require_nothing(f):
    """Wrap the api view function to require nothing.

    Decorated to exempt the Cross Site Request Forgery protection
    It is needed to exempt the csrf for the api functions.

    Attaches the authenticated API user (or AnonymousUser) to the request
    as `agda_api_user`. A malformed Authorization header gets a 401
    challenge; a missing one silently falls back to AnonymousUser.
    """
    @wraps(f)
    @csrf_exempt
    def wrapped_f(request, *args, **kwds):
        try:
            api_user = get_basic_authenticated_api_user(request)
        except:
            # NOTE(review): bare except -- any failure (bad creds OR a bug)
            # degrades to anonymous unless a header was actually sent.
            if request.META.get('HTTP_AUTHORIZATION'):
                return basic_challenge()
            api_user = AnonymousUser()
        request.agda_api_user = api_user
        request.agda_authorization_checked = True
        return f(request, *args, **kwds)
    return wrapped_f
### Helpers ###
def package_template_dict(request, package, *args, **kw):
    """Context vars suitable for templates that extend package-page.

    see datisca/templates/top.html

    See also
    --------
    models.package.get_available_tools
    models.tools.available
    """
    tools = package.get_available_tools(request.user)
    return dict(*args, package=package, package_tools=tools, **kw)
def json_datetime_encoder(obj):
    """Use as a json.dump default= function to encode datetimes as isoformat strings.

    Raises TypeError (as the json protocol requires) for objects without
    an isoformat() method.
    """
    try:
        return obj.isoformat()
    except AttributeError:
        # Narrowed from a bare except so unrelated errors (and
        # KeyboardInterrupt) are no longer swallowed into TypeError.
        raise TypeError('Object of type %s with value of %s is not JSON serializable' % (type(obj), repr(obj)))
def json_response(data, status=200, default=None):
    """JSON-encode basic python data data and return it in a response.

    default is passed to json.dumps, and should be a callable such that
    default(obj) should return something that is json serializable.
    """
    encoded = json.dumps(data, default=default)
    return HttpResponse(encoded, mimetype='application/json', status=status)
def script_data(data):
    """json dumps data and use Django utils mark_safe to
    Explicitly mark a string as safe for (HTML) output purposes
    """
    encoded = json.dumps(data)
    return mark_safe(encoded)
def stream(*parts):
    """Yield text chunks from a mix of plain strings and iterables of
    strings (py2 `basestring` covers both str and unicode)."""
    for part in parts:
        chunks = [part] if isinstance(part, basestring) else part
        for chunk in chunks:
            yield chunk
def render_and_split(template, split, dictionary=None, context=None, split_tag='SPLIT_RENDERED_TEMPLATE_HERE'):
    """Render `template` once with each key in `split` replaced by a marker,
    then return the rendered output split at those markers.

    `dictionary` holds the remaining template context (optional).
    """
    params = dict.fromkeys(split, split_tag)
    if dictionary:
        # dict.update(None) raises TypeError, so only merge when a mapping
        # was actually supplied (the declared default is None).
        params.update(dictionary)
    return render_to_string(template, params, context_instance=context).split(split_tag)
# Expose the cached-upload review view with the (no-op) auth wrapper applied.
review_cached_upload = require_nothing(review_cached_upload_view)
### Tools ###
@api_require_nothing
def api_list_tools(request):
    """Return a JSON mapping of each available tool name to the absolute
    URL of its API view."""
    services = dict()
    for tool in api_get_available_tools(request.agda_api_user):
        url = request.build_absolute_uri(reverse(tool.api_view))
        services[tool.name] = url
    return json_response(services)
@require_nothing
def list_tools(request):
    """Render the tools available to the current user, ordered by
    case-insensitive display name."""
    tools = sorted(get_available_tools(request.user),
                   key=lambda tool: tool.displayname.lower())
    return render(request, 'agda/list-tools.html', dict(tools=tools))
@require_nothing
def list_packages(request):
    """Render the packages available to the current user, ordered by
    case-insensitive display name."""
    packages = sorted(get_available_packages(request.user),
                      key=lambda package: package.displayname.lower())
    return render(request, 'agda/list-packages.html', dict(packages=packages))
| {
"repo_name": "BILS/agda",
"path": "agda/agda/views.py",
"copies": "1",
"size": "6916",
"license": "mit",
"hash": -8088865649991437000,
"line_mean": 25.6,
"line_max": 111,
"alpha_frac": 0.6609311741,
"autogenerated": false,
"ratio": 4.082644628099174,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5243575802199173,
"avg_score": null,
"num_lines": null
} |
from functools import wraps
import json
from django.http import JsonResponse, HttpResponseNotAllowed
from django.utils.decorators import available_attrs
def methods(method_list):
    """Decorator factory: reject requests whose HTTP method is not in
    `method_list` with a 405 response listing the permitted methods.
    """
    def decorator(func):
        @wraps(func, assigned=available_attrs(func))
        def inner(request, *args, **kw):
            if request.method not in method_list:
                # Body text fixed: 'Method Not Allow' -> 'Method Not Allowed'.
                return HttpResponseNotAllowed(method_list, 'Method Not Allowed')
            return func(request, *args, **kw)
        return inner
    return decorator
def get_headers(request):
    """Reconstruct HTTP request headers from the WSGI META dict:
    HTTP_FOO_BAR -> Foo-Bar, CONTENT_* -> Content-*."""
    headers = {}
    for key, value in request.META.iteritems():  # use iterator (py2)
        if key.startswith('HTTP_'):
            name = '-'.join(key.split('_')[1:]).title()
        elif key.startswith('CONTENT'):
            name = '-'.join(key.split('_')).title()
        else:
            continue
        headers[name] = value
    return headers
def no_get(request):
    """Build the httpbin-style echo dict for a non-GET request: query args,
    raw body, files, form data, headers, best-effort parsed JSON, client
    address and absolute URL."""
    rep_dict = {
        'args': request.GET,
        'data': request.body,
        'files': request.FILES,
        'form': request.POST,
        'headers': get_headers(request),
        'json': None,
        'origin': request.META['REMOTE_ADDR'],
        'url': request.build_absolute_uri(),
    }
    if 'json' in request.content_type:
        try:
            rep_dict['json'] = json.loads(request.body)
        except (ValueError, TypeError):
            # Best-effort decode: a malformed body just leaves 'json' None.
            # (Narrowed from a bare except so real bugs aren't swallowed.)
            pass
    return rep_dict
| {
"repo_name": "baby5/Django-httpbin",
"path": "httpbin/bin/helpers.py",
"copies": "1",
"size": "1339",
"license": "mit",
"hash": -19246178545450010,
"line_mean": 28.1086956522,
"line_max": 78,
"alpha_frac": 0.5922330097,
"autogenerated": false,
"ratio": 4.09480122324159,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.004182134776706967,
"num_lines": 46
} |
from functools import wraps
import json
from django import http
from django.conf import settings
from django.core.exceptions import PermissionDenied
def ssl_required(view_func):
    """A view decorator that enforces HTTPS.

    If settings.SESSION_COOKIE_SECURE is False, it won't enforce anything."""
    @wraps(view_func)
    def _checkssl(request, *args, **kwargs):
        if settings.SESSION_COOKIE_SECURE and not request.is_secure():
            secure_url = request.build_absolute_uri().replace('http://',
                                                              'https://')
            return http.HttpResponseRedirect(secure_url)
        return view_func(request, *args, **kwargs)
    return _checkssl
# Copy/pasta from https://gist.github.com/1405096
# TODO: Log the hell out of the exceptions.
# Content-Type used for every response produced by json_view.
JSON = 'application/json'
def json_view(f):
    """Ensure the response content is well-formed JSON.

    Views wrapped in @json_view can return JSON-serializable Python objects,
    like lists and dicts, and the decorator will serialize the output and set
    the correct Content-type.

    Views may also throw known exceptions, like Http404, PermissionDenied,
    etc, and @json_view will convert the response to a standard JSON error
    format, and set the status code and content type.
    """
    @wraps(f)
    def _wrapped(req, *a, **kw):
        # `except X, e` (py2-only syntax, a SyntaxError on py3) has been
        # replaced with `except X as e`, which is valid on py2.6+ and py3.
        try:
            ret = f(req, *a, **kw)
            blob = json.dumps(ret)
            return http.HttpResponse(blob, content_type=JSON)
        except http.Http404 as e:
            blob = json.dumps({
                'success': False,
                'error': 404,
                'message': str(e),
            })
            return http.HttpResponseNotFound(blob, content_type=JSON)
        except PermissionDenied as e:
            blob = json.dumps({
                'success': False,
                'error': 403,
                'message': str(e),
            })
            return http.HttpResponseForbidden(blob, content_type=JSON)
        except Exception as e:
            blob = json.dumps({
                'success': False,
                'error': 500,
                'message': str(e),
            })
            return http.HttpResponseServerError(blob, content_type=JSON)
    return _wrapped
def cors_enabled(origin, methods=('GET',)):
    """A simple decorator to enable CORS.

    `origin` is echoed in Access-Control-Allow-Origin on every response;
    `methods` lists the allowed HTTP methods. Preflight OPTIONS requests
    are answered directly; disallowed methods get a 400.

    The default is a tuple (was a mutable list literal, the classic
    shared-mutable-default pitfall); any iterable of method names works.
    """
    def decorator(f):
        @wraps(f)
        def decorated_func(request, *args, **kwargs):
            if request.method == 'OPTIONS':
                # preflight
                if ('HTTP_ACCESS_CONTROL_REQUEST_METHOD' in request.META and
                        'HTTP_ACCESS_CONTROL_REQUEST_HEADERS' in request.META):
                    response = http.HttpResponse()
                    response['Access-Control-Allow-Methods'] = ", ".join(
                        methods)
                    # TODO: We might need to change this
                    response['Access-Control-Allow-Headers'] = \
                        request.META['HTTP_ACCESS_CONTROL_REQUEST_HEADERS']
                else:
                    return http.HttpResponseBadRequest()
            elif request.method in methods:
                response = f(request, *args, **kwargs)
            else:
                return http.HttpResponseBadRequest()
            response['Access-Control-Allow-Origin'] = origin
            return response
        return decorated_func
    return decorator
| {
"repo_name": "dbbhattacharya/kitsune",
"path": "kitsune/sumo/decorators.py",
"copies": "1",
"size": "3408",
"license": "bsd-3-clause",
"hash": 8443421191597048000,
"line_mean": 33.4242424242,
"line_max": 79,
"alpha_frac": 0.576584507,
"autogenerated": false,
"ratio": 4.478318002628121,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0,
"num_lines": 99
} |
from functools import wraps
import json
try: # Python 3 imports
from urllib.parse import urljoin
except ImportError: # Python 2 imports
from urlparse import urljoin
from collections import defaultdict
from threading import Thread
from twisted.internet import reactor, ssl
from twisted.internet.defer import Deferred
from twisted.internet.protocol import Protocol
from twisted.web.client import Agent
from twisted.web.http_headers import Headers
from twisted.internet.ssl import ClientContextFactory
from .base_request import BaseRequest
from . import exceptions
from .models.config import Config
from .models.device import Device
from .settings import Settings
class WebClientContextFactory(ClientContextFactory):
    """
    This is low level class and is not meant to be used by end users directly.

    TLS context factory that ignores hostname/port and reuses the default
    client context for every connection.
    """
    def getContext(self, hostname, port):
        # hostname/port intentionally unused; delegate to the base context.
        return ClientContextFactory.getContext(self)
class StreamingParser(Protocol):
    """
    This is low level class and is not meant to be used by end users directly.

    Twisted protocol that parses a newline-delimited JSON stream, invoking
    `callback` per line and `error` (then dropping the connection) on the
    first parse failure.
    """
    def __init__(self, callback, error):
        self.callback = callback
        self.error = error
        # Buffer for a trailing partial line between dataReceived calls.
        self.pending = b''

    def dataReceived(self, data):
        obj = {}
        self.pending += data
        lines = self.pending.split(b'\n')
        # The last element is an incomplete line; keep it for next time.
        self.pending = lines.pop()
        for line in lines:
            try:
                if line:
                    obj = json.loads(line)
            except Exception as e:
                # Stop the stream on the first malformed line.
                self.transport.stopProducing()
                self.transport.loseConnection()
                if self.error:
                    self.error(e)
                break
            # NOTE(review): an empty line re-delivers the previous `obj`
            # (or {} initially) to the callback -- confirm intended.
            self.callback(obj)

    def connectionLost(self, reason):
        # Stream teardown needs no cleanup; pending data is discarded.
        pass
def cbRequest(response, callback, error):
    # Twisted response callback: attach a StreamingParser to consume the
    # HTTP body and return it so callers can later tear it down (cbDrop).
    protocol = StreamingParser(callback, error)
    response.deliverBody(protocol)
    return protocol
def cbDrop(protocol):
    # Twisted callback: stop the producer and close the streaming connection.
    protocol.transport.stopProducing()
    protocol.transport.loseConnection()
class Subscription(object):
    """
    This is low level class and is not meant to be used by end users directly.

    Manages a streaming HTTPS request to a device's log endpoint on a
    background Twisted reactor thread.
    """
    def __init__(self):
        self.context_factory = WebClientContextFactory()
        self.settings = Settings()

    def add(self, uuid, callback, error=None, count=None):
        # Open the streaming request and wire each parsed JSON log line to
        # `callback`; returns the Deferred used later by stop().
        query = 'stream=1'
        if count:
            query = 'stream=1&count={}'.format(count)
        url = urljoin(
            self.settings.get('api_endpoint'),
            '/device/v2/{uuid}/logs?{query}'.format(uuid=uuid, query=query)
        )
        headers = {}
        headers[b'Authorization'] = ['Bearer {:s}'.format(self.settings.get('token')).encode()]
        agent = Agent(reactor, self.context_factory)
        d = agent.request(b'GET', url.encode(), Headers(headers), None)
        d.addCallback(cbRequest, callback, error)
        self.run()
        return d

    def run(self):
        # Start the reactor once, in a background thread, without signal
        # handlers (args=(False,)).
        if not reactor.running:
            Thread(target=reactor.run, args=(False,)).start()

    def stop(self, d):
        # Schedule the stream teardown on the reactor's own thread.
        reactor.callFromThread(d.addCallback, cbDrop)
class Logs(object):
    """
    This class implements functions that allow processing logs from device.
    """

    # NOTE(review): class-level mutable attribute -- this defaultdict is
    # shared by every Logs instance until unsubscribe_all() rebinds an
    # instance-level plain dict. Confirm that sharing is intended.
    subscriptions = defaultdict(list)

    def __init__(self):
        self.base_request = BaseRequest()
        self.config = Config()
        self.device = Device()
        self.settings = Settings()
        self.subscription_handler = Subscription()

    def __exit__(self, exc_type, exc_value, traceback):
        # Context-manager exit stops the shared reactor.
        # NOTE(review): no matching __enter__ is defined here.
        reactor.stop()

    def subscribe(self, uuid, callback, error=None, count=None):
        """
        Subscribe to device logs.

        Args:
            uuid (str): device uuid.
            callback (function): this callback is called on receiving a message.
            error (Optional[function]): this callback is called on an error event.
            count (Optional[int]): number of historical messages to include.

        Returns:
            dict: a log entry will contain the following keys: `isStdErr, timestamp, message, isSystem, createdAt`.
        """
        # Raises if the device does not exist, before opening the stream.
        self.device.get(uuid)
        self.subscriptions[uuid].append(self.subscription_handler.add(uuid, callback, error, count))

    def history(self, uuid, count=None):
        """
        Get device logs history.

        Args:
            uuid (str): device uuid.
            count (Optional[int]): number of historical messages to include.
        """
        raw_query = ''
        if count:
            raw_query = 'count={}'.format(count)
        return self.base_request.request(
            '/device/v2/{uuid}/logs'.format(uuid=uuid), 'GET', raw_query=raw_query,
            endpoint=self.settings.get('api_endpoint')
        )

    def unsubscribe(self, uuid):
        """
        Unsubscribe from device logs for a specific device.

        Args:
            uuid (str): device uuid.
        """
        if uuid in self.subscriptions:
            for d in self.subscriptions[uuid]:
                self.subscription_handler.stop(d)
            del self.subscriptions[uuid]

    def unsubscribe_all(self):
        """
        Unsubscribe all subscribed devices.
        """
        for device in self.subscriptions:
            for d in self.subscriptions[device]:
                self.subscription_handler.stop(d)
        # Rebinds an instance attribute, shadowing the class-level dict.
        self.subscriptions = {}
| {
"repo_name": "resin-io/resin-sdk-python",
"path": "balena/logs.py",
"copies": "1",
"size": "5351",
"license": "apache-2.0",
"hash": -2619902831493143600,
"line_mean": 26.725388601,
"line_max": 115,
"alpha_frac": 0.6144645861,
"autogenerated": false,
"ratio": 4.43698175787728,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.555144634397728,
"avg_score": null,
"num_lines": null
} |
from functools import wraps
import locale
from datetime import datetime
import json
import markdown2 as markdown
from flask import Flask, render_template, request, redirect, url_for, g, abort, Response, session
from sqlalchemy.orm import sessionmaker, scoped_session
from sqlalchemy import create_engine
from sqlalchemy.pool import StaticPool
from wagner.models import User, metadata
from faust_sdk import FaustApi
app = Flask(__name__)
# NOTE(review): hard-coded session secret -- fine for a demo, never for prod.
app.secret_key = 'A0Zr98j/3yX R~XHH!jmN]LWX/,?RT'

# Single shared in-memory SQLite DB: StaticPool keeps one connection alive
# and check_same_thread=False lets Flask's worker threads reuse it.
connection_string = 'sqlite:///:memory:'
engine = create_engine(connection_string,
                       connect_args={'check_same_thread':False},
                       poolclass=StaticPool,
                       echo=False
                       )
Session = sessionmaker(engine)
# Thread-local session registry used by the request handlers below.
db_session = scoped_session(Session)
# Recreate the schema from scratch on every start (in-memory DB anyway).
metadata.drop_all(engine)
metadata.create_all(engine)
def oauth_required(f):
    """Decorator: when no user is logged in, start the OAuth 1.0 dance
    against the local faust provider and redirect the browser to its
    authorization URL, carrying the current URL as `next`."""
    @wraps(f)
    def decorated_function(*args, **kwargs):
        if not hasattr(g, 'user') or g.user is None:
            api = FaustApi('http://localhost:5000', 'wagnerkey', 'wagnersecret')
            callback_uri = 'http://localhost:5001/oauth_callback?next=%s' % (request.url,)
            # Python 2 print statement -- this module targets py2.
            print 'CALLBACK:', callback_uri
            resource_owner_key, resource_owner_secret = api.initiate(callback_uri=callback_uri)
            authorization_url = api.get_authorization_url(resource_owner_key, resource_owner_secret)
            return redirect(authorization_url)
        return f(*args, **kwargs)
    return decorated_function
@app.route('/oauth_callback')
def oauth_callback():
    """Finish the OAuth dance: exchange the verifier for an access token,
    persist it on a new User row, and remember the key in the session."""
    api = FaustApi('http://localhost:5000', 'wagnerkey', 'wagnersecret')
    token, secret, verifier = api.parse_authorization_response(request.url)
    access_key, access_secret = api.get_access_token(token, secret, verifier)
    user = User('', '', token_key=access_key, token_secret=access_secret)
    db_session.add(user)
    db_session.commit()
    session['token_key'] = user.token_key
    # Bounce back to wherever the user originally wanted to go.
    next = request.args.get('next')
    if next is not None:
        return redirect(next)
    return redirect(url_for('index'))
@app.route('/logout')
def logout():
    """Forget the stored access token and return to the index page."""
    del session['token_key']
    return redirect(url_for('index'))
@app.route('/login/<url>')
@oauth_required
def login_show_blog(url):
    """Force a login (via oauth_required), then show the given blog."""
    return redirect(url_for('show_blog', url=url))
@app.template_filter('markdown')
def markdown_filter(s):
    """Jinja filter: render a markdown string to HTML."""
    return markdown.markdown(s)
@app.before_request
def init_blog():
    """Before each request, attach the current user (if any) and a Faust
    API client -- authenticated when a valid token is held -- to flask.g."""
    token_key = session.get('token_key')
    if token_key is not None:
        g.user = db_session.query(User).filter(User.token_key==token_key).first()
        if g.user is not None:
            g.api = FaustApi('http://localhost:5000', 'wagnerkey', 'wagnersecret',
                             g.user.token_key, g.user.token_secret)
        else:
            # Token no longer maps to a user: fall back to anonymous API.
            g.api = FaustApi('http://localhost:5000', 'wagnerkey', 'wagnersecret')
    else:
        g.user = None
        g.api = FaustApi('http://localhost:5000', 'wagnerkey', 'wagnersecret')
@app.teardown_request
def teardown_request(exception):
    # Return the scoped SQLAlchemy session to the registry after each request.
    db_session.remove()
@app.route('/')
def index():
    """List all blogs known to the Faust API."""
    blogs = g.api.get_blogs()
    return render_template('index.html', blogs=blogs)
@app.route('/<url>')
def show_blog(url):
    """Show one blog (looked up by its URL slug) and its entries."""
    blog = g.api.get_blog_by_url(url)
    entries = g.api.get_entries_by_blog(blog.id)
    return render_template('blog.html', blog=blog, entries=entries, user=g.user)
@app.route('/create', methods=['GET', 'POST'])
@oauth_required
def create_entry():
    """GET: show the entry creation form. POST: create the entry via the
    API (tags are comma-separated) and redirect back to the blog."""
    if request.method == 'GET':
        blog_id = request.args.get('blog_id')
        blog = g.api.get_blog(blog_id)
        return render_template('create.html', blog=blog)
    elif request.method == 'POST':
        title = request.form['title']
        text = request.form['text']
        # Split on commas, dropping empty/whitespace-only tags.
        tags = [tag.strip() for tag in request.form['tags'].split(',') if tag.strip() != '']
        blog_url = request.form['blog_url']
        blog_id = request.form['blog_id']
        data = {'title': title, 'text': text, 'tags': tags}
        g.api.create_entry(blog_id, data)
        return redirect(url_for('show_blog', url=blog_url))
@app.route('/edit/<int:id>', methods=['GET', 'POST'])
@oauth_required
def edit_entry(id):
    """GET: show the edit form pre-filled with the entry. POST: push the
    updated fields to the API and redirect back to the blog."""
    entry = g.api.get_entry(id)
    blog = entry.blog
    blog_url = blog.url
    if request.method == 'GET':
        return render_template('edit.html', entry=entry, blog=blog)
    elif request.method == 'POST':
        title = request.form['title']
        text = request.form['text']
        # Split on commas, dropping empty/whitespace-only tags.
        tags = [tag.strip() for tag in request.form['tags'].split(',') if tag.strip() != '']
        data = {'title': title, 'text': text, 'tags': tags}
        g.api.update_entry(id, data)
        return redirect(url_for('show_blog', url=blog_url))
@app.route('/delete/<int:id>')
@oauth_required
def delete_entry(id):
    """Delete an entry via the API and return to its blog's page."""
    entry = g.api.get_entry(id)
    # Capture the blog URL before the entry disappears.
    blog_url = entry.blog.url
    g.api.delete_entry(id)
    return redirect(url_for('show_blog', url=blog_url))
@app.template_filter('date')
def datetime_filter(date, fmt='%H:%M, %d.%m.%Y'):
    """Jinja filter: parse a 'YYYY-MM-DD HH:MM:SS' string and reformat it.

    :param date: datetime string in '%Y-%m-%d %H:%M:%S' form.
    :param fmt: strftime format for the output.
    """
    # locale.setlocale(locale.LC_ALL, "deu_deu")
    # '%Y-%m-%d %H:%M:%S.%f'
    d = datetime.strptime(date, '%Y-%m-%d %H:%M:%S')
    # Dead code removed: the old ordinal-suffix ('1st/2nd/3rd') string was
    # computed but never returned (its result was only used by a
    # commented-out `return s`).
    return d.strftime(fmt)
if __name__ == '__main__':
    # Dev server on 5001 so the faust provider can keep port 5000.
    app.run(port=5001, debug=True)
| {
"repo_name": "verbit/wagner",
"path": "run.py",
"copies": "1",
"size": "5070",
"license": "mit",
"hash": 8755259666981940000,
"line_mean": 31.9220779221,
"line_max": 97,
"alpha_frac": 0.6710059172,
"autogenerated": false,
"ratio": 3.059746529873265,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4230752447073265,
"avg_score": null,
"num_lines": null
} |
from functools import wraps
import logging
from celery import task
from django.db import IntegrityError
from django_facebook.utils import get_class_for
from django_facebook.signals import facebook_token_extend_finished
logger = logging.getLogger(__name__)
@task.task(ignore_result=True)
def extend_access_token(profile, access_token):
    '''
    Extends the access token to 60 days and saves it on the profile

    Delegates to the profile's own _extend_access_token implementation.

    :param profile: the profile or user object
    :param access_token: a valid access token
    :type access_token: string
    '''
    results = profile._extend_access_token(access_token)
    return results
@task.task(ignore_result=True)
def store_likes(user, likes):
    '''
    Inserting again will not cause any errors, so this is safe
    for multiple executions

    Delegates the actual write to the configured user-conversion class.

    :param user: The user for which we are storing
    :type user: User object
    :param friends: List of your likes
    :type friends: list
    '''
    converter_class = get_class_for('user_conversion')
    logger.info('celery is storing %s likes' % len(likes))
    converter_class._store_likes(user, likes)
    return likes
@task.task(ignore_result=True)
def store_friends(user, friends):
    '''
    Inserting again will not cause any errors, so this is safe
    for multiple executions

    Delegates the actual write to the configured user-conversion class.

    :param user: The user for which we are storing
    :type user: User object
    :param friends: List of your friends
    :type friends: list
    '''
    converter_class = get_class_for('user_conversion')
    logger.info('celery is storing %s friends' % len(friends))
    converter_class._store_friends(user, friends)
    return friends
@task.task(ignore_result=True)
def get_and_store_likes(user, facebook):
    '''
    Since facebook is quite slow this version also runs the get
    on the background

    Inserting again will not cause any errors, so this is safe
    for multiple executions

    :param user: The user for which we are storing
    :type user: User object
    :param facebook: The graph connection to facebook
    :type facebook: FacebookUserConverter object
    '''
    try:
        # Log messages fixed: this task handles likes, but the messages
        # said "friends" (copy/paste from get_and_store_friends).
        logger.info('attempting to get and store likes for %s', user.id)
        stored_likes = facebook._get_and_store_likes(user)
        logger.info('celery is storing %s likes', len(stored_likes))
        return stored_likes
    except IntegrityError as e:
        logger.warn(
            'get_and_store_likes failed for %s with error %s', user.id, e)
@task.task(ignore_result=True)
def get_and_store_friends(user, facebook):
    '''
    Since facebook is quite slow this version also runs the get
    on the background
    Inserting again will not cause any errors, so this is safe
    for multiple executions

    :param user: The user for which we are storing
    :type user: User object
    :param facebook: The graph connection to facebook
    :type facebook: FacebookUserConverter object
    '''
    try:
        logger.info('attempting to get and store friends for %s', user.id)
        stored_friends = facebook._get_and_store_friends(user)
        logger.info('celery is storing %s friends', len(stored_friends))
        return stored_friends
    except IntegrityError as e:
        # logger.warn is a deprecated alias; warning is the supported name
        logger.warning(
            'get_and_store_friends failed for %s with error %s', user.id, e)
@task.task(ignore_result=True)
def remove_share(share):
    """Delete the given open graph share.

    :param share: the open graph share object to remove
    """
    share._remove()
@task.task(ignore_result=True)
def retry_open_graph_share(share, reset_retries=False):
    """Retry a failed open graph share.

    Failed shares are retried later (roughly 15 minutes) so that a short
    Facebook outage does not permanently lose them.
    """
    logger.info('retrying open graph share %s', share)
    share.retry(reset_retries=reset_retries)
@task.task(ignore_result=True)
def retry_open_graph_shares_for_user(user):
    """Retry the user's recently failed shares (run on token refresh)."""
    from django_facebook.models import OpenGraphShare
    failed = list(
        OpenGraphShare.objects.recently_failed().filter(user=user)[:1000])
    logger.info('retrying %s shares for user %s', len(failed), user)
    for failed_share in failed:
        retry_open_graph_share(failed_share, reset_retries=True)
def token_extended_connect(sender, user, profile, token_changed, old_token, **kwargs):
    """Signal handler: schedule share retries once a token was extended."""
    from django_facebook import settings as facebook_settings
    if not facebook_settings.FACEBOOK_CELERY_TOKEN_EXTEND:
        return
    # Only safe when Celery is actually in use; the 60s countdown avoids
    # trouble caused by replication lag on the freshly written token.
    retry_open_graph_shares_for_user.apply_async(args=[user], countdown=60)
facebook_token_extend_finished.connect(token_extended_connect)
| {
"repo_name": "christer155/Django-facebook",
"path": "django_facebook/tasks.py",
"copies": "25",
"size": "4751",
"license": "bsd-3-clause",
"hash": 4391083167781703700,
"line_mean": 30.4635761589,
"line_max": 86,
"alpha_frac": 0.6966954325,
"autogenerated": false,
"ratio": 3.8626016260162603,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.0002610295464745922,
"num_lines": 151
} |
from functools import wraps
import logging
from datetime import datetime
def timethis(message=None, logit=lambda *args: logging.info(*args), store=lambda _: _):
    """Decorator that times each call of the decorated function or method.

    :param message: label used in the log line; defaults to the decorated
        object's ``__name__``
    :param logit: logging callable invoked as ``logit(fmt, label, delta)``;
        defaults to ``logging.info``
    :param store: callable receiving each measured ``timedelta`` (identity
        by default)

    Attention: this decorator takes arguments (even if only defaults), so
    it must always be applied with parentheses: ``@timethis()``.
    """
    def wrap(f):
        # bug fix: a decorator receives only the function; the previous
        # ``*args, **kw`` on this level were dead parameters
        @wraps(f)
        def wrapped(*args, **kw):
            t0 = datetime.now()
            r = f(*args, **kw)
            delta = datetime.now() - t0
            logit('%s: %s', message if message is not None else f.__name__,
                  delta)
            store(delta)
            return r
        return wrapped
    return wrap
def debugtime(message=None, level=logging.DEBUG, store=lambda _: _):
    """Syntactic sugar: :func:`timethis` logging at ``level`` (DEBUG default)."""
    def _log(*args):
        logging.log(level, *args)
    return timethis(message, _log, store)
| {
"repo_name": "wilkeraziz/chisel",
"path": "python/chisel/util/logtools.py",
"copies": "1",
"size": "1303",
"license": "apache-2.0",
"hash": -1576994431009951000,
"line_mean": 42.4333333333,
"line_max": 93,
"alpha_frac": 0.6577129701,
"autogenerated": false,
"ratio": 4.23051948051948,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.538823245061948,
"avg_score": null,
"num_lines": null
} |
from functools import wraps
import logging
from encryption.security import decrypt_obj
from encryption.store.s3 import S3Provider
from conf.appconfig import ENCRYPTION
logger = logging.getLogger(__name__)
def get_s3_store():
    """Build an S3-backed key store from the app's encryption settings."""
    s3_cfg = ENCRYPTION['s3']
    return S3Provider(s3_cfg['bucket'], keys_base=s3_cfg['base'])
def using_encryption_store(fun):
    """Decorator that injects default ``store`` and ``passphrase`` kwargs
    from the application's encryption configuration."""
    @wraps(fun)
    def outer(*args, **kwargs):
        if ENCRYPTION['store'] != 's3':
            logger.warn('No valid encryption store found. '
                        'Please set env. variable ENCRYPTION_STORE to one of'
                        'supported values ["s3",]. Defaulting to in-memory '
                        'store. ')
        else:
            kwargs.setdefault('store', get_s3_store())
        kwargs.setdefault('passphrase', ENCRYPTION['passphrase'])
        return fun(*args, **kwargs)
    return outer
@using_encryption_store
def decrypt_config(config, profile='default', store=None, passphrase=None):
    # Thin wrapper around decrypt_obj; ``store`` and ``passphrase`` default
    # to None here and are filled in by the using_encryption_store decorator.
    return decrypt_obj(config, profile=profile, store=store,
                       passphrase=passphrase)
| {
"repo_name": "totem/cluster-orchestrator",
"path": "orchestrator/services/security.py",
"copies": "2",
"size": "1107",
"license": "mit",
"hash": -4393428559922297000,
"line_mean": 32.5454545455,
"line_max": 78,
"alpha_frac": 0.6233062331,
"autogenerated": false,
"ratio": 4.130597014925373,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5753903248025374,
"avg_score": null,
"num_lines": null
} |
from functools import wraps
import logging
import flask
import utils
import models.base
from thirdparty.containerize import ContainerizeExceptionBase
# NOTE(review): Python 2 syntax (``except X, e``, StandardError); this
# module will not run unchanged on Python 3.
class Blueprint(flask.Blueprint):
    """flask.Blueprint subclass that keeps a reference to the app it is
    registered on and offers JSON-over-POST routing helpers."""
    def __init__(self, *args, **kwargs):
        flask.Blueprint.__init__(self, *args, **kwargs)
        # set by register(); route_post_json needs it for polling updates
        self.app = None
    def register(self, app, *args, **kwargs):
        # remember the owning app before delegating to Flask
        self.app = app
        flask.Blueprint.register(self, app, *args, **kwargs)
    def route_post(self, url_pattern):
        # shorthand for a POST-only route
        return self.route(url_pattern, methods=['POST'])
    def route_post_json(self, url_pattern, update_pollings=False):
        # Register the wrapped view on a POST route, commit the DB session
        # on success and translate well-known exceptions into JSON error
        # responses. When update_pollings is True, rewrite the polling
        # targets after a successful commit.
        def wrapper(f):
            @self.route_post(url_pattern)
            @wraps(f)
            def g(*args, **kwargs):
                try:
                    # the view returns the JSON-serialisable payload (or None)
                    r, code = f(*args, **kwargs), 200
                    models.base.db.session.commit()
                    if update_pollings:
                        self.app.write_polling_targets()
                except KeyError, e:
                    # a required request argument was missing
                    r, code = {
                        'reason': 'missing argument',
                        'missing': e.message,
                    }, 400
                except UnicodeEncodeError, e:
                    r, code = {'reason': 'invalid input encoding'}, 400
                except ValueError, e:
                    r, code = {'reason': e.message}, 400
                except ContainerizeExceptionBase, e:
                    logging.exception(e)
                    r, code = {
                        'reason': 'containerize fail',
                        'detail': e.message,
                    }, 400
                except StandardError, e:
                    # catch-all: log and report as an unexpected 500
                    logging.error('UNEXPECTED ERROR')
                    logging.exception(e)
                    r, code = {'reason': 'unexpected', 'msg': e.message}, 500
                if r is None:
                    return '', code
                return utils.json_response(r, code)
            return g
        return wrapper
| {
"repo_name": "HunanTV/redis-ctl",
"path": "app/bpbase.py",
"copies": "1",
"size": "1989",
"license": "mit",
"hash": -3940304513312469000,
"line_mean": 35.1636363636,
"line_max": 77,
"alpha_frac": 0.4881850176,
"autogenerated": false,
"ratio": 4.713270142180095,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0,
"num_lines": 55
} |
from functools import wraps
import logging
import json
from datetime import timedelta
from demands import HTTPServiceError
from django.shortcuts import render, redirect, resolve_url
from django.utils.decorators import available_attrs
from django.utils.six.moves.urllib.parse import urlparse
from django.contrib.auth import REDIRECT_FIELD_NAME
from django.contrib.sites.shortcuts import get_current_site
from django.contrib import messages
from django.core.paginator import Paginator, EmptyPage, PageNotAnInteger
from django.core.urlresolvers import reverse
from django.utils.http import is_safe_url
from django.utils.timezone import now
from django.http import HttpResponseRedirect, JsonResponse
from django.template.defaulttags import register
from django.template.response import TemplateResponse
from django.core.context_processors import csrf
from django.conf import settings
from django import forms
import dateutil.parser
from seed_services_client import (
ControlInterfaceApiClient,
HubApiClient,
IdentityStoreApiClient,
MessageSenderApiClient,
SchedulerApiClient,
StageBasedMessagingApiClient,
)
from seed_services_client.metrics import MetricsApiClient
from urlobject import URLObject
from .forms import (AuthenticationForm, IdentitySearchForm,
RegistrationFilterForm, SubscriptionFilterForm,
ChangeFilterForm, ReportGenerationForm,
AddSubscriptionForm, DeactivateSubscriptionForm,
ChangeSubscriptionForm, MsisdnReportGenerationForm,
UserDetailSearchForm)
from . import utils
logger = logging.getLogger(__name__)
@register.filter
def get_identity_addresses(identity):
    """Template filter: addresses stored under the identity's default
    address type, or {} when no default type is set."""
    details = identity.get('details', {})
    addr_type = details.get('default_addr_type', None)
    if not addr_type:
        logger.warning('No default_addr_type specified for: %r' % (identity,))
        return {}
    return details.get('addresses', {}).get(addr_type, {})
@register.filter
def get_item(dictionary, key):
    """Template filter: dictionary lookup with a None fallback."""
    return dictionary.get(key, None)
@register.filter
def get_date(date_string):
    """Template filter: parse a date string into a datetime (None passes
    through unchanged)."""
    if date_string is None:
        return None
    return dateutil.parser.parse(date_string)
@register.simple_tag
def replace_query_param(url, parameter, value):
    """Template tag: return ``url`` with ``parameter`` set to ``value``."""
    return URLObject(url).set_query_params([(parameter, value)])
# Module-level client for the control-interface service; the views below
# use it for dashboards, audit logs and per-user service tokens.
ciApi = ControlInterfaceApiClient(
    api_url=settings.CONTROL_INTERFACE_SERVICE_URL,
    auth_token=settings.CONTROL_INTERFACE_SERVICE_TOKEN
)
def request_passes_test(test_func, login_url=None,
                        redirect_field_name=REDIRECT_FIELD_NAME):
    """
    Decorator for views that checks that the request passes the given test,
    redirecting to the log-in page if necessary. The test should be a callable
    that takes the request object and returns True if the request passes.
    """
    def decorator(view_func):
        @wraps(view_func, assigned=available_attrs(view_func))
        def _wrapped_view(request, *args, **kwargs):
            if test_func(request):
                return view_func(request, *args, **kwargs)
            next_url = request.build_absolute_uri()
            resolved_login_url = resolve_url(login_url or settings.LOGIN_URL)
            # If the login url shares scheme and host with this request, a
            # relative path is enough for the "next" parameter.
            login_scheme, login_netloc = urlparse(resolved_login_url)[:2]
            current_scheme, current_netloc = urlparse(next_url)[:2]
            same_scheme = not login_scheme or login_scheme == current_scheme
            same_host = not login_netloc or login_netloc == current_netloc
            if same_scheme and same_host:
                next_url = request.get_full_path()
            from django.contrib.auth.views import redirect_to_login
            return redirect_to_login(
                next_url, resolved_login_url, redirect_field_name)
        return _wrapped_view
    return decorator
def login_required(function=None, redirect_field_name=REDIRECT_FIELD_NAME,
                   login_url=None):
    """
    Decorator for views that checks that the user is logged in, redirecting
    to the log-in page if necessary.
    """
    decorator = request_passes_test(
        lambda request: request.session.get('user_token'),
        login_url=login_url,
        redirect_field_name=redirect_field_name
    )
    return decorator(function) if function else decorator
def has_permission(permissions, permission, object_id=None):
    """Check whether ``permissions`` grants ``permission``.

    When ``object_id`` is given, a grant for that object must exist;
    without it, exactly one grant of the type must exist.
    """
    matching = [entry['object_id'] for entry in permissions
                if entry['type'] == permission]
    if object_id is None:
        return len(matching) == 1
    return object_id in matching
def permission_required(function=None, permission=None, object_id=None,
                        redirect_field_name=REDIRECT_FIELD_NAME,
                        login_url=None):
    """
    Decorator for views that checks that the session's permissions grant
    ``permission``, redirecting to the log-in page if necessary.
    """
    def check(request):
        return has_permission(
            request.session.get('user_permissions'), permission, object_id)
    decorator = request_passes_test(
        check,
        login_url=login_url,
        redirect_field_name=redirect_field_name
    )
    return decorator(function) if function else decorator
def tokens_required(service_list):
    """
    Ensure the user has the necessary tokens for the specified services,
    redirecting to the denied page otherwise.
    """
    def decorator(func):
        @wraps(func)
        def inner(request, *args, **kwargs):
            available = request.session["user_tokens"]
            missing = [s for s in service_list if s not in available]
            if missing:
                return redirect('denied')
            return func(request, *args, **kwargs)
        return inner
    return decorator
def login(request, template_name='ci/login.html',
          redirect_field_name=REDIRECT_FIELD_NAME,
          authentication_form=AuthenticationForm):
    """
    Displays the login form and handles the login action.

    On a valid POST the user's auth token, email, permissions, dashboards
    and per-service API tokens are cached on the session before redirecting.
    """
    redirect_to = request.POST.get(redirect_field_name,
                                   request.GET.get(redirect_field_name, ''))
    if request.method == "POST":
        form = authentication_form(request, data=request.POST)
        if form.is_valid():
            # Ensure the user-originating redirection url is safe.
            if not is_safe_url(url=redirect_to, host=request.get_host()):
                redirect_to = resolve_url(settings.LOGIN_REDIRECT_URL)
            # Okay, security check complete. Get the user object from auth api.
            user = form.get_user()
            request.session['user_token'] = user["token"]
            request.session['user_email'] = user["email"]
            request.session['user_permissions'] = user["permissions"]
            request.session['user_id'] = user["id"]
            request.session['user_list'] = user["user_list"]
            if not settings.HIDE_DASHBOARDS:
                # Set user dashboards because they are slow to change
                dashboards = ciApi.get_user_dashboards(user["id"])
                dashboard_list = list(dashboards['results'])
                if len(dashboard_list) > 0:
                    request.session['user_dashboards'] = \
                        dashboard_list[0]["dashboards"]
                    request.session['user_default_dashboard'] = \
                        dashboard_list[0]["default_dashboard"]["id"]
                else:
                    request.session['user_dashboards'] = []
                    request.session['user_default_dashboard'] = None
            # Get the user access tokens too and format for easy access
            tokens = ciApi.get_user_service_tokens(
                params={"user_id": user["id"]})
            token_list = list(tokens['results'])
            user_tokens = {}
            if len(token_list) > 0:
                for token in token_list:
                    # keyed by service name; url gains the API version suffix
                    user_tokens[token["service"]["name"]] = {
                        "token": token["token"],
                        "url": token["service"]["url"] + "/api/v1"
                    }
            request.session['user_tokens'] = user_tokens
            return HttpResponseRedirect(redirect_to)
    else:
        form = authentication_form(request)
    current_site = get_current_site(request)
    context = {
        'form': form,
        redirect_field_name: redirect_to,
        'site': current_site,
        'site_name': current_site.name,
    }
    return TemplateResponse(request, template_name, context)
def logout(request):
    """Clear the login-related session keys and return to the index page.

    Bug fix: keys are now removed independently. Previously all deletions
    shared one try/except, so the first missing key (e.g. the dashboard
    keys, which are never set when HIDE_DASHBOARDS is on) aborted the rest
    of the cleanup and left later keys behind.
    """
    session_keys = (
        'user_token',
        'user_email',
        'user_permissions',
        'user_id',
        'user_dashboards',
        'user_default_dashboard',
    )
    for key in session_keys:
        # pop with a default never raises, unlike ``del``
        request.session.pop(key, None)
    return redirect('index')
@login_required(login_url='/login/')
@permission_required(permission='ci:view', login_url='/login/')
def index(request):
    """Send the user to their default dashboard when one is configured,
    otherwise render the plain index page."""
    default_dash = request.session.get("user_default_dashboard")
    if default_dash is not None and not settings.HIDE_DASHBOARDS:
        return HttpResponseRedirect(
            reverse('dashboard', args=(default_dash,)))
    return render(request, 'ci/index.html')
@login_required(login_url='/login/')
@permission_required(permission='ci:view', login_url='/login/')
def health_messages(request):
    """Message-delivery health page.

    AJAX requests return chart data as JSON, selected by the ``chart_type``
    GET parameter; non-AJAX requests render the page shell.
    """
    if settings.HIDE_HEALTH:
        return redirect('denied')
    if request.is_ajax():
        METRIC_SENT_SUM = 'message.sent.sum'
        client = MetricsApiClient(
            settings.METRIC_API_URL,
            auth=(settings.METRIC_API_USER, settings.METRIC_API_PASSWORD))
        chart_type = request.GET.get('chart_type', None)
        today = now()
        if chart_type == 'estimated-vs-sent':
            # days elapsed in the current week, today included
            get_days = today.weekday() + 1
            sent = client.get_metrics(
                m=METRIC_SENT_SUM, from_='-%sd' % get_days, interval='1d',
                nulls='zeroize')
            sent_data = utils.get_ranged_data_from_timeseries(
                sent, today, range_type='week')
            # The estimate data is stored as .last metrics with 0 - 6
            # representing the days of the week. The cron format specifies
            # 0 = Sunday whereas Python datetime.weekday() specifies
            # 0 = Monday.
            estimate_data = []
            for day in range(7):
                estimated = client.get_metrics(
                    m='subscriptions.send.estimate.%s.last' % day, from_='-7d',
                    interval='1d', nulls='zeroize')
                estimate_data.append(
                    utils.get_last_value_from_timeseries(estimated))
            return JsonResponse({
                'Estimated': estimate_data,
                'Sent': sent_data
            })
        elif chart_type == 'sent-today':
            # hourly buckets since midnight
            get_hours = today.hour
            sent = client.get_metrics(
                m=METRIC_SENT_SUM, from_='-%sh' % get_hours, interval='1h',
                nulls='zeroize')
            sent_data = utils.get_ranged_data_from_timeseries(
                sent, today, range_type='day')
            return JsonResponse({
                'Today': sent_data
            })
        elif chart_type == 'sent-this-week':
            get_days = today.weekday() + 7  # Include last week in the set.
            sent = client.get_metrics(
                m=METRIC_SENT_SUM, from_='-%sd' % get_days, interval='1d',
                nulls='zeroize')
            this_week_data = utils.get_ranged_data_from_timeseries(
                sent, today, range_type='week')
            last_week_data = utils.get_ranged_data_from_timeseries(
                sent, today-timedelta(weeks=1), range_type='week')
            return JsonResponse({
                'Last week': last_week_data,
                'This week': this_week_data
            })
    return render(request, 'ci/health_messages.html')
@login_required(login_url='/login/')
@permission_required(permission='ci:view', login_url='/login/')
def health_subscriptions(request):
    """Subscription-creation health page.

    AJAX requests return chart data as JSON, selected by the ``chart_type``
    GET parameter; non-AJAX requests render the page shell.
    """
    if settings.HIDE_HEALTH:
        return redirect('denied')
    if request.is_ajax():
        METRIC_SUBSCRIPTIONS_SUM = 'subscriptions.created.sum'
        client = MetricsApiClient(
            settings.METRIC_API_URL,
            auth=(settings.METRIC_API_USER, settings.METRIC_API_PASSWORD))
        chart_type = request.GET.get('chart_type', None)
        today = now()
        if chart_type == 'subscriptions-today':
            get_hours = today.hour + 24  # Include yesterday in the set.
            subscriptions = client.get_metrics(
                m=METRIC_SUBSCRIPTIONS_SUM, from_='-%sh' % get_hours,
                interval='1h', nulls='zeroize')
            today_data = utils.get_ranged_data_from_timeseries(
                subscriptions, today, range_type='day')
            yesterday_data = utils.get_ranged_data_from_timeseries(
                subscriptions, today - timedelta(days=1), range_type='day')
            return JsonResponse({
                'Yesterday': yesterday_data,
                'Today': today_data
            })
        elif chart_type == 'subscriptions-this-week':
            get_days = today.weekday() + 7  # Include last week in the set.
            subscriptions = client.get_metrics(
                m=METRIC_SUBSCRIPTIONS_SUM, from_='-%sd' % get_days,
                interval='1d', nulls='zeroize')
            this_week_data = utils.get_ranged_data_from_timeseries(
                subscriptions, today, range_type='week')
            last_week_data = utils.get_ranged_data_from_timeseries(
                subscriptions, today-timedelta(weeks=1), range_type='week')
            return JsonResponse({
                'Last week': last_week_data,
                'This week': this_week_data
            })
    return render(request, 'ci/health_subscriptions.html')
@login_required(login_url='/login/')
@permission_required(permission='ci:view', login_url='/login/')
def health_registrations(request):
    """Registration-creation health page.

    AJAX requests return chart data as JSON, selected by the ``chart_type``
    GET parameter; non-AJAX requests render the page shell.
    """
    if settings.HIDE_HEALTH:
        return redirect('denied')
    if request.is_ajax():
        METRIC_REGISTRATIONS_SUM = 'registrations.created.sum'
        client = MetricsApiClient(
            settings.METRIC_API_URL,
            auth=(settings.METRIC_API_USER, settings.METRIC_API_PASSWORD))
        chart_type = request.GET.get('chart_type', None)
        today = now()
        if chart_type == 'registrations-today':
            get_hours = today.hour + 24  # Include yesterday in the set.
            registrations = client.get_metrics(
                m=METRIC_REGISTRATIONS_SUM, from_='-%sh' % get_hours,
                interval='1h', nulls='zeroize')
            today_data = utils.get_ranged_data_from_timeseries(
                registrations, today, range_type='day')
            yesterday_data = utils.get_ranged_data_from_timeseries(
                registrations, today - timedelta(days=1), range_type='day')
            return JsonResponse({
                'Yesterday': yesterday_data,
                'Today': today_data
            })
        elif chart_type == 'registrations-this-week':
            get_days = today.weekday() + 7  # Include last week in the set.
            registrations = client.get_metrics(
                m=METRIC_REGISTRATIONS_SUM, from_='-%sd' % get_days,
                interval='1d', nulls='zeroize')
            this_week_data = utils.get_ranged_data_from_timeseries(
                registrations, today, range_type='week')
            last_week_data = utils.get_ranged_data_from_timeseries(
                registrations, today-timedelta(weeks=1), range_type='week')
            return JsonResponse({
                'Last week': last_week_data,
                'This week': this_week_data
            })
    return render(request, 'ci/health_registrations.html')
@login_required(login_url='/login/')
@permission_required(permission='ci:view', login_url='/login/')
def dashboard(request, dashboard_id):
    """Render a single control-interface dashboard."""
    if settings.HIDE_DASHBOARDS:
        return redirect('denied')
    context = {"dashboard": ciApi.get_dashboard(int(dashboard_id))}
    return render(request, 'ci/dashboard.html', context)
@login_required(login_url='/login/')
@permission_required(permission='ci:view', login_url='/login/')
def dashboard_metric(request):
    """Proxy a metrics-API query for dashboard widgets and return JSON.

    GET parameters are forwarded to ``MetricsApiClient.get_metrics``.
    """
    if settings.HIDE_DASHBOARDS:
        return redirect('denied')
    client = MetricsApiClient(
        settings.METRIC_API_URL,
        auth=(settings.METRIC_API_USER, settings.METRIC_API_PASSWORD))
    response = {"objects": []}
    # defaults so these keys always exist even when absent from the query
    filters = {
        "m": [],
        "start": "",
        "interval": "",
        "nulls": ""
    }
    for k, v in request.GET.lists():
        filters[k] = v
    # NOTE(review): this checks 'from' but copies the value of 'start'; it
    # looks like the intent was to map the 'start' GET parameter onto the
    # metrics API's 'from' parameter, yet as written it only fires when the
    # client already sent 'from'. Confirm against the metrics API before
    # changing.
    if filters.get('from') is not None:
        filters['from'] = filters['start']
    results = client.get_metrics(**filters)
    # emit one series per requested metric, empty when the API had no data
    for metric in filters['m']:
        if metric in results:
            response["objects"].append({
                "key": metric, "values": results[metric]})
        else:
            response["objects"].append({
                "key": metric, "values": []})
    return JsonResponse(response)
@login_required(login_url='/login/')
@permission_required(permission='ci:view', login_url='/login/')
def denied(request):
    # Access-denied landing page used by the permission/token decorators.
    return render(request, 'ci/denied.html')
@login_required(login_url='/login/')
@permission_required(permission='ci:view', login_url='/login/')
def not_found(request):
    # Shown when a detail view cannot locate the requested object.
    return render(request, 'ci/not_found.html')
@login_required(login_url='/login/')
@permission_required(permission='ci:view', login_url='/login/')
@tokens_required(['SEED_IDENTITY_SERVICE'])
def identities(request):
    """List identities, optionally filtered by address via the search form."""
    id_tokens = request.session["user_tokens"]["SEED_IDENTITY_SERVICE"]
    idApi = IdentityStoreApiClient(
        api_url=id_tokens["url"],
        auth_token=id_tokens["token"]
    )
    if 'address_value' not in request.GET:
        form = IdentitySearchForm()
        results = idApi.get_identities()['results']
    else:
        form = IdentitySearchForm(request.GET)
        if form.is_valid():
            results = idApi.get_identity_by_address(
                address_type=form.cleaned_data['address_type'],
                address_value=form.cleaned_data['address_value'])['results']
        else:
            results = []
    page = utils.get_page_of_iterator(
        results, settings.IDENTITY_LIST_PAGE_SIZE,
        request.GET.get('page')
    )
    context = {'identities': page, 'form': form}
    return render(request, 'ci/identities.html', context)
@login_required(login_url='/login/')
@permission_required(permission='ci:view', login_url='/login/')
@tokens_required(['SEED_IDENTITY_SERVICE', 'HUB',
                  'SEED_STAGE_BASED_MESSAGING'])
def identity(request, identity):
    """Detail page for one identity.

    Shows registrations, changes, subscriptions, in/outbound messages and
    audit logs, and handles three POST actions: add a subscription,
    deactivate a subscription, and opt the identity out.

    NOTE(review): the decorator list does not require the
    SEED_MESSAGE_SENDER token even though msApi below reads it -- a user
    lacking that token gets a KeyError instead of the denied page. Confirm.
    """
    idApi = IdentityStoreApiClient(
        api_url=request.session["user_tokens"]["SEED_IDENTITY_SERVICE"]["url"], # noqa
        auth_token=request.session["user_tokens"]["SEED_IDENTITY_SERVICE"]["token"] # noqa
    )
    hubApi = HubApiClient(
        api_url=request.session["user_tokens"]["HUB"]["url"], # noqa
        auth_token=request.session["user_tokens"]["HUB"]["token"] # noqa
    )
    sbmApi = StageBasedMessagingApiClient(
        api_url=request.session["user_tokens"]["SEED_STAGE_BASED_MESSAGING"]["url"], # noqa
        auth_token=request.session["user_tokens"]["SEED_STAGE_BASED_MESSAGING"]["token"] # noqa
    )
    msApi = MessageSenderApiClient(
        api_url=request.session["user_tokens"]["SEED_MESSAGE_SENDER"]["url"], # noqa
        auth_token=request.session["user_tokens"]["SEED_MESSAGE_SENDER"]["token"] # noqa
    )
    # id -> name / id -> schedule lookups plus form choices for messagesets
    messagesets_results = sbmApi.get_messagesets()
    messagesets = {}
    schedules = {}
    choices = []
    for messageset in messagesets_results["results"]:
        messagesets[messageset["id"]] = messageset["short_name"]
        schedules[messageset["id"]] = messageset["default_schedule"]
        choices.append((messageset["id"], messageset["short_name"]))
    results = idApi.get_identity(identity)
    sbm_filter = {
        "identity": identity
    }
    subscriptions = sbmApi.get_subscriptions(params=sbm_filter)
    if request.method == "POST":
        if 'add_subscription' in request.POST:
            form = AddSubscriptionForm(request.POST)
            # the subscription language comes from the identity details
            language = results['details'].get(settings.LANGUAGE_FIELD)
            if language:
                if form.is_valid():
                    subscription = {
                        "active": True,
                        "identity": identity,
                        "completed": False,
                        "lang": language,
                        "messageset": form.cleaned_data['messageset'],
                        "next_sequence_number": 1,
                        "schedule":
                            schedules[form.cleaned_data['messageset']],
                        "process_status": 0,
                    }
                    sbmApi.create_subscription(subscription)
                    messages.add_message(
                        request,
                        messages.INFO,
                        'Successfully created a subscription.',
                        extra_tags='success'
                    )
                    # record the action in the audit log
                    ciApi.create_auditlog({
                        "identity_id": identity,
                        "action": "Create",
                        "action_by": request.session['user_id'],
                        "model": "subscription"
                    })
            else:
                messages.add_message(
                    request,
                    messages.ERROR,
                    'No language value in {} on the identity.'.format(
                        settings.LANGUAGE_FIELD),
                    extra_tags='danger'
                )
        elif 'deactivate_subscription' in request.POST:
            form = DeactivateSubscriptionForm(request.POST)
            if form.is_valid():
                data = {
                    "active": False
                }
                sbmApi.update_subscription(
                    form.cleaned_data['subscription_id'], data)
                messages.add_message(
                    request,
                    messages.INFO,
                    'Successfully deactivated the subscription.',
                    extra_tags='success'
                )
                ciApi.create_auditlog({
                    "identity_id": identity,
                    "subscription_id": form.cleaned_data['subscription_id'],
                    "action": "Update",
                    "action_by": request.session['user_id'],
                    "model": "subscription",
                    "detail": "Deactivated subscription"
                })
        elif 'optout_identity' in request.POST:
            try:
                details = results.get('details', {})
                addresses = details.get('addresses', {})
                # NOTE(review): the loop variable shadows ``addresses``
                # (outer: type -> addr dict, inner: addr -> info dict)
                for address_type, addresses in addresses.items():
                    for address, info in addresses.items():
                        idApi.create_optout({
                            "identity": identity,
                            "optout_type": "stop",
                            "address_type": address_type,
                            "address": address,
                            "request_source": "ci"})
                        info['optedout'] = True
                hubApi.create_optout_admin({
                    settings.IDENTITY_FIELD: identity
                })
                messages.add_message(
                    request,
                    messages.INFO,
                    'Successfully opted out.',
                    extra_tags='success'
                )
                ciApi.create_auditlog({
                    "identity_id": identity,
                    "action": "Update",
                    "action_by": request.session['user_id'],
                    "model": "identity",
                    "detail": "Optout identity"
                })
            # NOTE(review): bare except hides the failure cause; consider
            # catching HTTPServiceError and logging the exception.
            except:
                messages.add_message(
                    request,
                    messages.ERROR,
                    'Optout failed.',
                    extra_tags='danger'
                )
    hub_filter = {
        settings.IDENTITY_FIELD: identity
    }
    registrations = hubApi.get_registrations(params=hub_filter)
    changes = hubApi.get_changes(params=hub_filter)
    if results is None:
        return redirect('not_found')
    # paginate outbound messages (newest first)
    outbound_message_params = {
        'to_identity': identity,
        'ordering': '-created_at',
    }
    outbound_messages = msApi.get_outbounds(params=outbound_message_params)
    outbound_page = request.GET.get('outbound_page')
    outbound_paginator = Paginator(
        list(outbound_messages['results']),
        settings.IDENTITY_MESSAGES_PAGE_SIZE)
    try:
        outbound_messages = outbound_paginator.page(outbound_page)
    except PageNotAnInteger:
        outbound_messages = outbound_paginator.page(1)
    except EmptyPage:
        outbound_messages = outbound_paginator.page(
            outbound_paginator.num_pages)
    # paginate inbound messages (newest first)
    inbound_message_params = {
        'from_identity': identity,
        'ordering': '-created_at',
    }
    inbound_messages = msApi.get_inbounds(inbound_message_params)
    inbound_page = request.GET.get('inbound_page')
    inbound_paginator = Paginator(
        list(inbound_messages['results']),
        settings.IDENTITY_MESSAGES_PAGE_SIZE)
    try:
        inbound_messages = inbound_paginator.page(inbound_page)
    except PageNotAnInteger:
        inbound_messages = inbound_paginator.page(1)
    except EmptyPage:
        inbound_messages = inbound_paginator.page(inbound_paginator.num_pages)
    deactivate_subscription_form = DeactivateSubscriptionForm()
    add_subscription_form = AddSubscriptionForm()
    add_subscription_form.fields['messageset'] = forms.ChoiceField(
        choices=choices)
    # the optout button shows only while some msisdn is not yet opted out
    optout_visible = False
    details = results.get('details', {})
    addresses = details.get('addresses', {})
    msisdns = addresses.get('msisdn', {})
    optout_visible = any(
        (not d.get('optedout') for _, d in msisdns.items()))
    audit_logs = ciApi.get_auditlogs({"identity_id": identity})
    context = {
        "identity": results,
        "registrations": registrations,
        "changes": changes,
        "messagesets": messagesets,
        "subscriptions": subscriptions,
        "outbound_messages": outbound_messages,
        "add_subscription_form": add_subscription_form,
        "deactivate_subscription_form": deactivate_subscription_form,
        "inbound_messages": inbound_messages,
        "optout_visible": optout_visible,
        "audit_logs": audit_logs,
        "users": request.session['user_list']
    }
    context.update(csrf(request))
    return render(request, 'ci/identities_detail.html', context)
@login_required(login_url='/login/')
@permission_required(permission='ci:view', login_url='/login/')
@tokens_required(['HUB'])
def registrations(request):
    """List registrations, optionally filtered via the filter form."""
    hub_tokens = request.session["user_tokens"]["HUB"]
    hubApi = HubApiClient(
        api_url=hub_tokens["url"],
        auth_token=hub_tokens["token"]
    )
    if 'mother_id' not in request.GET:
        form = RegistrationFilterForm()
        registrations = hubApi.get_registrations()['results']
    else:
        form = RegistrationFilterForm(request.GET)
        if not form.is_valid():
            registrations = []
        else:
            reg_filter = {
                settings.STAGE_FIELD: form.cleaned_data['stage'],
                "validated": form.cleaned_data['validated'],
                settings.IDENTITY_FIELD:
                    form.cleaned_data['mother_id']
            }
            registrations = hubApi.get_registrations(
                params=reg_filter)['results']
    context = {
        'form': form,
        'registrations': utils.get_page_of_iterator(
            registrations,
            settings.REGISTRATION_LIST_PAGE_SIZE,
            request.GET.get('page')
        ),
    }
    return render(request, 'ci/registrations.html', context)
@login_required(login_url='/login/')
@permission_required(permission='ci:view', login_url='/login/')
@tokens_required(['HUB'])
def registration(request, registration):
    """Detail page for a single registration.

    Bug fix: the record was previously fetched only on non-POST requests,
    so POSTing to this URL raised NameError on ``results``. The registration
    is now fetched unconditionally (the old POST branch was a no-op).
    """
    hubApi = HubApiClient(
        api_url=request.session["user_tokens"]["HUB"]["url"],  # noqa
        auth_token=request.session["user_tokens"]["HUB"]["token"]  # noqa
    )
    results = hubApi.get_registration(registration)
    if results is None:
        return redirect('not_found')
    context = {
        "registration": results
    }
    context.update(csrf(request))
    return render(request, 'ci/registrations_detail.html', context)
@login_required(login_url='/login/')
@permission_required(permission='ci:view', login_url='/login/')
@tokens_required(['HUB'])
def changes(request):
    """List changes, optionally filtered via the change filter form."""
    hub_tokens = request.session["user_tokens"]["HUB"]
    hubApi = HubApiClient(
        api_url=hub_tokens["url"],
        auth_token=hub_tokens["token"]
    )
    if 'mother_id' not in request.GET:
        form = ChangeFilterForm()
        changes = hubApi.get_changes()['results']
    else:
        form = ChangeFilterForm(request.GET)
        if not form.is_valid():
            changes = []
        else:
            change_filter = {
                "action": form.cleaned_data['action'],
                "validated": form.cleaned_data['validated'],
                settings.IDENTITY_FIELD:
                    form.cleaned_data['mother_id']
            }
            changes = hubApi.get_changes(params=change_filter)['results']
    paged = utils.get_page_of_iterator(
        changes, settings.CHANGE_LIST_PAGE_SIZE, request.GET.get('page'))
    context = {
        "changes": paged,
        "form": form
    }
    return render(request, 'ci/changes.html', context)
@login_required(login_url='/login/')
@permission_required(permission='ci:view', login_url='/login/')
@tokens_required(['HUB'])
def change(request, change):
    """Detail page for a single change record.

    Bug fix: the record was previously fetched only on non-POST requests,
    so POSTing to this URL raised NameError on ``results``. The change is
    now fetched unconditionally (the old POST branch was a no-op).
    """
    hubApi = HubApiClient(
        api_url=request.session["user_tokens"]["HUB"]["url"],
        auth_token=request.session["user_tokens"]["HUB"]["token"]
    )
    results = hubApi.get_change(change)
    if results is None:
        return redirect('not_found')
    context = {"change": results}
    context.update(csrf(request))
    return render(request, 'ci/changes_detail.html', context)
@login_required(login_url='/login/')
@permission_required(permission='ci:view', login_url='/login/')
@tokens_required(['SEED_STAGE_BASED_MESSAGING'])
def subscriptions(request):
    """List subscriptions with message-set names, filterable by identity."""
    sbm_tokens = request.session["user_tokens"]["SEED_STAGE_BASED_MESSAGING"]
    sbmApi = StageBasedMessagingApiClient(
        api_url=sbm_tokens["url"],
        auth_token=sbm_tokens["token"]
    )
    # id -> short name lookup used by the template
    messagesets = {}
    for ms in sbmApi.get_messagesets()["results"]:
        messagesets[ms["id"]] = ms["short_name"]
    if 'identity' not in request.GET:
        form = SubscriptionFilterForm()
        subscriptions = sbmApi.get_subscriptions()['results']
    else:
        form = SubscriptionFilterForm(request.GET)
        if not form.is_valid():
            subscriptions = []
        else:
            sbm_filter = {
                "identity": form.cleaned_data['identity'],
                "active": form.cleaned_data['active'],
                "completed": form.cleaned_data['completed']
            }
            subscriptions = sbmApi.get_subscriptions(
                params=sbm_filter)['results']
    subscriptions = utils.get_page_of_iterator(
        subscriptions, settings.SUBSCRIPTION_LIST_PAGE_SIZE,
        request.GET.get('page'))
    context = {
        "subscriptions": subscriptions,
        "messagesets": messagesets,
        "form": form
    }
    context.update(csrf(request))
    return render(request, 'ci/subscriptions.html', context)
@login_required(login_url='/login/')
@permission_required(permission='ci:view', login_url='/login/')
@tokens_required(['SEED_STAGE_BASED_MESSAGING'])
def subscription(request, subscription):
    """Show one subscription; POST submits a language/messageset change.

    GET renders the subscription detail page. POST validates a
    ChangeSubscriptionForm and, when language or messageset differ from
    the current values, creates an admin change via the hub and writes an
    audit-log entry for each changed field.

    NOTE(review): the POST path reads HUB tokens, but the decorator only
    requires SEED_STAGE_BASED_MESSAGING -- confirm HUB tokens are always
    present. ``ciApi`` is assumed to be a module-level client defined
    elsewhere in this file.
    """
    sbmApi = StageBasedMessagingApiClient(
        api_url=request.session["user_tokens"]["SEED_STAGE_BASED_MESSAGING"]["url"],  # noqa
        auth_token=request.session["user_tokens"]["SEED_STAGE_BASED_MESSAGING"]["token"]  # noqa
    )
    # Map messageset id -> short name, used for display and to resolve the
    # target messageset name when creating a change.
    messagesets_results = sbmApi.get_messagesets()
    messagesets = {}
    for messageset in messagesets_results["results"]:
        messagesets[messageset["id"]] = messageset["short_name"]
    results = sbmApi.get_subscription(subscription)
    if results is None:
        return redirect('not_found')
    if request.method == "POST":
        try:
            form = ChangeSubscriptionForm(request.POST)
            if form.is_valid():
                lang = form.cleaned_data["language"]
                messageset = form.cleaned_data["messageset"]
                # Only create a change when something actually differs.
                if (lang != results["lang"] or
                        messageset != results["messageset"]):
                    hubApi = HubApiClient(
                        request.session["user_tokens"]["HUB"]["token"],
                        api_url=request.session["user_tokens"]["HUB"]["url"])  # noqa
                    change = {
                        settings.IDENTITY_FIELD: results["identity"],
                        "subscription": subscription
                    }
                    if lang != results["lang"]:
                        change["language"] = lang
                    if messageset != results["messageset"]:
                        change["messageset"] = messagesets[messageset]
                    hubApi.create_change_admin(change)
                    messages.add_message(
                        request,
                        messages.INFO,
                        'Successfully added change.',
                        extra_tags='success'
                    )
                    if lang != results["lang"]:
                        ciApi.create_auditlog({
                            "identity_id": results["identity"],
                            "action": "Update",
                            "action_by": request.session['user_id'],
                            "model": "subscription",
                            "detail": "Updated language: {} to {}".format(
                                results["lang"], lang)
                        })
                    if messageset != results["messageset"]:
                        ciApi.create_auditlog({
                            "identity_id": results["identity"],
                            "action": "Update",
                            "action_by": request.session['user_id'],
                            "model": "subscription",
                            "detail": "Updated messageset: {} to {}".format(
                                messagesets[results["messageset"]],
                                messagesets[messageset])
                        })
        except Exception:
            # Bug fix: narrowed from a bare ``except:`` so that
            # KeyboardInterrupt/SystemExit are no longer swallowed.
            messages.add_message(
                request,
                messages.ERROR,
                'Change failed.',
                extra_tags='danger'
            )
    languages = sbmApi.get_messageset_languages()
    context = {
        "subscription": results,
        "messagesets": messagesets,
        "languages": json.dumps(languages)
    }
    context.update(csrf(request))
    return render(request, 'ci/subscriptions_detail.html', context)
@login_required(login_url='/login/')
@permission_required(permission='ci:view', login_url='/login/')
def services(request):
    """Render the list of services known to the control-interface API."""
    return render(request, 'ci/services.html',
                  {"services": ciApi.get_services()})
@login_required(login_url='/login/')
@permission_required(permission='ci:view', login_url='/login/')
def service(request, service):
    """Render the detail page for one service, including its live status."""
    detail = ciApi.get_service(service)
    status = ciApi.get_service_status(service)
    context = {
        "service": detail,
        "service_status": status,
    }
    return render(request, 'ci/services_detail.html', context)
@login_required(login_url='/login/')
@permission_required(permission='ci:view', login_url='/login/')
@tokens_required(['SEED_STAGE_BASED_MESSAGING'])
def subscription_failures(request):
    """List failed subscription tasks; a POST re-queues all of them."""
    sbm_tokens = request.session["user_tokens"]["SEED_STAGE_BASED_MESSAGING"]
    sbmApi = StageBasedMessagingApiClient(
        api_url=sbm_tokens["url"],
        auth_token=sbm_tokens["token"]
    )
    if request.method == "POST":
        requeue = sbmApi.requeue_failed_tasks()
        # Truthy 'requeued_failed_tasks' in the response means success.
        if requeue.get('requeued_failed_tasks'):
            messages.add_message(
                request,
                messages.INFO,
                'Successfully re-queued all subscription tasks'
            )
        else:
            messages.add_message(
                request,
                messages.ERROR,
                'Could not re-queued all subscription tasks'
            )
    failures = utils.get_page_of_iterator(
        sbmApi.get_failed_tasks()['results'],
        settings.FAILURE_LIST_PAGE_SIZE,
        request.GET.get('page'))
    context = {'failures': failures}
    context.update(csrf(request))
    return render(request, 'ci/failures_subscriptions.html', context)
@login_required(login_url='/login/')
@permission_required(permission='ci:view', login_url='/login/')
@tokens_required(['SEED_SCHEDULER'])
def schedule_failures(request):
    """List failed scheduler tasks; a POST re-queues all of them."""
    sched_tokens = request.session["user_tokens"]["SEED_SCHEDULER"]
    schdApi = SchedulerApiClient(
        sched_tokens["token"],  # noqa
        api_url=sched_tokens["url"]  # noqa
    )
    if request.method == "POST":
        requeue = schdApi.requeue_failed_tasks()
        # Truthy 'requeued_failed_tasks' in the response means success.
        if requeue.get('requeued_failed_tasks'):
            messages.add_message(
                request,
                messages.INFO,
                'Successfully re-queued all scheduler tasks'
            )
        else:
            messages.add_message(
                request,
                messages.ERROR,
                'Could not re-queued all scheduler tasks'
            )
    failures = utils.get_page_of_iterator(
        schdApi.get_failed_tasks()['results'],
        settings.FAILURE_LIST_PAGE_SIZE,
        request.GET.get('page'))
    context = {'failures': failures}
    context.update(csrf(request))
    return render(request, 'ci/failures_schedules.html', context)
@login_required(login_url='/login/')
@permission_required(permission='ci:view', login_url='/login/')
@tokens_required(['SEED_MESSAGE_SENDER'])
def outbound_failures(request):
    """List failed outbound-message tasks; a POST re-queues all of them."""
    ms_tokens = request.session["user_tokens"]["SEED_MESSAGE_SENDER"]
    msApi = MessageSenderApiClient(
        ms_tokens["token"],  # noqa
        api_url=ms_tokens["url"]  # noqa
    )
    if request.method == "POST":
        requeue = msApi.requeue_failed_tasks()
        # Truthy 'requeued_failed_tasks' in the response means success.
        if requeue.get('requeued_failed_tasks'):
            messages.add_message(
                request,
                messages.INFO,
                'Successfully re-queued all outbound tasks'
            )
        else:
            messages.add_message(
                request,
                messages.ERROR,
                'Could not re-queued all outbound tasks'
            )
    failures = utils.get_page_of_iterator(
        msApi.get_failed_tasks()['results'],
        settings.FAILURE_LIST_PAGE_SIZE,
        request.GET.get('page'))
    context = {'failures': failures}
    context.update(csrf(request))
    return render(request, 'ci/failures_outbounds.html', context)
@login_required(login_url='/login/')
@permission_required(permission='ci:view', login_url='/login/')
@tokens_required(['HUB'])
def report_generation(request):
    """Render the report-generation page and handle report trigger POSTs.

    POST: depending on ``report_type`` ('registration' or 'cohort'),
    validates the matching form, strips optional fields that were left
    unset, and asks the hub to start generating the report, reporting the
    outcome through the messages framework.
    GET: renders both unbound forms.
    """
    hubApi = HubApiClient(
        request.session["user_tokens"]["HUB"]["token"],
        api_url=request.session["user_tokens"]["HUB"]["url"])
    if request.method == "POST":
        # Bug fix: use .get() so a malformed POST without 'report_type'
        # cannot raise KeyError.
        report_type = request.POST.get('report_type', '')
        posted_form = None
        if report_type == 'registration':
            reg_form = ReportGenerationForm(
                request.POST, auto_id='registration_%s')
            posted_form = reg_form
            msisdn_form = MsisdnReportGenerationForm(auto_id='cohort_%s')
        elif report_type == 'cohort':
            msisdn_form = MsisdnReportGenerationForm(
                request.POST, request.FILES, auto_id='cohort_%s')
            posted_form = msisdn_form
            reg_form = ReportGenerationForm(auto_id='registration_%s')
        else:
            # Bug fix: an unrecognised report_type previously left
            # posted_form/reg_form/msisdn_form unbound, raising NameError.
            # Fall back to unbound forms and skip validation instead.
            reg_form = ReportGenerationForm(auto_id='registration_%s')
            msisdn_form = MsisdnReportGenerationForm(auto_id='cohort_%s')
        if posted_form is not None and posted_form.is_valid():
            # Remove fields that weren't supplied so the hub only receives
            # explicitly chosen parameters.
            if posted_form.cleaned_data.get('start_date') is None:
                del posted_form.cleaned_data['start_date']
            if posted_form.cleaned_data.get('end_date') is None:
                del posted_form.cleaned_data['end_date']
            if posted_form.cleaned_data.get('email_to') == []:
                del posted_form.cleaned_data['email_to']
            if posted_form.cleaned_data.get('email_from') == "":
                del posted_form.cleaned_data['email_from']
            if posted_form.cleaned_data.get('email_subject') == "":
                del posted_form.cleaned_data['email_subject']
            try:
                results = hubApi.trigger_report_generation(
                    posted_form.cleaned_data)
            except HTTPServiceError as e:
                logger.error('Report generation failed: %s' % e.details)
                messages.add_message(
                    request,
                    messages.ERROR,
                    'Could not start report generation'
                )
            else:
                if 'report_generation_requested' in results:
                    messages.add_message(
                        request,
                        messages.INFO,
                        'Successfully started report generation'
                    )
                else:
                    messages.add_message(
                        request,
                        messages.ERROR,
                        'Could not start report generation'
                    )
    else:
        reg_form = ReportGenerationForm(auto_id='registration_%s')
        msisdn_form = MsisdnReportGenerationForm(auto_id='cohort_%s')
        report_type = ""
    report_tasks = hubApi.get_report_tasks()
    context = {
        "forms": {"registration_form": reg_form, "cohort_form": msisdn_form},
        "report_tasks": report_tasks,
        "report_type": report_type
    }
    context.update(csrf(request))
    return render(request, 'ci/reports.html', context)
@login_required(login_url='/login/')
@permission_required(permission='ci:view', login_url='/login/')
@tokens_required(['SEED_IDENTITY_SERVICE', 'HUB'])
def user_management(request):
    """Search and browse user details sourced from the hub.

    Bug fix: the view reads HUB tokens from the session, so 'HUB' is now
    also listed in @tokens_required (previously only SEED_IDENTITY_SERVICE
    was required, and a session without HUB tokens raised KeyError).
    """
    if not settings.SHOW_USER_DETAILS:
        return redirect('denied')
    hubApi = HubApiClient(
        api_url=request.session["user_tokens"]["HUB"]["url"],  # noqa
        auth_token=request.session["user_tokens"]["HUB"]["token"]  # noqa
    )
    # Pagination is server-side: pass the requested page through to the hub.
    page = int(request.GET.get('page', 1))
    filters = {"page": page}
    form = UserDetailSearchForm(request.GET)
    if form.is_valid():
        # Only non-empty search fields become filters.
        for key, value in form.cleaned_data.items():
            if value:
                filters[key] = value
    results = hubApi.get_user_details(filters)
    # Rebuild the state choices from the hub so the dropdown stays current.
    states = [('*', 'All')]
    for state in hubApi.get_states()['results']:
        states.append((state['name'], state['name']))
    form.fields['state'] = forms.ChoiceField(choices=states)
    context = {}
    context['users'] = results['results']
    context['has_next'] = results['has_next']
    context['has_previous'] = results['has_previous']
    context['next_page_number'] = page + 1
    context['previous_page_number'] = page - 1
    context['form'] = form
    return render(request, 'ci/user_management.html', context)
@login_required(login_url='/login/')
@permission_required(permission='ci:view', login_url='/login/')
@tokens_required(['SEED_IDENTITY_SERVICE', 'HUB',
                  'SEED_STAGE_BASED_MESSAGING'])
def user_management_detail(request, identity):
    """Render the detail page for one identity.

    Aggregates the identity record, any linked identity, the operator
    identity, hub registrations and stage-based-messaging subscriptions
    for the given identity id.
    """
    idApi = IdentityStoreApiClient(
        api_url=request.session["user_tokens"]["SEED_IDENTITY_SERVICE"]["url"],  # noqa
        auth_token=request.session["user_tokens"]["SEED_IDENTITY_SERVICE"]["token"]  # noqa
    )
    sbmApi = StageBasedMessagingApiClient(
        api_url=request.session["user_tokens"]["SEED_STAGE_BASED_MESSAGING"]["url"],  # noqa
        auth_token=request.session["user_tokens"]["SEED_STAGE_BASED_MESSAGING"]["token"]  # noqa
    )
    hubApi = HubApiClient(
        api_url=request.session["user_tokens"]["HUB"]["url"],  # noqa
        auth_token=request.session["user_tokens"]["HUB"]["token"]  # noqa
    )
    # Map messageset id -> short name for display in the template.
    messagesets_results = sbmApi.get_messagesets()
    messagesets = {}
    # linked_to/operator_id default to empty dicts when not resolvable.
    linked_to = {}
    operator_id = {}
    for messageset in messagesets_results["results"]:
        messagesets[messageset["id"]] = messageset["short_name"]
    results = idApi.get_identity(identity)
    if results['details'].get('linked_to'):
        linked_to = idApi.get_identity(results['details']['linked_to'])
    # The operator may live in details or at the top level of the record.
    operator_id = results['details'].get('operator', results.get('operator'))
    if operator_id:
        operator_id = idApi.get_identity(operator_id)
    hub_filter = {
        settings.IDENTITY_FIELD: identity
    }
    registrations = hubApi.get_registrations(params=hub_filter)
    sbm_filter = {
        "identity": identity
    }
    subscriptions = sbmApi.get_subscriptions(params=sbm_filter)
    context = {
        "identity": results,
        "registrations": registrations,
        "messagesets": messagesets,
        "subscriptions": subscriptions,
        "linked_to": linked_to,
        "operator": operator_id
    }
    context.update(csrf(request))
    return render(request, 'ci/user_management_detail.html', context)
| {
"repo_name": "praekelt/seed-control-interface",
"path": "ci/views.py",
"copies": "1",
"size": "46608",
"license": "bsd-3-clause",
"hash": 390333104834868600,
"line_mean": 36.3461538462,
"line_max": 99,
"alpha_frac": 0.5829900446,
"autogenerated": false,
"ratio": 4.180089686098655,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.00009014423076923077,
"num_lines": 1248
} |
from functools import wraps
import logging
import os
from random import random
import sys
import time
import structlog
# structlog renders the full JSON payload into the record's message, so the
# stdlib formatter only needs to pass the message through unchanged.
LOG_FORMAT = '%(message)s'
DEFAULT_STREAM = sys.stdout
# Thread-local dict class used as structlog's context_class.
WRAPPED_DICT_CLASS = structlog.threadlocal.wrap_dict(dict)
# Environment variable names used to carry request IDs from the Lambda
# handler into every log record (see inject_request_ids_into_environment
# and add_request_ids_from_environment below).
ENV_APIG_REQUEST_ID = '_FLEECE_APIG_REQUEST_ID'
ENV_LAMBDA_REQUEST_ID = '_FLEECE_LAMBDA_REQUEST_ID'
def clobber_root_handlers():
    """Remove every handler currently attached to the root logger.

    Iterates over a copy of the handler list because ``removeHandler``
    mutates the list during iteration.
    """
    # Plain loop instead of a list comprehension run only for its side
    # effects (anti-idiom that also allocates a throwaway list).
    for handler in logging.root.handlers[:]:
        logging.root.removeHandler(handler)
class logme(object):
    """Decorator that logs entry to and exit from the wrapped function.

    :param level: logging level for the entry/exit records
        (default ``logging.DEBUG``).
    :param logger: logger to emit to; defaults to the root logger.

    The exit record passes the return value as a keyword argument named
    ``<func>_response``; this only works with loggers that accept
    arbitrary keyword arguments (e.g. structlog loggers) -- a stdlib
    logger would raise ``TypeError`` if the record is actually emitted.
    """
    def __init__(self, level=logging.DEBUG, logger=None):
        self.level = level
        if not logger:
            self.logger = logging.getLogger()
        else:
            self.logger = logger

    def __call__(self, func):
        @wraps(func)  # bug fix: preserve __name__/__doc__ of the wrapped func
        def wrapped(*args, **kwargs):
            self.logger.log(self.level, "Entering %s", func.__name__)
            response = func(*args, **kwargs)
            func_response_name = "{0}_response".format(func.__name__)
            kwarg = {func_response_name: response}
            self.logger.log(self.level, "Exiting %s", func.__name__, **kwarg)
            return response
        return wrapped
class RetryHandler(logging.Handler):
    """A logging handler that wraps another handler and retries its emit
    method if it fails. Useful for handlers that connect to an external
    service over the network, such as CloudWatch.

    The wait between retries uses an exponential backoff algorithm with full
    jitter, as described in
    https://www.awsarchitectureblog.com/2015/03/backoff.html.

    :param handler the handler to wrap with retries.
    :param max_retries the maximum number of retries before giving up. The
                       default is 5 retries.
    :param backoff_base the sleep time before the first retry. This time
                        doubles after each retry. The default is 0.1s.
    :param backoff_cap the max sleep time before a retry. The default is 1s.
    :param ignore_errors if set to False, when all retries are exhausted, the
                         exception raised by the original log call is
                         re-raised. If set to True, the error is silently
                         ignored. The default is True.
    """
    def __init__(self, handler, max_retries=5, backoff_base=0.1,
                 backoff_cap=1, ignore_errors=True):
        super(RetryHandler, self).__init__()
        self.handler = handler
        self.max_retries = max_retries
        self.backoff_base = backoff_base
        self.backoff_cap = backoff_cap
        self.ignore_errors = ignore_errors

    def emit(self, record):
        """Emit via the wrapped handler, retrying with jittered backoff."""
        try:
            return self.handler.emit(record)
        except Exception as e:
            exc = e
            sleep = self.backoff_base
            for _ in range(self.max_retries):
                # Full jitter: sleep a random fraction of the current cap.
                time.sleep(sleep * random())
                try:
                    return self.handler.emit(record)
                except Exception:
                    # Bug fix: narrowed from a bare ``except:`` so that
                    # KeyboardInterrupt/SystemExit propagate immediately.
                    pass
                sleep = min(self.backoff_cap, sleep * 2)
            if not self.ignore_errors:
                raise exc
def _has_streamhandler(logger, level=None, fmt=LOG_FORMAT,
                       stream=DEFAULT_STREAM):
    """Check the named logger for an appropriate existing StreamHandler.

    This only returns True if a StreamHandler that exactly matches
    our specification is found. If other StreamHandlers are seen,
    we assume they were added for a different purpose.
    """
    # Ensure we are talking the same type of logging levels:
    # if they passed in a string we need to convert it to a number.
    # Bug fix: ``basestring`` only exists on Python 2, so guard it with a
    # compat shim instead of raising NameError on Python 3.
    try:
        string_types = basestring  # noqa: F821 (Python 2)
    except NameError:
        string_types = str  # Python 3
    if isinstance(level, string_types):
        level = logging.getLevelName(level)
    for handler in logger.handlers:
        if not isinstance(handler, logging.StreamHandler):
            continue
        if handler.stream is not stream:
            continue
        if handler.level != level:
            continue
        if not handler.formatter or handler.formatter._fmt != fmt:
            continue
        return True
    return False
def inject_request_ids_into_environment(func):
    """Decorator for the Lambda handler to inject request IDs for logging."""
    @wraps(func)
    def wrapper(event, context):
        # Not every invocation comes through API Gateway; only record the
        # API request id when a requestContext is present.
        if 'requestContext' in event:
            request_ctx = event['requestContext']
            os.environ[ENV_APIG_REQUEST_ID] = request_ctx.get(
                'requestId', 'N/A')
        os.environ[ENV_LAMBDA_REQUEST_ID] = context.aws_request_id
        return func(event, context)
    return wrapper
def add_request_ids_from_environment(logger, name, event_dict):
    """Custom processor adding request IDs to the log event, if available."""
    apig_id = os.environ.get(ENV_APIG_REQUEST_ID)
    if apig_id is not None:
        event_dict['api_request_id'] = apig_id
    lambda_id = os.environ.get(ENV_LAMBDA_REQUEST_ID)
    if lambda_id is not None:
        event_dict['lambda_request_id'] = lambda_id
    return event_dict
def _configure_logger(logger_factory=None, wrapper_class=None):
    """Install the fleece processor chain into structlog's global config."""
    structlog.configure(
        processors=[
            structlog.stdlib.filter_by_level,
            add_request_ids_from_environment,
            structlog.stdlib.add_log_level,
            structlog.stdlib.add_logger_name,
            structlog.stdlib.PositionalArgumentsFormatter(),
            structlog.processors.TimeStamper(fmt='iso'),
            structlog.processors.StackInfoRenderer(),
            structlog.processors.format_exc_info,
            structlog.processors.JSONRenderer(sort_keys=True)
        ],
        context_class=WRAPPED_DICT_CLASS,
        # Fall back to the stdlib-backed factory/wrapper when not supplied.
        logger_factory=logger_factory or structlog.stdlib.LoggerFactory(),
        wrapper_class=wrapper_class or structlog.stdlib.BoundLogger,
        cache_logger_on_first_use=True)
def setup_root_logger(level=logging.DEBUG, stream=DEFAULT_STREAM,
                      logger_factory=None):
    """Configure structlog and (re)install a single stream handler on root."""
    _configure_logger(logger_factory=logger_factory)
    # Drop whatever handlers were attached before installing ours.
    clobber_root_handlers()
    handler = logging.StreamHandler(stream)
    handler.setLevel(level)
    handler.setFormatter(logging.Formatter(fmt=LOG_FORMAT))
    logging.root.addHandler(handler)
    logging.root.setLevel(level)
def get_logger(name=None, level=None, stream=DEFAULT_STREAM,
               clobber_root_handler=True, logger_factory=None,
               wrapper_class=None):
    """Configure and return a logger with structlog and stdlib."""
    _configure_logger(logger_factory=logger_factory,
                      wrapper_class=wrapper_class)
    log = structlog.get_logger(name)
    root_logger = logging.root
    if log == root_logger:
        # Root logger: attach our stream handler unless an identical one
        # is already installed.
        if not _has_streamhandler(root_logger, level=level, stream=stream):
            handler = logging.StreamHandler(stream)
            handler.setLevel(level)
            handler.setFormatter(logging.Formatter(fmt=LOG_FORMAT))
            root_logger.addHandler(handler)
    elif clobber_root_handler:
        # Named logger: optionally re-format the existing root handlers so
        # all output shares the same format.
        for handler in root_logger.handlers:
            handler.setFormatter(logging.Formatter(fmt=LOG_FORMAT))
    if level:
        log.setLevel(level)
    return log


# Drop-in alias mirroring logging.getLogger.
getLogger = get_logger
| {
"repo_name": "racker/fleece",
"path": "fleece/log.py",
"copies": "1",
"size": "7350",
"license": "apache-2.0",
"hash": 940165198477439500,
"line_mean": 34.8536585366,
"line_max": 77,
"alpha_frac": 0.6428571429,
"autogenerated": false,
"ratio": 4.126895002807411,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5269752145707411,
"avg_score": null,
"num_lines": null
} |
from functools import wraps
import logging
import os
import resource
from tcpy import TCPClient
# Prefer the user-supplied memwatchconfig module; fall back to the bundled
# defaults when it is absent.
try:
    from memwatchconfig import PROFILER_HOST
except ImportError:
    # Bug fix: narrowed from a bare ``except:`` so only a missing module
    # triggers the fallback; real errors inside memwatchconfig propagate.
    from defaultconfig import PROFILER_HOST
try:
    from memwatchconfig import PROFILER_PORT
except ImportError:
    from defaultconfig import PROFILER_PORT
logging.basicConfig()
# Module-level logger shared by the profiling helpers below.
logger = logging.getLogger("memwatch.memwatch")
def profile(key_name, custom_emit=None):
    """
    Returns a decorator which will time a call to a function
    and emit a metric to statsite with the peak memory usage.

    :param key_name: metric key reported for the profiled block.
    :param custom_emit: optional callable ``(peak_usage, unreturned,
        block_name)`` used instead of the default stdout emitter.

    Example:
        @profile("my_function_key")
        def should_give_reward(a, b, c):
            ....
    """
    def decorator(func):
        @wraps(func)
        def wrapped(*args, **kwargs):
            # All measurement is delegated to the ProfiledBlock
            # context manager defined below.
            with ProfiledBlock(key_name, custom_emit):
                return func(*args, **kwargs)
        return wrapped
    return decorator
class ProfiledBlock(object):
    """
    Implements a context manager that will profile the memory
    consumed by a block of code, and emit memory metrics to statsite.

    Metrics:
        peak_usage: high-water mark for memory usage for a profiled block
        unreturned: memory that was not freed after the block was exited (leaks)

    Example:
        with ProfiledBlock("consume_bytes"):
            consume_bytes()
    """
    def __init__(self, block_name, custom_emit=None):
        # block_name is the metric key; custom_emit, when given, replaces
        # default_emit and must accept (peak_usage, unreturned, block_name).
        self.block_name = block_name
        self.pid = os.getpid()
        self.emit = custom_emit if custom_emit else self.default_emit
        # Connection to the external memwatch profiler server.
        self.profiler = TCPClient(PROFILER_HOST, PROFILER_PORT)
        self.start_mem = None
        self.units = None

    def __enter__(self):
        # ru_maxrss is the process peak RSS so far; its units are
        # platform-dependent (KB on Linux, bytes on macOS) -- TODO confirm
        # which platform the emitted numbers assume.
        self.start_mem = resource.getrusage(resource.RUSAGE_SELF).ru_maxrss
        self.enable()

    def __exit__(self, exc_type=None, exc_val=None, exc_tb=None):
        usage_result = self.disable()
        if not usage_result.get("success", False):
            raise Exception("%s: %s" % (self.block_name, usage_result.get("message")))
        peak_usage = usage_result.get("peak_usage", 0)
        unreturned = usage_result.get("unreturned", 0)
        # Emit the metrics
        try:
            # We try to use a custom emit function
            self.emit(peak_usage, unreturned, self.block_name)
        except:
            # Deliberate best-effort fallback: if the user-supplied emitter
            # fails for any reason, log the usage message and fall back to
            # the built-in emitter so metrics are never silently lost.
            logger.error(custom_emit_fail_msg())
            self.default_emit(peak_usage, unreturned, self.block_name)

    def enable(self):
        # Send our PID and the start signal to the memwatch server
        self.profiler.send({"cmd": "profile", "opt": "start", "pid": self.pid})
        self.profiler.recv()

    def disable(self):
        # Ask the server to stop profiling and return its measurements,
        # then fold in our own before/after RSS delta.
        self.profiler.send({"stop": True})
        result = self.profiler.recv()
        self.profiler.conn.finish()
        end_mem = resource.getrusage(resource.RUSAGE_SELF).ru_maxrss
        # Peak RSS growth across the block: memory not returned to the OS.
        unreturned = end_mem - self.start_mem
        result.update({"unreturned": unreturned})
        return result

    def default_emit(self, peak_usage, unreturned, block_name):
        # Plain-stdout emitter (Python 2 print statements).
        if unreturned > 0:
            print ""
            print "POSSIBLE LEAK IN %s" % block_name
            print "Unreturned memory could be an indication of a memory leak."
            print ""
        base_line = "================================"
        line_match = "=" * (len(block_name) + 1)
        line_match += base_line
        print "%s %s" % (block_name, base_line)
        print "Block Memory Usage"
        print " Peak Usage: %s" % peak_usage
        print " Unreturned: %s" % unreturned
        print line_match
def custom_emit_fail_msg():
    """Return the help text logged when a user-supplied emit callback fails."""
    parts = (
        "Custom emit function failed.\n",
        "Usage/Signature: custom_emit(peak_usage, # float",
        " unreturned, # float",
        " block_name) # str",
    )
    return "".join(parts)
| {
"repo_name": "ptbrodie/memwatch",
"path": "memwatch/memwatch.py",
"copies": "1",
"size": "3835",
"license": "mit",
"hash": 3371701312084093400,
"line_mean": 31.5,
"line_max": 86,
"alpha_frac": 0.6028683181,
"autogenerated": false,
"ratio": 3.901322482197355,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5004190800297355,
"avg_score": null,
"num_lines": null
} |
from functools import wraps
import logging
import os
from . import process
from .utils import which, tempdir
from ._compat import FileExistsError
def ensure_git(return_value=None):
    """Decorator factory: run the wrapped function only when ``git`` is on
    PATH; otherwise log an error and return *return_value* instead.
    """
    def decorator(func):
        @wraps(func)
        def wrapper(*args, **kwargs):
            # Guard clause: bail out early when git is missing.
            if not which('git'):
                logging.error('git is not installed')
                return return_value
            return func(*args, **kwargs)
        return wrapper
    return decorator
@ensure_git()
def clone(url, dest=None, depth=None):
    """Clone *url* into *dest* (a fresh temporary directory when omitted).

    :param url: git URL to clone from.
    :param dest: target directory; must not already exist.
    :param depth: optional shallow-clone depth.
    :returns: the destination path.
    :raises FileExistsError: when *dest* already exists.
    """
    if dest and os.path.exists(dest):
        raise FileExistsError('Destination already exists: %s' % dest)
    dest = dest if dest else tempdir()
    cmd = ['git', 'clone', url, dest]
    if depth:
        # Bug fix: str() so an integer depth does not break the argv list
        # handed to the subprocess layer.
        cmd += ['--depth', str(depth)]
    process.call(cmd)
    return dest
class Repository(object):
    """Thin wrapper around git CLI operations on a working copy.

    All git commands run with ``cwd=self.path`` via the project's process
    helper. NOTE(review): when ``autopull`` is truthy it is unpacked with
    ``*autopull``, so it must be an iterable like ``('origin', 'master')``,
    not a bare boolean -- confirm callers pass it that way.
    """
    def __init__(self, path, autopull=None, autopush=None):
        self.path = path
        # When truthy, every commit() is followed by a push().
        self.autopush = autopush
        self.autopull = autopull
        # Fixed author identity used for every commit made through here.
        self.author = "Passpie <passpie@localhost>"
        if autopull:
            self.pull_rebase(*autopull)

    @ensure_git()
    def init(self):
        # Create a new git repository at self.path.
        cmd = ['git', 'init', self.path]
        process.call(cmd)

    @ensure_git()
    def pull_rebase(self, remote='origin', branch='master'):
        # Fetch and rebase local commits on top of the remote branch.
        cmd = ['git', 'pull', '--rebase', remote, branch]
        process.call(cmd, cwd=self.path)

    @ensure_git()
    def push(self, remote='origin', branch='master'):
        cmd = ['git', 'push', remote, branch]
        process.call(cmd, cwd=self.path)

    @ensure_git()
    def add(self, all=False):
        # all=True also stages deletions (git add --all).
        if all is True:
            cmd = ['git', 'add', '--all', '.']
        else:
            cmd = ['git', 'add', '.']
        process.call(cmd, cwd=self.path)

    @ensure_git()
    def commit(self, message, add=True):
        """Commit staged changes (staging everything first by default)."""
        author_option = "--author={}".format(self.author)
        if add:
            self.add(all=True)
        cmd = ['git', 'commit', author_option, '-m', message]
        process.call(cmd, cwd=self.path)
        if self.autopush:
            self.push()

    @ensure_git(return_value=[])
    def commit_list(self):
        # Commit subjects, oldest first.
        cmd = ['git', 'log', '--reverse', '--pretty=format:%s']
        output, _ = process.call(cmd, cwd=self.path)
        return output.splitlines()

    @ensure_git(return_value=[])
    def sha_list(self):
        # Abbreviated commit hashes, oldest first (parallel to commit_list).
        cmd = ['git', 'log', '--reverse', '--pretty=format:%h']
        output, _ = process.call(cmd, cwd=self.path)
        return output.splitlines()

    @ensure_git()
    def reset(self, to_index):
        """Hard-reset the working copy to the commit at *to_index*
        (an index into sha_list); logs instead of raising when the
        index is out of range.
        """
        try:
            sha = self.sha_list()[to_index]
            cmd = ['git', 'reset', '--hard', sha]
            process.call(cmd, cwd=self.path)
        except IndexError:
            logging.info('commit on index "{}" not found'.format(to_index))
| {
"repo_name": "scorphus/passpie",
"path": "passpie/history.py",
"copies": "2",
"size": "2814",
"license": "mit",
"hash": 260786851480267460,
"line_mean": 28.0103092784,
"line_max": 75,
"alpha_frac": 0.5572139303,
"autogenerated": false,
"ratio": 3.683246073298429,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0,
"num_lines": 97
} |
from functools import wraps
import logging
import pickle
import sys
import types
from hoplite import client
from hoplite.exceptions import JobFailedError
from globals import HopliteClientSettings
def remotify(module_name, functions=None, add_documentation=True):
    """
    Decorator which can be used to add remote capabilities to functions or
    classes.

    By using this decorator on a function, the module which contains the
    function will be given two additional functions which provide the means
    for calling the function on a remote machine running Hoplite. The names of
    these two functions are remote\_(name of original function) and
    remote_async\_(name of original function). The remote\_function calls the
    function or method asynchronously on the remote machine, and the
    remote_async\_ function returns an object which can be used to run the
    function asynchronously on the remote machine.

    By using this decorator on a class, the class will be enhanced with
    remote\_ and remote_async\_ methods which serve the same purpose as the
    remote\_ and remote_async\_ functions described above.

    In either case, documentation will be added to the new functions/methods
    giving a short description of how to use them and a link to the original
    function. This feature can be disabled by passing in False for the
    add_documentation parameter.

    For additional details, see the above documentation and examples.

    :param module_name: The namespace for the module which contains this class
        or function. This should simply be passed in as __name__
        (e.g. @remotify(__name__)
    :type module_name: str
    :param functions: A list of names of the methods which should be made
        remotable. This only applies when decorating classes. If no list is
        provided, then all methods in the class (besides those starting with __)
        will be made remotable
    :param add_documentation: If true, then the functions or methods which are
        added will be given documentation explaining how they are to be used, and
        linking to the original function. If false, then they will be given no
        documentation, though they will still show up in the Sphinx documentation
        if the associated rst file indicates that undocumented functions/methods
        should be included
    :type add_documentation: bool
    :type functions: list of [str]
    """
    # Avoid a mutable default argument: normalise None to an empty list.
    if functions is None:
        functions = []

    def inner(obj):
        # If decorating a class, then add methods to the class. For some
        # reason, if a class which inherits from another class is decorated,
        # then the type of object passed in is "type" rather than "class".
        # However, the class can still be decorated just fine.
        # NOTE(review): types.ClassType only exists on Python 2 (old-style
        # classes); this module is Python-2-era code.
        if isinstance(obj, types.ClassType) or isinstance(obj, type):
            class_obj = obj  # Rename for clarity
            class_item_names = dir(class_obj)
            for item_name in class_item_names:
                # An explicit `functions` whitelist restricts which methods
                # are remotified.
                if len(functions) > 0 and item_name not in functions:
                    continue
                if isinstance(getattr(class_obj, item_name), types.MethodType):
                    func = getattr(class_obj, item_name)
                    name = func.__name__
                    # Skip "private" functions as well as inherited functions
                    # that have already been remoted
                    if name.startswith('__') or hasattr(func, '___remoted_by_hoplite___') \
                            or hasattr(func, '___is_hoplite_remotable___'):
                        continue
                    if name.startswith('remote_') or name.startswith('async_'):
                        raise AttributeError(
                            'Unable to add remote capability to function {0}:'
                            ' function cannot begin with "remote_" or '
                            '"async_"'.format(name))
                    # Build the sync and async wrappers (remote_*_builder
                    # helpers are defined elsewhere in this module).
                    class_func = wraps(func)(remote_func_builder(name))
                    async_class_func = wraps(func)(
                        remote_async_func_builder(name))
                    class_func.__name__ = 'remote_' + class_func.__name__
                    async_class_func.__name__ = 'remote_async_' + \
                        async_class_func.__name__
                    if add_documentation:
                        class_func.__doc__ = _get_remote_docstring(
                            'meth', '{}.{}'.format(
                                module_name, class_obj.__name__), func.__name__
                        )
                        async_class_func.__doc__ = _get_remote_async_docstring(
                            'meth', '{}.{}'.format(
                                module_name, class_obj.__name__), func.__name__
                        )
                    else:
                        class_func.__doc__ = None
                        async_class_func.__doc__ = None
                    # Need to set attribute on __func__, which is the
                    # underlying function stored in the instancemethod This
                    # adds a tag to the function being remotified so it is not
                    # remoted twice if a remoted class is inherited
                    setattr(func.__func__, '___remoted_by_hoplite___', True)
                    # Set attribute to remotable functions for identification
                    setattr(class_func, '___is_hoplite_remotable___', True)
                    setattr(
                        async_class_func, '___is_hoplite_remotable___', True)
                    setattr(class_obj, 'remote_' + name, class_func)
                    setattr(
                        class_obj, 'remote_async_' + name, async_class_func)
        # If decorating a module function (not a class function)
        elif isinstance(obj, types.FunctionType):
            func = obj  # Rename for clarity
            module = sys.modules[module_name]
            name = func.__name__
            if name.startswith('remote_') or name.startswith('async_'):
                raise AttributeError(
                    'Unable to add remote capability to function {0}:'
                    ' function cannot begin with "remote_" or "async_"'.format(
                        name))
            mod_func = wraps(func)(
                remote_module_func_builder(name, module_name))
            async_mod_func = wraps(func)(
                remote_module_async_func_builder(name, module_name))
            mod_func.__name__ = 'remote_' + mod_func.__name__
            async_mod_func.__name__ = 'remote_async_' + async_mod_func.__name__
            if add_documentation:
                mod_func.__doc__ = _get_remote_docstring(
                    'func', module_name, func.__name__)
                async_mod_func.__doc__ = _get_remote_async_docstring(
                    'func', module_name, func.__name__)
            else:
                mod_func.__doc__ = None
                async_mod_func.__doc__ = None
            # Set attribute to remotable and remoted functions for
            # identification
            setattr(func, '___remoted_by_hoplite___', True)
            setattr(mod_func, '___is_hoplite_remotable___', True)
            setattr(async_mod_func, '___is_hoplite_remotable___', True)
            setattr(module, 'remote_' + name, mod_func)
            setattr(module, 'remote_async_' + name, async_mod_func)
        else:
            raise RuntimeError(
                'Unable to add remote capabilities to object {} which is of'
                ' type {}'.format(obj.__name__, type(obj))
            )
        # Return the original object unchanged so normal local calls work.
        return obj
    return inner
def _get_remote_docstring(ref_type, namespace, func_name):
return 'This function calls :{0}:`{1}.{2}` on a remote machine which is ' \
'running a Hoplite server.\n\n' \
':param remote_machine_address: The hostname or IP address of the' \
'remote machine\n' \
':ref_type remote_machine_address: str\n' \
':param args: Positional arguments for {2}\n' \
':param kwargs: Keyword arguments for {2}\n' \
':returns: The value or values returned by {2} after it finishes ' \
' running on the remote machine\n\n' \
'This function raises the same exceptions as {2}. If an error ' \
'occurs in the Hoplite framework, or if the original exception ' \
'raised on the remote machine cannot be raised on the local ' \
'machine, then a JobFailedError (from the Hoplite module) will ' \
'be raised.'.format(
ref_type, namespace, func_name)
def _get_remote_async_docstring(ref_type, namespace, func_name):
return 'This function returns an object which can be used to call ' \
':{0}:`{1}.{2}` asynchronously on a remote machine which is ' \
'running a Hoplite server. \n\n' \
':param remote_machine_address: The hostname or IP address of the' \
' remote machine\n' \
':ref_type remote_machine_address: str\n' \
':param args: Positional arguments for {2}\n' \
':param kwargs: Keyword arguments for {2}\n' \
':returns: An instance of the RemoteAsyncJobWrapper class, which ' \
' can be used to start {2} on the remote machine and query its ' \
'status. This class implements the same public interface as the ' \
'RemoteJob class in Hoplite, and therefore provides methods such ' \
' as start, join, and finished. When join is called, the class '\
'will block until the function finishes executing on the remote ' \
'machine, and then will return the values returned by the ' \
'remotely executed function.\n' \
':rtype: RemoteAsyncJobWrappper\n\n' \
'This function raises the same exceptions as {2}. If an error ' \
'occurs in the Hoplite framework, or if the original exception ' \
'raised on the remote machine cannot be raised on the local ' \
'machine, then a JobFailedError (from the Hoplite module) will ' \
'be raised. Note that any exceptions raised on the remote ' \
'machine will not be called until the status of the job is ' \
'checked, such as when "finished" or "join" are called on the ' \
'RemoteAsyncJobWrappper object.'.format(
ref_type, namespace, func_name)
class RemoteEnablerMetaClass(type):
    """
    .. deprecated:: 15.0.0.dev25
        Use the :ref:`remotify decorator <remotify>` instead

    Add remote capabilities to a class.

    If a class opts to use this as its metaclass, then every function in the
    class (besides those starting with __) will be enhanced to allow for remote
    operation through hoplite. As an example, if there is a function defined
    as::

        def do_stuff(self, input_1, input_2):
            ...

    Then two additional functions will be added to the class::

        def remote_do_stuff(self, remote_machine_address, *args, **kwargs):
            ...
        def remote_async_do_stuff(self, remote_machine_address, *args, **kwargs)
            ...

    In these new functions, \*args and \*\*kwargs represent all of the
    arguments required by the original function. "remote_machine_address" is
    the IP address or hostname of the machine on which the function will be
    run remotely. If the remote machine is running Hoplite on a port other
    than the default (5000), then "remote_machine_address" can be given in the
    form "address:port"

    When the remote_do_stuff function is called, hoplite will attempt to
    connect to the Hoplite server on the remote machine and run the function
    on it, using the current state of the object on the local machine. In
    other words, it should seem as if the function is running on the local
    machine, except that the operations themselves will affect *only* the
    remote machine. In particular, if the function being run remotely changes
    the class instance, those changes will not be reflected on the local
    machine. This must be kept in mind when creating classes that will be made
    remotable - any changes of state must be sent back to the local machine as
    return values. That includes reference variables passed as function
    parameters that would normally not need to be returned.

    When the remote_async_do_stuff function is called, a RemoteAsyncJobWrapper
    object will be returned which can be used to start the function
    asynchronously. This object implements the same public functions as the
    RemoteJob class, and so can be used in the same way. When the function is
    run, it operates in the same way as the remote_do_stuff function does,
    except that it is run asynchronously. This means that any exceptions which
    occur will not be raised until the job status is checked.

    In cases where inheritance is used in a class, the behavior is governed by
    the following rules:

    - For each class which uses the metaclass, if it inherits from another
      class then that parent class must either use the metaclass as well (in
      which case all of its functions will be made available to the child as
      remotable functions) or the parent class must be a new-style class
      (inherit from "object"), in which case its functions will not be
      available to the child as remotable functions. If the parent class does
      not use the metaclass, and if it is not a new-style class, then a
      TypeError will be raised due to some technical issues with metaclasses
      and inheritance.
    - If a class uses the metaclass, then all classes which inherit from it
      (i.e. all of its descendants) will be made remotable. In other words, if
      a parent class uses the metaclass, then it as well as its child will be
      made remotable. The parent will have access to all its own functions as
      remotable functions, and the child will have access not only to all of
      its own functions, but also all of the parent's functions, as remotable
      functions. This holds true for all descendants.

    :returns: The class, enhanced to allow for remote functionality
    """
    def __new__(mcs, clsname, bases, dct):
        # Iterate over a snapshot of the items: new 'remote_*' and
        # 'remote_async_*' entries are inserted into dct inside the loop,
        # and mutating a dict while iterating its items() raises
        # RuntimeError on Python 3 (items() was a copy on Python 2).
        for name, val in list(dct.items()):
            if (not name.startswith('__')) and hasattr(val, '__call__'):
                # Function must be built in a separate function and then
                # assigned here. Otherwise, there are problems with each
                # function added to the class actually pointing to the same
                # thing.
                if name.startswith('remote_') or name.startswith('async_'):
                    raise AttributeError(
                        'Unable to add remote capability to function {0}:'
                        ' function cannot begin with "remote_" or '
                        '"async_"'.format(name))
                dct['remote_' + name] = remote_func_builder(name)
                dct['remote_async_' + name] = remote_async_func_builder(name)
        return type.__new__(mcs, clsname, bases, dct)
def remote_func_builder(function_name):
    """
    Build a function that will connect to a remote machine and execute a
    function on it.

    :param function_name: The name of the class function that will be called on
        the remote machine. This is necessary because, even though it is
        technically something like remote_do_stuff that is called, it will be
        recognized as _remote_func instead.
    :returns: Function that, when called, will connect to a remote machine and
        execute the function represented by 'function_name'
    """
    def _remote_func(self, remote_machine_address, *args, **kwargs):
        """
        Call a function on a remote machine.

        Note that the class instance is pickled and sent to the remote machine.
        This is so the current state of the class will be utilized when the
        function is called on the remote machine. There are probably lots of
        corner cases in which this will cause problems. For example, you must
        be aware of side effects of the original function that you might be
        expecting to affect the local machine, because they will not do so.

        :param remote_machine_address: IP address or hostname of the remote
            machine on which the function will be run. If the remote machine
            is running Hoplite on a port other than the default (5000), then
            "remote_machine_address" can be specified in the form
            "address:port"
        :param remote_timeout: Timeout (in floating-point seconds) of the
            function; passed as a keyword argument and stripped from kwargs
            before they are forwarded
        :param args: Normal arguments being passed to the remotely called
            function
        :param kwargs: Keyword arguments being passed to the remotely called
            function
        :returns: The value(s) returned by the function which was called on the
            remote machine
        """
        logger = logging.getLogger(__name__)
        logger.addHandler(logging.NullHandler())
        # remote_timeout is an out-of-band kwarg: it controls job.join()
        # below and is removed from kwargs so the remote function never
        # sees it. -1 means "no timeout".
        remote_timeout = -1
        timeout_message = ''
        if kwargs.get('remote_timeout') is not None and kwargs['remote_timeout'] > 0.0:
            remote_timeout = kwargs['remote_timeout']
            kwargs.pop('remote_timeout')
            timeout_message = ' with timeout of {} seconds'.format(
                remote_timeout)
        args_string = args.__str__()
        kwargs_string = kwargs.__str__()
        # Truncate logged argument reprs unless debug logging is enabled.
        if not HopliteClientSettings.debug:
            if len(args_string) > 33:
                args_string = args_string[0:30] + '...'
            if len(kwargs_string) > 33:
                kwargs_string = kwargs_string[0:30] + '...'
        logger.info('"{0}" on target "{1}" with args: {2} and '
                    'kwargs: {3}{4}'.format(
                        function_name,
                        remote_machine_address,
                        args_string,
                        kwargs_string,
                        timeout_message))
        # The instance itself is pickled so the remote side can rebuild the
        # object and invoke the method against the caller's current state.
        config = {
            'args': pickle.dumps(args),
            'kwargs': pickle.dumps(kwargs),
            'instance': pickle.dumps(self),
            'function_name': function_name
        }
        job = None
        try:
            job_manager = client.remote_job_manager.RemoteJobManager(
                remote_machine_address)
            job = job_manager.create_job(
                'hoplite.plugins.remote_enabler_job', config)
            job.start()
            # Blocks until the remote function finishes (or times out).
            job.join(remote_timeout)
        except JobFailedError as e:
            # job is still None when the failure happened while creating
            # the job, as opposed to while running it remotely.
            if job is None:
                logger.error(
                    'Exception occurred while creating job to call "{0}" on'
                    ' "{1}": {2}'.format(
                        function_name, remote_machine_address, str(e))
                )
            else:
                logger.error(
                    'Exception occurred while calling "{0}" on '
                    ' "{1}": {2}'.format(
                        function_name,
                        remote_machine_address,
                        e.__str__())
                )
            # ALL TRACEBACK ENTRIES BELOW THIS ARE FROM THE REMOTE MACHINE
            e.raise_remote_exception()
        # Return values travel back pickled inside the job status payload.
        return_values = pickle.loads(job.status().get('return_values'))
        if return_values is None:
            return None
        # Convert return value into either a single value, or a tuple, so that
        # it appears the same as if the function were called on the local
        # machine
        if len(return_values) > 1:
            return_object = tuple(return_values)
        else:
            return_object = return_values[0]
        return_object_string = return_object.__str__()
        # limit return object string if not debugging
        if not HopliteClientSettings.debug and len(return_object_string) > 50:
            return_object_string = return_object_string[0:47] + '...'
        logger.debug('"{0}" on target "{1}" returned {2}'.format(
            function_name,
            remote_machine_address,
            return_object_string))
        return return_object
    return _remote_func
def remote_async_func_builder(function_name):
    """
    Build a function that will connect to a remote machine and create a job
    wrapper that can be used to run a function asynchronously.

    :param function_name: The name of the class function that will be called
        on the remote machine.
    :returns: Function that, when called, will connect to a remote machine and
        create then return a job wrapper for running the specified function
    """
    def _remote_async_func(self, remote_machine_address, *args, **kwargs):
        """
        Create a job on a remote machine and return a wrapper so that it has
        the same interface as a job running on the local machine.

        ***NOTE*** It seems that join must always be eventually called on
        jobs, even if they have already finished. Otherwise, a process gets
        orphaned, causing problems with future operations.

        :param remote_machine_address: IP address or hostname of the remote
            machine on which the function will be run. If the remote machine
            is running Hoplite on a port other than the default (5000), then
            the port can be specified in the form "address:port"
        :param args: Normal arguments being passed to the remotely called
            function
        :param kwargs: Keyword arguments being passed to the remotely called
            function
        :returns: A wrapper around the remote job
        """
        log = logging.getLogger(__name__)
        log.addHandler(logging.NullHandler())
        log.debug(
            'Creating job "{0}" on target "{1}" with args: {2} and kwargs:'
            ' {3}'.format(function_name, remote_machine_address, args, kwargs)
        )
        # Ship the pickled instance plus the call arguments to the remote
        # side; the job plugin unpickles and invokes the method there.
        job_config = {}
        job_config['args'] = pickle.dumps(args)
        job_config['kwargs'] = pickle.dumps(kwargs)
        job_config['instance'] = pickle.dumps(self)
        job_config['function_name'] = function_name
        manager = client.remote_job_manager.RemoteJobManager(
            remote_machine_address)
        new_job = manager.create_job(
            'hoplite.plugins.remote_enabler_job', job_config)
        return RemoteAsyncJobWrapper(new_job, function_name)
    return _remote_async_func
def remote_module_func_builder(function_name, module_name):
    """
    Build a function that will connect to a remote machine and execute a
    function on it.

    :param function_name: The name of the function that will be called on the
        remote machine.
    :param module_name: Dotted name of the module that owns the function;
        sent to the remote side so it can import and call it.
    :returns: Function that, when called, will connect to a remote machine and
        execute the function represented by 'function_name'
    """
    def _remote_module_func(remote_machine_address, *args, **kwargs):
        """
        Call a function on a remote machine.

        Note that the arguments are pickled and sent to the remote machine.
        There are probably lots of corner cases in which this will cause
        problems, so be aware of side effects of the original function.

        :param remote_machine_address: IP address or hostname of the remote
            machine on which the function will be run. If the remote machine
            is running Hoplite on a port other than the default (5000), then
            the port can be specified in the form "address:port"
        :param remote_timeout: Optional timeout (in floating-point seconds);
            passed as a keyword argument and stripped from kwargs before
            they are forwarded
        :param args: Normal arguments being passed to the remotely called
            function
        :param kwargs: Keyword arguments being passed to the remotely called
            function
        :returns: The value(s) returned by the function which was called on the
            remote machine
        """
        logger = logging.getLogger(__name__)
        logger.addHandler(logging.NullHandler())
        # remote_timeout is an out-of-band kwarg: it controls job.join()
        # below and is removed so the remote function never sees it.
        remote_timeout = -1
        timeout_message = ''
        if kwargs.get('remote_timeout') is not None and kwargs['remote_timeout'] > 0.0:
            remote_timeout = kwargs['remote_timeout']
            kwargs.pop('remote_timeout')
            timeout_message = ' with timeout of {} seconds'.format(
                remote_timeout)
        args_string = args.__str__()
        kwargs_string = kwargs.__str__()
        # Truncate logged argument reprs unless debug logging is enabled.
        if not HopliteClientSettings.debug:
            if len(args_string) > 33:
                args_string = args_string[0:30] + '...'
            if len(kwargs_string) > 33:
                kwargs_string = kwargs_string[0:30] + '...'
        logger.info('"{0}" on target "{1}" with args: {2} and '
                    'kwargs: {3} {4}'.format(
                        function_name,
                        remote_machine_address,
                        args_string,
                        kwargs_string,
                        timeout_message))
        # Unlike the class variant, no instance is shipped -- only the
        # module and function names plus the pickled call arguments.
        config = {
            'args': pickle.dumps(args),
            'kwargs': pickle.dumps(kwargs),
            'module_name': module_name,
            'function_name': function_name
        }
        job = None
        try:
            job_manager = client.remote_job_manager.RemoteJobManager(
                remote_machine_address)
            job = job_manager.create_job(
                'hoplite.plugins.remote_enabler_module_job', config)
            job.start()
            # Blocks until the remote function finishes (or times out).
            job.join(remote_timeout)
        except JobFailedError as e:
            # job is still None when the failure happened while creating
            # the job, as opposed to while running it remotely.
            if job is None:
                logger.error(
                    'Exception occurred while creating job to call "{0}" '
                    'on "{1}": {2}'.format(
                        function_name, remote_machine_address, str(e))
                )
            else:
                logger.error(
                    'Exception occurred while calling "{0}" on "{1}": '
                    '{2}'.format(
                        function_name, remote_machine_address, e.__str__())
                )
            # ALL TRACEBACK ENTRIES BELOW THIS ARE FROM THE REMOTE MACHINE
            e.raise_remote_exception()
        # Return values travel back pickled inside the job status payload.
        return_values = pickle.loads(job.status().get('return_values'))
        if return_values is None:
            return None
        # Convert return value into either a single value, or a tuple, so that
        # it appears the same as if the function were called on the local
        # machine
        if len(return_values) > 1:
            return_object = tuple(return_values)
        else:
            return_object = return_values[0]
        return_object_string = return_object.__str__()
        # limit return object string if not debugging
        if not HopliteClientSettings.debug and len(return_object_string) > 50:
            return_object_string = return_object_string[0:47] + '...'
        logger.debug(
            '"{0}" on target "{1}" returned {2}'.format(function_name,
                                                        remote_machine_address,
                                                        return_object_string))
        return return_object
    return _remote_module_func
def remote_module_async_func_builder(function_name, module_name):
    """
    Build a function that will connect to a remote machine and create a job
    wrapper that can be used to run a module-level function asynchronously.

    :param function_name: The name of the function that will be called on the
        remote machine.
    :param module_name: Dotted name of the module that owns the function.
    :returns: Function that, when called, will connect to a remote machine and
        create then return a job wrapper for running the specified function
    """
    def _remote_async_module_func(remote_machine_address, *args, **kwargs):
        """
        Create a job on a remote machine and return a wrapper so that it has
        the same interface as a job running on the local machine.

        ***NOTE*** It seems that join must always be eventually called on
        jobs, even if they have already finished. Otherwise, a process gets
        orphaned, causing problems with future operations.

        :param remote_machine_address: IP address or hostname of the remote
            machine on which the function will be run. If the remote machine
            is running Hoplite on a port other than the default (5000), then
            the port can be specified in the form "address:port"
        :param args: Normal arguments being passed to the remotely called
            function
        :param kwargs: Keyword arguments being passed to the remotely called
            function
        :returns: A wrapper around the remote job
        """
        def _shorten(text):
            # Keep log lines readable when not in debug mode.
            return text[0:30] + '...' if len(text) > 33 else text

        log = logging.getLogger(__name__)
        log.addHandler(logging.NullHandler())
        args_repr = str(args)
        kwargs_repr = str(kwargs)
        if not HopliteClientSettings.debug:
            args_repr = _shorten(args_repr)
            kwargs_repr = _shorten(kwargs_repr)
        log.info('Creating job "{0}" on target "{1}" with args: {2} and '
                 'kwargs: {3}'.format(function_name, remote_machine_address,
                                      args_repr, kwargs_repr))
        # Only the module/function names and pickled call arguments are
        # shipped; no instance is involved for module-level functions.
        job_config = {}
        job_config['args'] = pickle.dumps(args)
        job_config['kwargs'] = pickle.dumps(kwargs)
        job_config['module_name'] = module_name
        job_config['function_name'] = function_name
        manager = client.remote_job_manager.RemoteJobManager(
            remote_machine_address)
        new_job = manager.create_job(
            'hoplite.plugins.remote_enabler_module_job', job_config)
        return RemoteAsyncJobWrapper(new_job, function_name)
    return _remote_async_module_func
class RemoteAsyncJobWrapper:
    """
    This class is a wrapper around the RemoteJob class, and is used for
    asynchronously running functions which are called remotely on another
    machine. It implements the same public methods as the RemoteJob class,
    and the reader should refer to that module for additional information on
    how to use it.
    """
    def __init__(self, job, function_name):
        # job is the underlying RemoteJob; function_name is retained only
        # for log messages.
        self.job = job
        self.function_name = function_name
        self.logger = logging.getLogger(__name__)
        self.logger.addHandler(logging.NullHandler())

    def start(self):
        """
        Start running the function.
        """
        self.logger.debug(
            'Starting "{0}({1})" on "{2}:{3}"'.format(self.function_name,
                                                      self.job.uuid,
                                                      self.job.address,
                                                      self.job.port))
        self.job.start()

    def join(self, remote_timeout=-1):
        """
        Join the function once it has been started.

        This differs from the join function in RemoteJob in that, upon
        completion, it returns the values returned by the remotely executed
        function.

        :param remote_timeout: Optional timeout in seconds. -1 Implies no
            timeout
        :type remote_timeout: int
        :return: Values returned by the remotely executed function
        """
        self.logger.debug('Joining "{0}" on "{1}:{2}"'.format(
            self.function_name, self.job.address, self.job.port))
        try:
            self.job.join(remote_timeout)
        except JobFailedError as e:
            self.logger.error(
                'Exception occurred while calling "{0}" on target "{1}":'
                ' {2}'.format(
                    self.function_name, self.job.address, e.__str__())
            )
            # ALL TRACEBACK ENTRIES BELOW THIS ARE FROM THE REMOTE MACHINE
            e.raise_remote_exception()
        # Return values travel back pickled inside the job status payload.
        return_values = pickle.loads(self.job.status().get('return_values'))
        if return_values is None:
            return None
        # Convert return value into either a single value, or a tuple, so that
        # it appears the same as if the function were called on the local
        # machine
        if len(return_values) > 1:
            return_object = tuple(return_values)
        else:
            return_object = return_values[0]
        return_object_string = return_object.__str__()
        # Truncate the logged repr unless debug logging is enabled.
        if not HopliteClientSettings.debug and (len(return_object_string) > 50):
            return_object_string = return_object_string[0:47] + '...'
        self.logger.debug(
            '"{0}" on target "{1}:{2}" returned {3}'.format(
                self.function_name,
                self.job.address,
                self.job.port,
                return_object_string))
        return return_object

    def config(self, force=False):
        """
        Get the configuration dictionary for the job.
        """
        return self.job.config(force)

    def status(self, force=False):
        """
        Check the status of the function's execution. This will raise an
        exception if the function has encountered an error since the last time
        the status was checked.

        :param force: Passed through to the underlying job.
        :return: The job's status dictionary.
        """
        return self.job.status(force)

    def kill(self, force=False):
        """
        Attempt to terminate the job.
        """
        return self.job.kill(force)

    def running(self, force=False):
        """
        Check if the job is still running.
        """
        return self.job.running(force)

    def finished(self, force=False):
        """
        Check if the job is finished.
        """
        return self.job.finished(force)
# This is used as an example of how remoted functions get automatically
# documented. Do not use.
@remotify(__name__)
def my_func(arg1, arg2):
    """Function which does stuff.

    My_func is a function which does nothing that is particularly useful. It
    serves primarily as an example.

    :param arg1: A number which will be printed to the console
    :type arg1: int
    :param arg2: A string which will also be printed
    :type arg2: str
    :returns: The concatenated number and string
    """
    concat_string = str(arg1) + arg2
    # Python 2 print statement (this module targets Python 2).
    print concat_string
    return concat_string
# This is used as an example of how remoted classes get automatically
# documented. Do not use.
@remotify(__name__)
class Foo(object):
    """Example class showing how remoted classes are documented."""

    def __init__(self, val_1):
        """Initialize a new instance of the Foo class.

        :param val_1: Any value. Doesn't matter what value it is
        """
        self.val_1 = val_1

    def print_a_val(self):
        """Prints a value.

        Prints the value that was passed in when the class instance was
        initialized.
        """
        # Python 2 print statement (this module targets Python 2).
        print self.val_1

    def print_another_val(self, another_val):
        """Prints another value.

        Prints a value passed in by the user. It can be the same as the other
        value if desired.

        :param another_val: Another value. Doesn't matter what value it is
        """
        print another_val
| {
"repo_name": "ni/hoplite",
"path": "hoplite/remote_enabler.py",
"copies": "1",
"size": "35192",
"license": "mit",
"hash": -1014597316116008100,
"line_mean": 43.4343434343,
"line_max": 91,
"alpha_frac": 0.5964992044,
"autogenerated": false,
"ratio": 4.568609632610671,
"config_test": true,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5665108837010671,
"avg_score": null,
"num_lines": null
} |
from functools import wraps
import logging
import random
import string
from django.db import connections
from django.db import models
from django.db.models import EmailField
from django.db.models.signals import pre_migrate
from django.dispatch import receiver
# Credit: http://stackoverflow.com/questions/15624817/have-loaddata-ignore-or-disable-post-save-signals
def disable_for_loaddata(signal_handler):
    """
    Decorator that turns off signal handlers when loading fixture data.

    Django passes ``raw=True`` to signal handlers while fixtures are being
    installed (e.g. via ``loaddata``); in that case the wrapped handler is
    skipped entirely.

    :param signal_handler: The signal handler function to wrap
    :returns: Wrapped handler that is a no-op for raw (fixture) saves
    """
    @wraps(signal_handler)
    def wrapper(*args, **kwargs):
        # 'raw' is only truthy during fixture loading.
        if kwargs.get('raw'):
            return
        # Propagate the handler's return value; Signal.send() collects
        # (receiver, response) pairs, so dropping it loses information.
        return signal_handler(*args, **kwargs)
    return wrapper
class InstanceDoesNotRequireFieldsMixin(object):
    """ Mixin that will only validate form fields that are being saved """

    def _clean_fields(self):
        # Backfill missing submitted values from the bound instance so that
        # fields omitted from the request data are not reported as required.
        if self.instance:
            for name, field in self.fields.items():
                if name not in self.data:
                    attr = getattr(self.instance, name)
                    # NOTE(review): truthiness check skips falsy-but-valid
                    # values (0, False, '') -- confirm that is intended.
                    if attr:
                        # NOTE(review): mutates self.data in place; request
                        # QueryDicts are immutable by default -- verify the
                        # form is always constructed with a mutable copy.
                        self.data[name] = attr
        return super(InstanceDoesNotRequireFieldsMixin, self)._clean_fields()

    def clean(self):
        # Same backfill as _clean_fields, but into cleaned_data after the
        # per-field cleaning has already run.
        if self.instance:
            for name, field in self.fields.items():
                if name not in self.cleaned_data:
                    attr = getattr(self.instance, name)
                    if attr:
                        self.cleaned_data[name] = attr
        return super(InstanceDoesNotRequireFieldsMixin, self).clean()
class RandomField(models.CharField):
    """CharField that fills itself with a unique random string on first save.

    The value is drawn from the characters in ``seed`` and has length
    ``max_length``; uniqueness is checked against existing rows before use.
    """

    # Maximum number of attempts at finding an unused random value.
    MAX_LOOPS = 10

    def __init__(self, seed=string.ascii_lowercase + string.digits, *args, **kwargs):
        # Alphabet the random value is drawn from.
        self.seed = seed
        super(RandomField, self).__init__(*args, **kwargs)

    def contribute_to_class(self, class_, key):
        super(RandomField, self).contribute_to_class(class_, key)
        models.signals.pre_save.connect(self.generate_unique, sender=class_)
        # NOTE(review): post_migrate sends an AppConfig sender and no
        # 'instance' kwarg, so this connection looks ineffective as
        # written -- confirm it is intentional before relying on it.
        models.signals.post_migrate.connect(self.generate_unique, sender=class_)

    def generate_unique(self, sender, instance, *args, **kwargs):
        """pre_save handler: populate the field if it is still empty.

        Tries up to MAX_LOOPS random candidates; logs an error and leaves
        the field untouched if every candidate collides with existing rows.
        """
        if getattr(instance, self.attname):
            return  # already populated; nothing to do
        value = None
        for attempt in range(RandomField.MAX_LOOPS):
            candidate = ''.join(
                random.choice(self.seed) for _ in range(self.max_length))
            # exists() is cheaper than count() > 0 for a membership test.
            if not sender.objects.filter(**{self.name: candidate}).exists():
                value = candidate
                break
        if value is None:
            # Bug fix: the original compared the loop index against
            # MAX_LOOPS, a value range() never yields, so exhausting every
            # attempt silently stored None instead of reporting failure.
            # NOTE(review): _meta.module_name was removed in Django 1.8;
            # newer Django exposes _meta.model_name -- confirm the target
            # Django version.
            error = "Could not generate a unique field for field %s.%s!" % (
                sender._meta.module_name, self.name)
            logging.error(error)
            return
        if attempt >= (RandomField.MAX_LOOPS * 2) // 3:
            # Warn when most of the attempt budget was consumed. Integer
            # division preserves the original Python 2 threshold of 6.
            logging.warning(
                "Looped 2/3 the max allowable loops for unique field on "
                "%s.%s consider upping the length of the keys"
                % (sender._meta.module_name, self.name))
        setattr(instance, self.attname, value)
#
# From https://github.com/gbourdin/django-ci-emailfield/
#
# Python 2/3 compatibility. Credit to https://github.com/oxplot/fysom/issues/1
# Rebind the text/bytes names at module scope so later code (e.g.
# CiEmailField's isinstance check against 'basestring') works unchanged on
# either interpreter version.
try:
    unicode = unicode
except NameError:
    # 'unicode' is undefined, must be Python 3
    str = str
    unicode = str
    bytes = bytes
    basestring = (str, bytes)
else:
    # 'unicode' exists, must be Python 2
    str = str
    unicode = unicode
    bytes = str
    basestring = basestring
@receiver(pre_migrate)
def setup_postgres_extensions(sender, **kwargs):
    """Ensure the ``citext`` extension exists before migrations run.

    Connected to ``pre_migrate`` so CITEXT columns (used by CiEmailField)
    can be created on PostgreSQL; a no-op for every other database vendor.
    """
    conn = connections[kwargs['using']]
    if conn.vendor != 'postgresql':
        return
    # Close the cursor deterministically instead of leaking it until GC.
    cursor = conn.cursor()
    try:
        cursor.execute("CREATE EXTENSION IF NOT EXISTS citext")
    finally:
        cursor.close()
class CiEmailField(EmailField):
    """A case insensitive EmailField.

    On PostgreSQL the column is declared CITEXT so comparisons are
    case-insensitive inside the database; on every other backend the value
    is lowercased before it is written.
    """

    def db_type(self, connection):
        # The citext extension is expected to be installed by the
        # pre_migrate handler above.
        if connection.vendor != 'postgresql':
            return super(CiEmailField, self).db_type(connection)
        return 'CITEXT'

    def get_db_prep_value(self, value, connection, prepared=False):
        normalized = value
        if connection.vendor != 'postgresql' and isinstance(value, basestring):
            # The isinstance guard matters because value might be None.
            normalized = value.lower()
        return super(CiEmailField, self).get_db_prep_value(
            normalized, connection, prepared)
| {
"repo_name": "cdelguercio/slothauth",
"path": "slothauth/utils.py",
"copies": "1",
"size": "4328",
"license": "apache-2.0",
"hash": 7978956299909692000,
"line_mean": 33.3492063492,
"line_max": 174,
"alpha_frac": 0.6282347505,
"autogenerated": false,
"ratio": 4.117982873453854,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5246217623953854,
"avg_score": null,
"num_lines": null
} |
from functools import wraps
import logging
import re
import time
import six
from .exceptions import LcdCommandError
logger = logging.getLogger(__name__)

# Registry of manager commands, keyed by function name; populated by the
# @command decorator below and consumed by LcdManager.run().
COMMANDS = {}


def command(fn):
    """Register *fn* in the COMMANDS dispatch table.

    The registered wrapper logs the call, invokes the original function,
    and forwards any truthy result back to the manager process via
    ``self.send_manager_data``. The *original* function is returned, so the
    decorated method behaves normally when called directly.
    """
    @wraps(fn)
    def wrapped(*args):
        self = args[0]
        logger.debug(
            'Executing %s%s',
            fn.__name__,
            args
        )
        response = fn(*args)
        logger.debug(
            'Response %s',
            response
        )
        if response:
            self.send_manager_data('response', response)
    # Bug fix: fn.func_name exists only on Python 2 and raises
    # AttributeError on Python 3; fn.__name__ is portable.
    COMMANDS[fn.__name__] = wrapped
    return fn
class CallableLcdCommand(object):
    """Bind an LcdCommand to a client so that calling it writes the bytes."""

    def __init__(self, manager, command):
        self._manager = manager
        self._command = command

    def __call__(self, *args):
        # Build the raw command string, then hand it off to be written.
        payload = self._command(*args)
        self._manager.send(payload)
@six.python_2_unicode_compatible
class LcdCommand(object):
    """One byte-level command of the LCD protocol: an escape byte followed
    by a command byte and optionally encoded arguments."""

    # Every command is introduced by this escape byte.
    COMMAND_PREFIX = '\xfe'

    def __init__(self, byte, args=None, prefix='\n'):
        # NOTE(review): 'prefix' is stored but never used by build_command;
        # looks vestigial -- confirm before removing.
        if args is None:
            args = []
        self._byte = byte
        # Sequence of converters (e.g. chr, str), one per expected argument.
        self._args = args
        self._prefix = prefix

    def build_command(self, *args):
        """Return the raw command string with *args* encoded.

        :raises LcdCommandError: if the argument count does not match the
            number of converters this command was declared with.
        """
        cmd = self.COMMAND_PREFIX
        if len(args) != len(self._args):
            # NOTE(review): the message says "only ... received" even when
            # too many arguments were passed.
            raise LcdCommandError(
                "Argument count mismatch; expected {expected}, but "
                "only {actual} were received.".format(
                    expected=len(self._args),
                    actual=len(args),
                )
            )
        cmd += self._byte
        # Run each argument through its converter (chr packs small ints
        # into single bytes) and append it to the command string.
        for idx, processor in enumerate(self._args):
            arg = args[idx]
            cmd += str(processor(arg))
        return cmd

    def __call__(self, *args):
        return self.build_command(*args)

    def __str__(self):
        # 'string-escape' is a Python 2-only codec.
        return u'LCD Command "{command}"'.format(
            command=self._byte.encode('string-escape')
        )
@six.python_2_unicode_compatible
class LcdClient(object):
    """Thin write-only client for the LCD character device.

    Each entry in COMMANDS is exposed as an attribute (via __getattr__)
    that, when called, builds the command bytes and writes them to the
    device.
    """

    # Command table: name -> LcdCommand(command byte, argument converters).
    COMMANDS = {
        'on': LcdCommand('\x42', args=[chr]),
        'off': LcdCommand('\x46'),
        'set_brightness': LcdCommand('\x99', args=[chr]),
        'set_contrast': LcdCommand('\x50', args=[chr]),
        'enable_autoscroll': LcdCommand('\x51'),
        'disable_autoscroll': LcdCommand('\x52'),
        'clear': LcdCommand('\x58'),
        'set_splash_screen': LcdCommand('\x40', args=[str]),
        'set_cursor_position': LcdCommand('\x47', args=[chr, chr]),
        'cursor_home': LcdCommand('\x48', prefix=''),
        'cursor_backward': LcdCommand('\x4c', prefix=''),
        'cursor_forward': LcdCommand('\x4d', prefix=''),
        'cursor_underline_on': LcdCommand('\x4a', prefix=''),
        'cursor_underline_off': LcdCommand('\x4b', prefix=''),
        'cursor_block_on': LcdCommand('\x53', prefix=''),
        'cursor_block_off': LcdCommand('\x54', prefix=''),
        'set_backlight_color': LcdCommand('\xd0', args=[chr, chr, chr]),
        'set_lcd_size': LcdCommand('\xd1', args=[chr, chr]),
        'gpo_off': LcdCommand('\x56'),
        'gpo_on': LcdCommand('\x57'),
    }

    def __init__(self, device_path):
        # Path to the LCD character device.
        self.device_path = device_path

    def __getattr__(self, name):
        # Resolve unknown attributes against the command table so calls
        # like client.clear() work without explicit methods.
        if name not in self.COMMANDS:
            raise AttributeError(name)
        return CallableLcdCommand(self, self.COMMANDS[name])

    def send(self, cmd):
        # 'string-escape' is a Python 2-only codec.
        logger.debug(
            'Sending command: "%s"' % cmd.encode('string-escape')
        )
        try:
            # The device is reopened per write so a transient disconnect
            # does not wedge a long-lived handle.
            with open(self.device_path, 'wb') as dev:
                dev.write(cmd)
        except IOError:
            # Best effort: drop the command if the device is unavailable.
            logger.error(
                'Device unavailable; command \'%s\' dropped.',
                cmd
            )

    def send_text(self, text):
        # Non-ASCII characters are replaced ('?') before writing.
        self.send(text.encode('ascii', 'replace'))

    def __str__(self):
        return 'LCD Screen at {path}'.format(path=self.device_path)
class LcdManager(object):
def __init__(
self, device_path, pipe=None, size=None,
blink_interval=0.25, text_cycle_interval=2, size_x=16, size_y=2
):
self.client = LcdClient(device_path)
self.pipe = pipe
if not size:
size = [16, 2]
self.size = size
self.message = ''
self.message_lines = []
self.color = 0, 0, 0
self.backlight = True
self.sleep = 0.1
self.blink = []
self.blink_idx = 0
self.blink_counter = 0
self.blink_interval = int(
(1.0 / self.sleep) * blink_interval
)
self.text_idx = 0
self.text_cycle_counter = 0
self.text_cycle_interval = int(
(1.0 / self.sleep) * text_cycle_interval
)
def initialize(self):
self.client.disable_autoscroll()
self.clear()
def run(self):
while True:
if self.pipe.poll():
cmd, args = self.pipe.recv()
args.insert(0, self)
if cmd in COMMANDS:
COMMANDS[cmd](*args)
else:
logger.error(
'Received unknown command \'%s\' from manager.',
cmd
)
self.send_manager_data(
'error', 'Command %s does not exist' % cmd
)
if self.blink_counter >= self.blink_interval:
self.blink_counter = 0
self.handle_blink()
else:
self.blink_counter += 1
if self.text_cycle_counter >= self.text_cycle_interval:
self.text_cycle_counter = 0
self.handle_text_cycle()
else:
self.text_cycle_counter += 1
time.sleep(self.sleep)
def handle_text_cycle(self):
if len(self.message_lines) <= self.text_idx:
self.text_idx = 0
self.client.cursor_home()
cleaned_lines = [
line.ljust(self.size[0])
for line in self.message_lines[
self.text_idx:self.text_idx+self.size[1]
]
]
display_text = ''.join(cleaned_lines)[0:self.size[0]*self.size[1]]
if not display_text:
self.off()
self.client.send_text(display_text)
self.text_idx += 2
def handle_blink(self):
if not self.blink:
return
self.blink_idx += 1
if len(self.blink) <= self.blink_idx:
self.blink_idx = 0
self.client.set_backlight_color(
*self.blink[self.blink_idx]
)
def send_manager_data(self, msg, data=None):
if not data:
data = []
if not isinstance(data, (list, tuple)):
data = [data, ]
self.pipe.send((
msg, data
))
def get_message_lines(self, message):
lines = []
original_lines = re.split('\r|\n', message)
for line in original_lines:
lines.extend(
line[i:i+self.size[0]]
for i in range(0, len(line), self.size[0])
)
return lines
@command
def set_contrast(self, value):
logger.debug('Setting contrast to %s', value)
self.client.set_contrast(value)
@command
def set_brightness(self, value):
logger.debug('Setting brightness to %s', value)
self.client.set_brightness(value)
@command
def message(self, message):
backlight = message.get('backlight', True)
text = message.get('message', '')
blink = message.get('blink', [])
color = message.get('color', [255, 255, 255])
# If the backlight is off, just turn it off and be done with it.
if not backlight:
self.off()
return
if self.message != text:
self.set_message(text)
if blink and self.blink != blink:
self.set_blink(blink)
if not blink:
self.set_blink([])
if (
not self.blink and
color != self.color
):
self.set_backlight_color(color)
if backlight != self.backlight:
if backlight:
self.on()
else:
self.off()
@command
def set_blink(self, colors):
self.blink = colors
self.blink_idx = 0
if self.blink:
self.set_backlight_color(self.blink[self.blink_idx])
@command
def set_message(self, message):
logger.debug('Setting message \'%s\'', message)
self.clear()
self.text_idx = 0
self.message = message.replace('\n', '')
self.message_lines = self.get_message_lines(self.message)
self.handle_text_cycle()
    @command
    def off(self, *args):
        """Turn the backlight off (state tracked in self.backlight)."""
        logger.debug('Setting backlight to off')
        self.backlight = False
        self.client.off()
    @command
    def on(self, *args):
        """Turn the backlight on at full brightness (255)."""
        logger.debug('Setting backlight to on')
        self.backlight = True
        self.client.on(255)
    @command
    def clear(self, *args):
        """Blank the display and reset all message/scroll state."""
        self.message = ''
        self.text_idx = 0
        self.message_lines = []
        self.client.clear()
    @command
    def set_backlight_color(self, color):
        """Set a static backlight color; ``color`` is an (r, g, b) sequence."""
        logger.debug('Setting backlight color to %s', color)
        self.color = color
        self.client.set_backlight_color(*color)
| {
"repo_name": "coddingtonbear/twoline",
"path": "twoline/lcd.py",
"copies": "1",
"size": "9333",
"license": "mit",
"hash": 8197905847075881000,
"line_mean": 27.1963746224,
"line_max": 74,
"alpha_frac": 0.5220186435,
"autogenerated": false,
"ratio": 3.880665280665281,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4902683924165281,
"avg_score": null,
"num_lines": null
} |
from functools import wraps
import logging
import re
import traceback
from django.http import HttpResponseForbidden
from django.views.decorators.csrf import csrf_exempt
from watchman import settings
logger = logging.getLogger('watchman')
def check(func):
    """
    Decorator which wraps checks and returns an error response on failure.

    On success the check's own response is returned unchanged.  On any
    exception a dict ``{"ok": False, "error": ..., "stacktrace": ...}`` is
    returned instead, nested under the first positional argument (e.g. a
    database alias) when one was given.
    """
    # Bug fix: preserve the wrapped check's __name__/__doc__ so logs and
    # introspection report the real check instead of 'wrapped'.
    @wraps(func)
    def wrapped(*args, **kwargs):
        check_name = func.__name__
        arg_name = None
        if args:
            # e.g. the database/cache name for per-target checks.
            arg_name = args[0]
        try:
            if arg_name:
                logger.debug("Checking '%s' for '%s'", check_name, arg_name)
            else:
                logger.debug("Checking '%s'", check_name)
            response = func(*args, **kwargs)
        except Exception as e:
            message = str(e)
            response = {
                "ok": False,
                "error": message,
                "stacktrace": traceback.format_exc(),
            }
            # The check contains several individual checks (e.g., one per
            # database). Preface the results by name.
            if arg_name:
                response = {arg_name: response}
                logger.exception(
                    "Error calling '%s' for '%s': %s",
                    check_name,
                    arg_name,
                    message
                )
            else:
                logger.exception(
                    "Error calling '%s': %s",
                    check_name,
                    message
                )
        return response
    return wrapped
def token_required(view_func):
    """
    Decorator which ensures that one of the WATCHMAN_TOKENS is provided if set.
    WATCHMAN_TOKEN_NAME can also be set if the token GET parameter must be
    customized.

    When neither WATCHMAN_TOKENS nor WATCHMAN_TOKEN is configured, all
    requests are allowed through.
    """
    def _parse_auth_header(auth_header):
        """
        Parse the `Authorization` header
        Expected format: `WATCHMAN-TOKEN Token="ABC123"`
        """
        # TODO: Figure out full set of allowed characters
        # http://stackoverflow.com/questions/19028068/illegal-characters-in-http-headers
        # https://www.w3.org/Protocols/rfc2616/rfc2616-sec2.html#sec2.2
        # https://www.w3.org/Protocols/rfc2616/rfc2616-sec4.html#sec4.2
        # Raw string so \w is a regex character class rather than an invalid
        # string escape (DeprecationWarning, SyntaxError in future Pythons).
        reg = re.compile(r'(\w+)[=] ?"?([\w-]+)"?')
        header_dict = dict(reg.findall(auth_header))
        return header_dict['Token']

    def _get_passed_token(request):
        """
        Try to get the passed token, starting with the header and fall back to `GET` param
        """
        try:
            auth_header = request.META['HTTP_AUTHORIZATION']
            token = _parse_auth_header(auth_header)
        except KeyError:
            # Missing header or missing Token= field both land here.
            token = request.GET.get(settings.WATCHMAN_TOKEN_NAME)
        return token

    def _validate_token(request):
        if settings.WATCHMAN_TOKENS:
            watchman_tokens = settings.WATCHMAN_TOKENS.split(',')
        elif settings.WATCHMAN_TOKEN:
            watchman_tokens = [settings.WATCHMAN_TOKEN, ]
        else:
            # No tokens configured: authentication is disabled.
            return True
        return _get_passed_token(request) in watchman_tokens

    @csrf_exempt
    @wraps(view_func)
    def _wrapped_view(request, *args, **kwargs):
        if _validate_token(request):
            return view_func(request, *args, **kwargs)
        return HttpResponseForbidden()
    return _wrapped_view
# Select the view-auth decorator `auth` at import time based on settings.
if settings.WATCHMAN_AUTH_DECORATOR is None:
    # No decorator configured: pass requests straight through (CSRF-exempt).
    def auth(view_func):
        @csrf_exempt
        @wraps(view_func)
        def _wrapped_view(request, *args, **kwargs):
            return view_func(request, *args, **kwargs)
        return _wrapped_view
elif settings.WATCHMAN_AUTH_DECORATOR == 'watchman.decorators.token_required':
    # Avoid import loops
    auth = token_required
else:
    # Dotted-path setting: import the configured decorator dynamically.
    try:
        from importlib import import_module
    except ImportError:  # Django < 1.8
        from django.utils.importlib import import_module
    mod_name, dec = settings.WATCHMAN_AUTH_DECORATOR.rsplit('.', 1)
    auth = getattr(import_module(mod_name), dec)
| {
"repo_name": "JBKahn/django-watchman",
"path": "watchman/decorators.py",
"copies": "1",
"size": "4028",
"license": "bsd-3-clause",
"hash": 4631627798513763000,
"line_mean": 29.5151515152,
"line_max": 90,
"alpha_frac": 0.5752234359,
"autogenerated": false,
"ratio": 4.118609406952965,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5193832842852965,
"avg_score": null,
"num_lines": null
} |
from functools import wraps
import logging
import requests
from celery import shared_task
from collections import Counter
from django.conf import settings
from django.contrib.auth.models import User
from django.core.cache import cache
from django.db import transaction
from django.template.loader import render_to_string
from pontoon.base.models import (
Entity,
Locale,
Resource,
TranslatedResource,
)
from pontoon.sync.changeset import ChangeSet
from pontoon.sync.vcs.models import VCSProject
log = logging.getLogger(__name__)
def update_originals(db_project, now, force=False):
    """Sync source-locale resources and entities from VCS into the DB.

    Returns ``(added_paths, removed_paths, changed_paths, new_entities)``.
    """
    vcs = VCSProject(db_project, locales=[], force=force)
    with transaction.atomic():
        added_paths, removed_paths, changed_paths = update_resources(db_project, vcs)
        batch = ChangeSet(db_project, vcs, now)
        update_entities(db_project, vcs, batch)
        batch.execute()
    return added_paths, removed_paths, changed_paths, batch.new_entities
def serial_task(timeout, lock_key="", on_error=None, **celery_args):
    """
    Decorator ensures that there's only one running task with given task_name.
    Decorated tasks are bound tasks, meaning their first argument is always their Task instance
    :param timeout: time after which lock is released.
    :param lock_key: allows to define different lock for respective parameters of task.
    :param on_error: callback to be executed if an error is raised.
    :param celery_args: argument passed to celery's shared_task decorator.
    """
    def wrapper(func):
        # @shared_task must be outermost so Celery registers the bound task;
        # @wraps preserves the wrapped function's identity for the task name.
        @shared_task(bind=True, **celery_args)
        @wraps(func)
        def wrapped_func(self, *args, **kwargs):
            # The lock name embeds the formatted key so tasks with different
            # parameters can run concurrently.
            lock_name = "serial_task.{}[{}]".format(
                self.name, lock_key.format(*args, **kwargs)
            )
            # Acquire the lock: cache.add is atomic and fails if the key
            # already exists, i.e. a previous invocation is still running.
            if not cache.add(lock_name, True, timeout=timeout):
                error = RuntimeError(
                    "Can't execute task '{}' because the previously called "
                    "task is still running.".format(lock_name)
                )
                if callable(on_error):
                    on_error(error, *args, **kwargs)
                raise error
            try:
                return func(self, *args, **kwargs)
            finally:
                # release the lock
                cache.delete(lock_name)
        return wrapped_func
    return wrapper
def collect_entities(db_project, vcs_project, changed_resources):
    """
    Find all the entities in the database and on the filesystem and
    match them together, yielding tuples of the form
    (entity_key, database_entity, vcs_entity).
    When a match isn't found, the missing entity will be None.
    """
    db_entities = get_db_entities(db_project, changed_resources)
    vcs_entities = get_vcs_entities(vcs_project)
    # Union of both key sets so one-sided entities are yielded with None.
    for key in db_entities.keys() | vcs_entities.keys():
        yield key, db_entities.get(key), vcs_entities.get(key)
def update_entities(db_project, vcs_project, changeset):
    """Queue entity creations, updates and obsoletions on the changeset."""
    changed_resources = vcs_project.changed_files
    matches = collect_entities(db_project, vcs_project, changed_resources)
    for key, db_entity, vcs_entity in matches:
        if vcs_entity is not None and db_entity is not None:
            # Present on both sides: sync source-side changes into the DB.
            changeset.update_db_source_entity(db_entity, vcs_entity)
        elif vcs_entity is not None:
            # New VCS entities are added to Pontoon.
            changeset.create_db_entity(vcs_entity)
        elif db_entity is not None:
            # VCS no longer has the entity, obsolete it.
            changeset.obsolete_db_entity(db_entity)
        else:
            # This should never happen. What? Hard abort.
            raise ValueError(f"No entities found for key `{key}`")
def update_resources(db_project, vcs_project):
    """Update the database on what resource files exist in VCS."""
    log.debug(f"Scanning {vcs_project.source_directory_path}")
    vcs_changed_files, vcs_removed_files = vcs_project.changed_source_files

    removed_resources = db_project.resources.filter(path__in=vcs_removed_files)
    removed_paths = removed_resources.values_list("path", flat=True)
    changed_paths = db_project.resources.filter(
        path__in=vcs_changed_files
    ).values_list("path", flat=True)

    # Join evaluates (and caches) the queryset before the delete below.
    log.debug("Removed files: {}".format(", ".join(removed_paths) or "None"))
    removed_resources.delete()

    added_paths = []
    for rel_path, vcs_resource in vcs_project.resources.items():
        resource, was_created = db_project.resources.get_or_create(path=rel_path)
        resource.format = Resource.get_path_format(rel_path)
        resource.total_strings = len(vcs_resource.entities)
        resource.save()
        if was_created:
            added_paths.append(rel_path)

    log.debug("Added files: {}".format(", ".join(added_paths) or "None"))
    return added_paths, removed_paths, changed_paths
def get_changed_resources(db_project, vcs_project):
    """Return the list of changed resource paths, or None for a full scan.

    None is used when some locale has never been synced, forcing callers
    to consider every resource rather than only the changed ones.
    """
    changed = vcs_project.changed_files
    if db_project.unsynced_locales:
        return None
    if changed is None:
        return None
    paths = list(changed.keys())
    paths += list(vcs_project.added_paths)
    paths += list(vcs_project.changed_paths)
    return paths
def update_translations(db_project, vcs_project, locale, changeset):
    """Queue translation updates between the DB and VCS for one locale."""
    changed_resources = get_changed_resources(db_project, vcs_project)
    for key, db_entity, vcs_entity in collect_entities(
        db_project, vcs_project, changed_resources
    ):
        # Without both sides of the pair there is nothing to reconcile.
        if db_entity is None or vcs_entity is None:
            continue
        # VCS lacks a translation for this locale: nothing to pull or edit.
        if not vcs_entity.has_translation_for(locale.code):
            continue
        if db_entity.has_changed(locale):
            # Pontoon changes overwrite whatever VCS has.
            changeset.update_vcs_entity(locale, db_entity, vcs_entity)
        else:
            # Otherwise pull whatever may have changed from VCS into Pontoon.
            changeset.update_db_entity(locale, db_entity, vcs_entity)
def update_translated_resources(db_project, vcs_project, locale):
    """
    Update the TranslatedResource entries in the database.
    Returns true if a new TranslatedResource is added to the locale.
    """
    # Projects with a project configuration use the config-driven updater.
    if vcs_project.configuration:
        updater = update_translated_resources_with_config
    else:
        updater = update_translated_resources_without_config
    return updater(db_project, vcs_project, locale)
def update_translated_resources_with_config(db_project, vcs_project, locale):
    """
    Create/update the TranslatedResource objects for each Resource instance
    that is enabled for the given locale through project configuration.

    Returns True if any TranslatedResource was newly created.
    """
    created_any = False
    for resource in vcs_project.configuration.locale_resources(locale):
        translated_resource, was_created = TranslatedResource.objects.get_or_create(
            resource=resource, locale=locale
        )
        created_any = created_any or was_created
        translated_resource.calculate_stats()
    return created_any
def update_translated_resources_without_config(db_project, vcs_project, locale):
    """
    We only want to create/update the TranslatedResource object if the
    resource exists in the current locale, UNLESS the file is asymmetric.

    Returns True if any TranslatedResource was newly created.
    """
    created_any = False
    for resource in db_project.resources.all():
        vcs_resource = vcs_project.resources.get(resource.path, None)
        if vcs_resource is None:
            continue
        locale_file_exists = vcs_resource.files.get(locale) is not None
        if not (locale_file_exists or resource.is_asymmetric):
            continue
        translated_resource, was_created = TranslatedResource.objects.get_or_create(
            resource=resource, locale=locale
        )
        created_any = created_any or was_created
        translated_resource.calculate_stats()
    return created_any
def update_translated_resources_no_files(db_project, locale, changed_resources):
    """
    Create/update TranslatedResource entries if files aren't available. This typically happens when
    originals change and translations don't, so we don't pull locale repositories.
    """
    for resource in changed_resources:
        # We can only update asymmetric (monolingual) TranslatedResources. For bilingual files we
        # only create TranslatedResources if the file is present in the repository for the locale,
        # which we cannot check without files.
        if not resource.is_asymmetric:
            # Bug fix: the `{resource}` placeholder is named, so format() must
            # receive a keyword argument; the original positional call raised
            # KeyError instead of logging.
            log.error(
                "Unable to calculate stats for asymmetric resource: {resource}.".format(
                    resource=resource
                )
            )
            continue
        translatedresource, _ = TranslatedResource.objects.get_or_create(
            resource=resource, locale=locale
        )
        translatedresource.calculate_stats()
def get_vcs_entities(vcs_project):
    """Map each VCS entity to its project-unique key."""
    return dict((entity_key(e), e) for e in vcs_project.entities)
def get_changed_entities(db_project, changed_resources):
    """Return non-obsolete entities for the project, optionally narrowed
    to the given resource paths.

    ``changed_resources`` of None means "all resources" (full scan).
    """
    entities = (
        Entity.objects.select_related("resource")
        .prefetch_related("changed_locales")
        .filter(resource__project=db_project, obsolete=False)
    )
    if changed_resources is not None:
        entities = entities.filter(resource__path__in=changed_resources)
    return entities
def get_db_entities(db_project, changed_resources=None):
    """Map each database entity to its project-unique key."""
    matched = get_changed_entities(db_project, changed_resources)
    return dict((entity_key(e), e) for e in matched)
def entity_key(entity):
    """
    Generate a key for the given entity that is unique within the
    project.

    Falls back to the source string when the entity has no explicit key.
    """
    return "{}:{}".format(entity.resource.path, entity.key or entity.string)
def has_repo_changed(last_synced_revisions, pulled_revisions):
    """Decide whether a repository changed since the last sync.

    Unknown (None) revisions are treated as changed, since we cannot
    prove otherwise.
    """
    unsure = None in pulled_revisions.values()
    return unsure or pulled_revisions != last_synced_revisions
def pull_source_repo_changes(db_project):
    """Pull the source repository and report whether it changed."""
    repo = db_project.source_repository
    # Pull first; compare against the revisions recorded from the last sync.
    pulled = repo.pull()
    return has_repo_changed(repo.last_synced_revisions, pulled)
def pull_locale_repo_changes(db_project, locales):
    """
    Update the local files with changes from the VCS. Returns True
    if any of the updated repos have changed since the last sync.
    """
    has_changed = False
    # Maps repository pk -> Locale queryset actually pulled from that repo.
    repo_locales = {}
    # If none of the locales have changed, quit early.
    if not locales:
        return has_changed, repo_locales
    # Skip already pulled locales. Useful for projects with multiple repositories,
    # since we don't store the information what locale belongs to what repository.
    pulled_locales = []
    for repo in db_project.translation_repositories():
        remaining_locales = locales.exclude(code__in=pulled_locales)
        if not remaining_locales:
            break
        pulled_revisions = repo.pull(remaining_locales)
        repo_locales[repo.pk] = Locale.objects.filter(code__in=pulled_revisions.keys())
        pulled_locales += pulled_revisions.keys()
        if has_repo_changed(repo.last_synced_revisions, pulled_revisions):
            has_changed = True
    return has_changed, repo_locales
def commit_changes(db_project, vcs_project, changeset, locale):
    """Commit the changes we've made back to the VCS."""
    authors = changeset.commit_authors_per_locale.get(locale.code, [])
    # Use the top translator for this batch as commit author, or
    # the fake Pontoon user if there are no authors.
    if len(authors) > 0:
        commit_author = Counter(authors).most_common(1)[0][0]
    else:
        # Unsaved User object, used only for its name/email in the commit.
        commit_author = User(
            first_name=settings.VCS_SYNC_NAME, email=settings.VCS_SYNC_EMAIL
        )
    commit_message = render_to_string(
        "sync/commit_message.jinja",
        {"locale": locale, "project": db_project, "authors": set(authors)},
    )
    locale_path = vcs_project.locale_directory_paths[locale.code]
    repo = db_project.repository_for_path(locale_path)
    repo.commit(commit_message, commit_author, locale_path)
def get_changed_locales(db_project, locales, now):
    """
    Narrow down locales to the ones that have changed since the last sync by fetching latest
    repository commit hashes via API. For projects with many repositories, this is much faster
    than running VCS pull/clone for each repository.
    """
    repos = db_project.translation_repositories()

    # Requirement: all translation repositories must have API configured.
    for repo in repos:
        if not repo.api_config:
            return locales

    log.info(f"Fetching latest commit hashes for project {db_project.slug} started.")

    # If locale has changed in the DB, we need to sync it.
    changed_locale_pks = list(
        locales.filter(
            changedentitylocale__entity__resource__project=db_project,
            changedentitylocale__when__lte=now,
        ).values_list("pk", flat=True)
    )

    unchanged_locale_pks = []
    error_locale_pks = set()

    for repo in repos:
        for locale in locales:
            # If we already processed the locale, we can move on.
            if locale.pk in changed_locale_pks + unchanged_locale_pks:
                continue
            try:
                locale_api_endpoint = repo.api_config["endpoint"].format(
                    locale_code=locale.code
                )
                response = requests.get(locale_api_endpoint)
                # Raise exception on 4XX client error or 5XX server error response
                response.raise_for_status()
                # If locale has not synced yet, we need to sync it.
                last_synced_commit_id = repo.get_last_synced_revisions(locale.code)
                if not last_synced_commit_id:
                    changed_locale_pks.append(locale.pk)
                    continue
                # If locale has changed in the VCS, we need to sync it.
                latest_commit_id = repo.api_config["get_key"](response.json())
                if not latest_commit_id.startswith(last_synced_commit_id):
                    changed_locale_pks.append(locale.pk)
                # If locale hasn't changed in the VCS, we don't need to sync it.
                else:
                    unchanged_locale_pks.append(locale.pk)
            # Errors and exceptions can mean locale is in a different repository or indicate
            # an actual network problem.
            except requests.exceptions.RequestException:
                error_locale_pks.add(locale.pk)

    # Check if any locale for which the exception was raised hasn't been processed yet.
    # For those locales we can't be sure if a change happened, so we assume it did.
    for l in error_locale_pks:
        if l not in changed_locale_pks + unchanged_locale_pks:
            log.error(
                "Unable to fetch latest commit hash for locale {locale} in project {project}".format(
                    locale=Locale.objects.get(pk=l), project=db_project.slug
                )
            )
            # Bug fix: append the unprocessed locale's pk (`l`), not the pk of
            # whatever `locale` was left bound by the loops above.
            changed_locale_pks.append(l)

    changed_locales = db_project.locales.filter(pk__in=changed_locale_pks)
    log.info(
        "Fetching latest commit hashes for project {project} complete. Changed locales: {locales}.".format(
            project=db_project.slug,
            locales=", ".join(changed_locales.values_list("code", flat=True)),
        )
    )
    return changed_locales
| {
"repo_name": "mathjazz/pontoon",
"path": "pontoon/sync/core.py",
"copies": "2",
"size": "16254",
"license": "bsd-3-clause",
"hash": 8727507165413039000,
"line_mean": 35.28125,
"line_max": 107,
"alpha_frac": 0.6499938477,
"autogenerated": false,
"ratio": 4.181631077952148,
"config_test": true,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5831624925652148,
"avg_score": null,
"num_lines": null
} |
from functools import wraps
import logging
import requests
from collections import Counter
from celery import shared_task
from django.contrib.auth.models import User
from django.core.cache import cache
from django.db import transaction
from django.template.loader import render_to_string
from pontoon.base.models import (
Entity,
Locale,
Resource,
TranslatedResource,
)
from pontoon.sync.changeset import ChangeSet
from pontoon.sync.vcs.models import VCSProject
log = logging.getLogger(__name__)
def update_originals(db_project, now, full_scan=False):
    """Sync source resources and entities from VCS into the database.

    Returns ``(changes, removed_paths, added_paths)``.
    """
    vcs = VCSProject(db_project, locales=[], full_scan=full_scan)
    with transaction.atomic():
        removed_paths, added_paths = update_resources(db_project, vcs)
        batch = ChangeSet(db_project, vcs, now)
        update_entities(db_project, vcs, batch)
        batch.execute()
    return batch.changes, removed_paths, added_paths
def serial_task(timeout, lock_key="", on_error=None, **celery_args):
    """
    Decorator ensures that there's only one running task with given task_name.
    Decorated tasks are bound tasks, meaning their first argument is always their Task instance
    :param timeout: time after which lock is released.
    :param lock_key: allows to define different lock for respective parameters of task.
    :param on_error: callback to be executed if an error is raised.
    :param celery_args: argument passed to celery's shared_task decorator.
    """
    def wrapper(func):
        # @shared_task must be outermost so Celery registers the bound task;
        # @wraps preserves the wrapped function's identity for the task name.
        @shared_task(bind=True, **celery_args)
        @wraps(func)
        def wrapped_func(self, *args, **kwargs):
            # The lock name embeds the formatted key so tasks with different
            # parameters can run concurrently.
            lock_name = "serial_task.{}[{}]".format(self.name, lock_key.format(*args, **kwargs))
            # Acquire the lock: cache.add is atomic and fails if the key
            # already exists, i.e. a previous invocation is still running.
            if not cache.add(lock_name, True, timeout=timeout):
                error = RuntimeError(
                    "Can't execute task '{}' because the previously called "
                    "task is still running.".format(lock_name)
                )
                if callable(on_error):
                    on_error(error, *args, **kwargs)
                raise error
            try:
                return func(self, *args, **kwargs)
            finally:
                # release the lock
                cache.delete(lock_name)
        return wrapped_func
    return wrapper
def collect_entities(db_project, vcs_project, unsynced_locales=None):
    """
    Find all the entities in the database and on the filesystem and
    match them together, yielding tuples of the form
    (entity_key, database_entity, vcs_entity).
    When a match isn't found, the missing entity will be None.
    """
    if unsynced_locales:
        # A never-synced locale forces a full scan of all resources.
        changed_resources = None
    else:
        changed_resources = vcs_project.changed_files
    db_entities = get_db_entities(db_project, changed_resources)
    vcs_entities = get_vcs_entities(vcs_project)
    # Union of both key sets so one-sided entities are yielded with None.
    for key in db_entities.keys() | vcs_entities.keys():
        yield key, db_entities.get(key), vcs_entities.get(key)
def update_entities(db_project, vcs_project, changeset):
    """Queue entity creations, updates and obsoletions on the changeset."""
    for key, db_entity, vcs_entity in collect_entities(db_project, vcs_project):
        if vcs_entity is not None and db_entity is not None:
            # Present on both sides: sync source-side changes into the DB.
            changeset.update_db_source_entity(db_entity, vcs_entity)
        elif vcs_entity is not None:
            # New VCS entities are added to Pontoon.
            changeset.create_db_entity(vcs_entity)
        elif db_entity is not None:
            # VCS no longer has the entity, obsolete it.
            changeset.obsolete_db_entity(db_entity)
        else:
            # This should never happen. What? Hard abort.
            raise ValueError(u'No entities found for key `{0}`'.format(key))
def update_resources(db_project, vcs_project):
    """Update the database on what resource files exist in VCS."""
    log.debug('Scanning {}'.format(vcs_project.source_directory_path))
    _, vcs_removed_files = vcs_project.changed_source_files

    removed_resources = db_project.resources.filter(path__in=vcs_removed_files)
    removed_paths = removed_resources.values_list('path', flat=True)
    # Join evaluates (and caches) the queryset before the delete below.
    log.debug('Removed files: {}'.format(', '.join(removed_paths) or 'None'))
    removed_resources.delete()

    new_paths = []
    for rel_path, vcs_resource in vcs_project.resources.items():
        resource, was_created = db_project.resources.get_or_create(path=rel_path)
        resource.format = Resource.get_path_format(rel_path)
        resource.total_strings = len(vcs_resource.entities)
        resource.save()
        if was_created:
            new_paths.append(rel_path)

    log.debug('Added files: {}'.format(', '.join(new_paths) or 'None'))
    return removed_paths, new_paths
def update_translations(db_project, vcs_project, locale, changeset):
    """Queue translation updates between the DB and VCS for one locale."""
    entity_pairs = collect_entities(db_project, vcs_project, db_project.unsynced_locales)
    for key, db_entity, vcs_entity in entity_pairs:
        # Without both the DB and VCS entity there is nothing to reconcile.
        if db_entity is None or vcs_entity is None:
            continue
        # VCS lacks an entity for this locale, so we can't
        # pull updates nor edit it. Skip it!
        if not vcs_entity.has_translation_for(locale.code):
            continue
        if db_entity.has_changed(locale):
            # Pontoon changes overwrite whatever VCS has.
            changeset.update_vcs_entity(locale, db_entity, vcs_entity)
        else:
            # Otherwise pull whatever may have changed from VCS into Pontoon.
            changeset.update_db_entity(locale, db_entity, vcs_entity)
def update_translated_resources(db_project, vcs_project, locale):
    """Update the TranslatedResource entries in the database."""
    for resource in db_project.resources.all():
        # We only want to create/update the TranslatedResource object if the
        # resource exists in the current locale, UNLESS the file is asymmetric.
        vcs_resource = vcs_project.resources.get(resource.path, None)
        if vcs_resource is None:
            continue
        locale_file_exists = vcs_resource.files.get(locale) is not None
        if locale_file_exists or resource.is_asymmetric:
            translated, _ = TranslatedResource.objects.get_or_create(
                resource=resource, locale=locale
            )
            translated.calculate_stats()
def update_translated_resources_no_files(db_project, locale, changed_resources):
    """
    Create/update TranslatedResource entries if files aren't available. This typically happens when
    originals change and translations don't, so we don't pull locale repositories.
    """
    for resource in changed_resources:
        # We can only update asymmetric (monolingual) TranslatedResources. For bilingual files we
        # only create TranslatedResources if the file is present in the repository for the locale,
        # which we cannot check without files.
        if not resource.is_asymmetric:
            # Bug fix: the `{resource}` placeholder is named, so format() must
            # receive a keyword argument; the original positional call raised
            # KeyError instead of logging.
            log.error(
                'Unable to calculate stats for asymmetric resource: {resource}.'.format(
                    resource=resource
                )
            )
            continue
        translatedresource, _ = (
            TranslatedResource.objects.get_or_create(resource=resource, locale=locale)
        )
        translatedresource.calculate_stats()
def get_vcs_entities(vcs_project):
    """Map each VCS entity to its project-unique key."""
    return dict((entity_key(e), e) for e in vcs_project.entities)
def get_changed_entities(db_project, changed_resources):
    """Return non-obsolete entities for the project, optionally narrowed
    to the given resource paths (None means all resources).
    """
    entities = (
        Entity.objects
        .select_related('resource')
        .prefetch_related('changed_locales')
        .filter(resource__project=db_project, obsolete=False)
    )
    if changed_resources is not None:
        entities = entities.filter(resource__path__in=changed_resources)
    return entities
def get_db_entities(db_project, changed_resources=None):
    """Map each database entity to its project-unique key."""
    matched = get_changed_entities(db_project, changed_resources)
    return dict((entity_key(e), e) for e in matched)
def entity_key(entity):
    """
    Generate a key for the given entity that is unique within the
    project.

    Falls back to the source string when the entity has no explicit key.
    """
    return '{}:{}'.format(entity.resource.path, entity.key or entity.string)
def pull_changes(db_project, locales=None):
    """
    Update the local files with changes from the VCS. Returns True
    if any of the updated repos have changed since the last sync.
    """
    changed = False
    # Maps repository pk -> Locale queryset actually pulled from that repo.
    repo_locales = {}
    # When syncing sources, pull source repository only.
    if locales is None:
        repositories = [db_project.source_repository]
    # When syncing locales and some have changed, pull all project repositories.
    elif locales:
        repositories = db_project.repositories.all()
    # When syncing locales and none have changed, quit early.
    else:
        return changed, repo_locales
    locales = locales or db_project.locales.all()
    # Skip already pulled locales. Useful for projects with multiple repositories (e.g. Firefox),
    # since we don't store the information what locale belongs to what repository.
    pulled_locales = []
    for repo in repositories:
        remaining_locales = locales.exclude(code__in=pulled_locales)
        if not remaining_locales:
            break
        repo_revisions = repo.pull(remaining_locales)
        repo_locales[repo.pk] = Locale.objects.filter(code__in=repo_revisions.keys())
        pulled_locales += repo_revisions.keys()
        # If any revision is None, we can't be sure if a change
        # happened or not, so we default to assuming it did.
        unsure_change = None in repo_revisions.values()
        if unsure_change or repo_revisions != repo.last_synced_revisions:
            changed = True
    return changed, repo_locales
def commit_changes(db_project, vcs_project, changeset, locale):
    """Commit the changes we've made back to the VCS."""
    authors = changeset.commit_authors_per_locale.get(locale.code, [])
    # Use the top translator for this batch as commit author, or
    # the fake Pontoon user if there are no authors.
    if len(authors) > 0:
        commit_author = Counter(authors).most_common(1)[0][0]
    else:
        # Unsaved User object, used only for its name/email in the commit.
        commit_author = User(first_name="Mozilla Pontoon", email="pontoon@mozilla.com")
    commit_message = render_to_string('sync/commit_message.jinja', {
        'locale': locale,
        'project': db_project,
        'authors': set(authors)
    })
    locale_path = vcs_project.locale_directory_paths[locale.code]
    repo = db_project.repository_for_path(locale_path)
    repo.commit(commit_message, commit_author, locale_path)
def get_changed_locales(db_project, locales, now):
    """
    Narrow down locales to the ones that have changed since the last sync by fetching latest
    repository commit hashes via API. For projects with many repositories, this is much faster
    than running VCS pull/clone for each repository.
    """
    repos = db_project.translation_repositories()

    # Requirement: all translation repositories must have API configured.
    for repo in repos:
        if not repo.api_config:
            return locales

    log.info('Fetching latest commit hashes for project {0} started.'.format(db_project.slug))

    # If locale has changed in the DB, we need to sync it.
    changed_locale_pks = list(locales.filter(
        changedentitylocale__entity__resource__project=db_project,
        changedentitylocale__when__lte=now
    ).values_list('pk', flat=True))

    unchanged_locale_pks = []
    error_locale_pks = set()

    for repo in repos:
        for locale in locales:
            # If we already processed the locale, we can move on.
            if locale.pk in changed_locale_pks + unchanged_locale_pks:
                continue
            try:
                locale_api_endpoint = repo.api_config['endpoint'].format(locale_code=locale.code)
                response = requests.get(locale_api_endpoint)
                # Raise exception on 4XX client error or 5XX server error response
                response.raise_for_status()
                # If locale has not synced yet, we need to sync it.
                last_synced_commit_id = repo.get_last_synced_revisions(locale.code)
                if not last_synced_commit_id:
                    changed_locale_pks.append(locale.pk)
                    continue
                # If locale has changed in the VCS, we need to sync it.
                latest_commit_id = repo.api_config['get_key'](response.json())
                if not latest_commit_id.startswith(last_synced_commit_id):
                    changed_locale_pks.append(locale.pk)
                # If locale hasn't changed in the VCS, we don't need to sync it.
                else:
                    unchanged_locale_pks.append(locale.pk)
            # Errors and exceptions can mean locale is in a different repository or indicate
            # an actual network problem.
            except requests.exceptions.RequestException:
                error_locale_pks.add(locale.pk)

    # Check if any locale for which the exception was raised hasn't been processed yet.
    # For those locales we can't be sure if a change happened, so we assume it did.
    for l in error_locale_pks:
        if l not in changed_locale_pks + unchanged_locale_pks:
            log.error(
                'Unable to fetch latest commit hash for locale {locale} in project {project}'
                .format(locale=Locale.objects.get(pk=l), project=db_project.slug)
            )
            # Bug fix: append the unprocessed locale's pk (`l`), not the pk of
            # whatever `locale` was left bound by the loops above.
            changed_locale_pks.append(l)

    changed_locales = db_project.locales.filter(pk__in=changed_locale_pks)
    log.info(
        'Fetching latest commit hashes for project {project} complete. Changed locales: {locales}.'
        .format(
            project=db_project.slug,
            locales=', '.join(changed_locales.values_list("code", flat=True))
        )
    )
    return changed_locales
| {
"repo_name": "mastizada/pontoon",
"path": "pontoon/sync/core.py",
"copies": "1",
"size": "14126",
"license": "bsd-3-clause",
"hash": 1853000041828639500,
"line_mean": 37.8076923077,
"line_max": 99,
"alpha_frac": 0.6519184483,
"autogenerated": false,
"ratio": 4.163277335691129,
"config_test": true,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.001104049339859664,
"num_lines": 364
} |
from functools import wraps
import logging
import struct
import capstone as cs
from .abstractcpu import Abi, Cpu, Interruption, Operand, RegisterFile, SyscallAbi
from .abstractcpu import instruction as abstract_instruction
from .bitwise import *
from .register import Register
from ...core.smtlib import Operators, BitVecConstant, issymbolic
logger = logging.getLogger(__name__)
# map different instructions to a single impl here
OP_NAME_MAP = {"MOVW": "MOV"}
def HighBit(n):
    # Sign bit of a 32-bit value (bit index 31).
    return Bit(n, 31)
def instruction(body):
    """Wrap an ARM instruction implementation with conditional-execution
    handling; the condition-code check may yield a symbolic value.
    """
    @wraps(body)
    def instruction_implementation(cpu, *args, **kwargs):
        ret = None
        should_execute = cpu.should_execute_conditional()
        if cpu._at_symbolic_conditional == cpu.instruction.address:
            # Re-entry at the same address after the symbolic-condition
            # detour below: execute unconditionally this time.
            cpu._at_symbolic_conditional = None
            should_execute = True
        else:
            if issymbolic(should_execute):
                # Let's remember next time we get here we should not do this again
                cpu._at_symbolic_conditional = cpu.instruction.address
                i_size = cpu.instruction.size
                # PC already points past this instruction; rewind it when the
                # condition holds (re-executing the instruction), keep it
                # otherwise (skipping the instruction).
                cpu.PC = Operators.ITEBV(
                    cpu.address_bit_size, should_execute, cpu.PC - i_size, cpu.PC
                )
                return
        if should_execute:
            ret = body(cpu, *args, **kwargs)
        if cpu.should_commit_flags():
            cpu.commit_flags()
        return ret

    return abstract_instruction(instruction_implementation)
# Map capstone ARM operand type constants to the kind strings used by
# Armv7Operand (note: CIMM is treated as a plain immediate).
_TYPE_MAP = {
    cs.arm.ARM_OP_REG: "register",
    cs.arm.ARM_OP_MEM: "memory",
    cs.arm.ARM_OP_IMM: "immediate",
    cs.arm.ARM_OP_PIMM: "coprocessor",
    cs.arm.ARM_OP_CIMM: "immediate",
}
class Armv7Operand(Operand):
    """
    Wrapper over a capstone ARM operand that knows how to read and write
    its value ("register", "immediate", "coprocessor" or "memory") through
    the owning Armv7Cpu, including shift and writeback handling.
    """
    def __init__(self, cpu, op, **kwargs):
        super().__init__(cpu, op, **kwargs)
        self.__type = _TYPE_MAP[self.op.type]
    @property
    def type(self):
        """
        Corresponds to capstone's `operand.type` (cs.arm.ARM_OP_*).
        """
        return self.__type
    @property
    def size(self):
        """Register operand width in bits: 64 for D registers, else 32."""
        assert self.__type == "register"
        if cs.arm.ARM_REG_D0 <= self.op.reg <= cs.arm.ARM_REG_D31:
            return 64
        else:
            # FIXME check other types of operand sizes
            return 32
    def read(self, nbits=None, with_carry=False):
        """
        Return the operand's value; when `with_carry`, also return the
        carry produced while evaluating it (shifted register or expanded
        immediate), otherwise the current APSR_C is passed through.
        """
        carry = self.cpu.regfile.read("APSR_C")
        if self.__type == "register":
            value = self.cpu.regfile.read(self.reg)
            # PC in this case has to be set to the instruction after next. PC at this point
            # is already pointing to next instruction; we bump it one more.
            if self.reg in ("PC", "R15"):
                value += self.cpu.instruction.size
            if self.is_shifted():
                shift = self.op.shift
                # XXX: This is unnecessary repetition.
                if shift.type in range(cs.arm.ARM_SFT_ASR_REG, cs.arm.ARM_SFT_RRX_REG + 1):
                    # Register-specified shift amount.
                    if self.cpu.mode == cs.CS_MODE_THUMB:
                        amount = shift.value.read()
                    else:
                        src_reg = self.cpu.instruction.reg_name(shift.value).upper()
                        amount = self.cpu.regfile.read(src_reg)
                else:
                    # Immediate shift amount.
                    amount = shift.value
                value, carry = self.cpu._shift(value, shift.type, amount, carry)
            if self.op.subtracted:
                value = -value
            if with_carry:
                return value, carry
            return value
        elif self.__type == "immediate":
            imm = self.op.imm
            if self.op.subtracted:
                imm = -imm
            if with_carry:
                return imm, self._get_expand_imm_carry(carry)
            return imm
        elif self.__type == "coprocessor":
            imm = self.op.imm
            return imm
        elif self.__type == "memory":
            val = self.cpu.read_int(self.address(), nbits)
            if with_carry:
                return val, carry
            return val
        else:
            raise NotImplementedError("readOperand unknown type", self.op.type)
    def write(self, value, nbits=None):
        """Write *value* to a register operand; memory stores are unimplemented."""
        if self.__type == "register":
            self.cpu.regfile.write(self.reg, value)
        elif self.__type == "memory":
            raise NotImplementedError("need to impl arm store mem")
        else:
            raise NotImplementedError("writeOperand unknown type", self.op.type)
    def writeback(self, value):
        """Writeback: registers get *value* directly; for a memory operand the
        base register is updated instead."""
        if self.__type == "register":
            self.write(value)
        elif self.__type == "memory":
            self.cpu.regfile.write(self.mem.base, value)
        else:
            raise NotImplementedError("writeback Operand unknown type", self.op.type)
    def is_shifted(self):
        """
        In ARM some of the operands may have an additional metadata which means they can be shifted
        with either a register or immediate value.
        See:
        * https://github.com/aquynh/capstone/blob/fdebc371ba0568acde007e08dad2cc3c9333e3fa/include/arm.h#L22-L34
        * 11.5 Syntax of Operand2 as a register with optional shift
        http://infocenter.arm.com/help/index.jsp?topic=/com.arm.doc.dui0473m/dom1361289852638.html
        :return: True if operand is shifted, otherwise False.
        """
        return self.op.shift.type != cs.arm.ARM_SFT_INVALID
    def address(self):
        """Effective address of a memory operand, wrapped to the address width."""
        assert self.__type == "memory"
        addr = self.get_mem_base_addr() + self.get_mem_offset()
        return addr & Mask(self.cpu.address_bit_size)
    def get_mem_offset(self):
        """Offset component of a memory operand: the (scaled, possibly shifted,
        possibly negated) index register if present, else the displacement."""
        assert self.__type == "memory"
        off = 0
        if self.mem.index is not None:
            idx = self.mem.scale * self.cpu.regfile.read(self.mem.index)
            carry = self.cpu.regfile.read("APSR_C")
            if self.is_shifted():
                shift = self.op.shift
                idx, carry = self.cpu._shift(idx, shift.type, shift.value, carry)
            off = -idx if self.op.subtracted else idx
        else:
            off = self.mem.disp
        return off
    def get_mem_base_addr(self):
        """Base-register component of a memory operand, with PC-relative fixups."""
        assert self.__type == "memory"
        base = self.cpu.regfile.read(self.mem.base)
        # PC relative addressing is fun in ARM:
        # In ARM mode, the spec defines the base value as current insn + 8
        # In thumb mode, the spec defines the base value as ALIGN(current insn address) + 4,
        # where ALIGN(current insn address) => <current insn address> & 0xFFFFFFFC
        #
        # Regardless of mode, our implementation of read(PC) will return the address
        # of the instruction following the next instruction.
        if self.mem.base in ("PC", "R15"):
            if self.cpu.mode == cs.CS_MODE_ARM:
                logger.debug(f"ARM mode PC relative addressing: PC + offset: 0x{base:x} + 0x{4:x}")
                return base + 4
            else:
                # base currently has the value PC + len(current_instruction)
                # we need (PC & 0xFFFFFFFC) + 4
                # thus:
                new_base = (base - self.cpu.instruction.size) & 0xFFFFFFFC
                logger.debug(
                    f"THUMB mode PC relative addressing: ALIGN(PC) + offset => 0x{new_base:x} + 0x{4:x}"
                )
                return new_base + 4
        else:
            return base
    def _get_expand_imm_carry(self, carryIn):
        """Manually compute the carry bit produced by expanding an immediate operand (see ARMExpandImm_C)"""
        insn = struct.unpack("<I", self.cpu.instruction.bytes)[0]
        unrotated = insn & Mask(8)
        shift = Operators.EXTRACT(insn, 8, 4)
        _, carry = self.cpu._shift(unrotated, cs.arm.ARM_SFT_ROR, 2 * shift, carryIn)
        return carry
class Armv7RegisterFile(RegisterFile):
    def __init__(self):
        """
        ARM Register file abstraction. GPRs use ints for read/write. APSR
        flags allow writes of bool/{1, 0} but always read bools.
        """
        super().__init__(
            {
                "SB": "R9",
                "SL": "R10",
                "FP": "R11",
                "IP": "R12",
                "STACK": "R13",
                "SP": "R13",
                "LR": "R14",
                "PC": "R15",
            }
        )
        self._regs = {}
        # 32-bit general purpose registers R0..R15
        for index in range(16):
            self._regs[f"R{index}"] = Register(32)
        # 64-bit double registers D0..D31
        for index in range(32):
            self._regs[f"D{index}"] = Register(64)
        # APSR condition flags (one bit each) plus the 4-bit GE field
        for flag in ("N", "Z", "C", "V"):
            self._regs[f"APSR_{flag}"] = Register(1)
        self._regs["APSR_GE"] = Register(4)
        # MMU Coprocessor -- to support MCR/MRC for TLS
        self._regs["P15_C13"] = Register(32)
    def _read_APSR(self):
        """Assemble the full 32-bit APSR from the individual flag registers."""
        def make_apsr_flag(flag_expr, offset):
            """Helper for constructing an expression for the APSR register"""
            return Operators.ITEBV(
                32, flag_expr, BitVecConstant(32, 1 << offset), BitVecConstant(32, 0)
            )
        N = self.read("APSR_N")
        Z = self.read("APSR_Z")
        C = self.read("APSR_C")
        V = self.read("APSR_V")
        flag_bits = ((N, 31), (Z, 30), (C, 29), (V, 28))
        if any(issymbolic(flag) for flag, _ in flag_bits):
            # Any symbolic flag forces a symbolic APSR expression.
            apsr = (
                make_apsr_flag(N, 31)
                | make_apsr_flag(Z, 30)
                | make_apsr_flag(C, 29)
                | make_apsr_flag(V, 28)
            )
        else:
            apsr = 0
            for flag, bit in flag_bits:
                if flag:
                    apsr |= 1 << bit
        return apsr
    def _write_APSR(self, apsr):
        """Auxiliary function - Writes flags from a full APSR (only 4 msb used)"""
        for name, bit in (("V", 28), ("C", 29), ("Z", 30), ("N", 31)):
            self.write(f"APSR_{name}", Operators.EXTRACT(apsr, bit, 1))
    def read(self, register):
        """Read a register by (possibly aliased) name; APSR is synthesized."""
        assert register in self
        if register == "APSR":
            return self._read_APSR()
        return self._regs[self._alias(register)].read()
    def write(self, register, value):
        """Write a register by name; an APSR write fans out to the flag registers."""
        assert register in self
        if register == "APSR":
            return self._write_APSR(value)
        self._regs[self._alias(register)].write(value)
    @property
    def all_registers(self):
        gprs = tuple(f"R{i}" for i in range(16))
        dregs = tuple(f"D{i}" for i in range(32))
        extras = ("APSR", "APSR_N", "APSR_Z", "APSR_C", "APSR_V", "APSR_GE", "P15_C13")
        return super().all_registers + gprs + dregs + extras
    @property
    def canonical_registers(self):
        return tuple(f"R{i}" for i in range(16)) + ("APSR",)
class Armv7LinuxSyscallAbi(SyscallAbi):
    """ARMv7 Linux system call ABI"""
    # EABI standards:
    # syscall # is in R7
    # arguments are passed in R0-R6
    # retval is passed in R0
    def syscall_number(self):
        """Return the syscall number (held in R7 under the ARM EABI)."""
        return self._cpu.R7
    def get_arguments(self):
        """Yield the names of the registers carrying syscall arguments."""
        # NOTE(review): the comment above says R0-R6, but only R0-R5 are
        # yielded here -- confirm whether 7-argument syscalls matter.
        yield from (f"R{index}" for index in range(6))
    def write_result(self, result):
        """Place the syscall return value in R0."""
        self._cpu.R0 = result
class Armv7CdeclAbi(Abi):
    """ARMv7 Cdecl function call ABI"""
    def get_arguments(self):
        """Yield argument locations: R0-R3, then addresses on the stack."""
        # First four passed via R0-R3, then on stack
        yield from ("R0", "R1", "R2", "R3")
        yield from self.values_from(self._cpu.STACK)
    def write_result(self, result):
        """Function results are returned in R0."""
        self._cpu.R0 = result
    def ret(self):
        """Return from a call by restoring PC from the link register."""
        self._cpu.PC = self._cpu.LR
class Armv7Cpu(Cpu):
"""
Cpu specialization handling the ARMv7 architecture.
Note: In this implementation, PC contains address of current
instruction + 4. However, official spec defines PC to be address of
current instruction + 8 (section A2.3).
"""
address_bit_size = 32
max_instr_width = 4
machine = "armv7"
arch = cs.CS_ARCH_ARM
# 'mode' is usually defined here as a class member, but it can change, so
# it's an instance property.
    def __init__(self, memory):
        # Pending truth values for instructions inside an IT block,
        # consumed in order by should_execute_conditional().
        self._it_conditional = list()
        # Flags staged by the last arithmetic instruction; committed to the
        # register file only by commit_flags().
        self._last_flags = {"C": 0, "V": 0, "N": 0, "Z": 0, "GE": 0}
        # Address of an instruction whose condition was symbolic, so the
        # revisit knows to execute it unconditionally (see `instruction`).
        self._at_symbolic_conditional = None
        self._mode = cs.CS_MODE_ARM
        super().__init__(Armv7RegisterFile(), memory)
    def __getstate__(self):
        """Serialize CPU state for pickling; mirror of __setstate__."""
        state = super().__getstate__()
        state["_last_flags"] = self._last_flags
        # NOTE: this key intentionally has no leading underscore;
        # __setstate__ reads the same key.
        state["at_symbolic_conditional"] = self._at_symbolic_conditional
        state["_it_conditional"] = self._it_conditional
        state["_mode"] = self._mode
        return state
    def __setstate__(self, state):
        """Restore CPU state from a pickled dict; mirror of __getstate__."""
        self._last_flags = state["_last_flags"]
        # Key deliberately lacks the leading underscore (see __getstate__).
        self._at_symbolic_conditional = state["at_symbolic_conditional"]
        self._it_conditional = state["_it_conditional"]
        self._mode = state["_mode"]
        super().__setstate__(state)
    @property
    def mode(self):
        """Current capstone disassembly mode (cs.CS_MODE_ARM or cs.CS_MODE_THUMB)."""
        return self._mode
    @mode.setter
    def mode(self, new_mode):
        assert new_mode in (cs.CS_MODE_ARM, cs.CS_MODE_THUMB)
        if self._mode != new_mode:
            logger.debug(f'swapping into {"ARM" if new_mode == cs.CS_MODE_ARM else "THUMB"} mode')
        self._mode = new_mode
        # Keep the disassembler's mode in sync with the CPU's.
        self.disasm.disasm.mode = new_mode
    def _set_mode_by_val(self, val):
        """Set ARM/THUMB mode from a branch target: LSB set selects THUMB.
        A symbolic target raises Concretize so the state forks on the mode."""
        new_mode = Operators.ITEBV(
            self.address_bit_size, (val & 0x1) == 0x1, cs.CS_MODE_THUMB, cs.CS_MODE_ARM
        )
        if issymbolic(new_mode):
            from ..state import Concretize
            def set_concrete_mode(state, value):
                state.cpu.mode = value
            raise Concretize(
                "Concretizing ARMv7 mode", expression=new_mode, setstate=set_concrete_mode
            )
        self.mode = new_mode
def _swap_mode(self):
"""Toggle between ARM and Thumb mode"""
assert self.mode in (cs.CS_MODE_ARM, cs.CS_MODE_THUMB)
if self.mode == cs.CS_MODE_ARM:
self.mode = cs.CS_MODE_THUMB
else:
self.mode = cs.CS_MODE_ARM
# Flags that are the result of arithmetic instructions. Unconditionally
# set, but conditionally committed.
#
# Register file has the actual CPU flags
def set_flags(self, **flags):
"""
Note: For any unmodified flags, update _last_flags with the most recent
committed value. Otherwise, for example, this could happen:
overflow=0
instr1 computes overflow=1, updates _last_flags, doesn't commit
instr2 updates all flags in _last_flags except overflow (overflow remains 1 in _last_flags)
instr2 commits all in _last_flags
now overflow=1 even though it should still be 0
"""
unupdated_flags = self._last_flags.keys() - flags.keys()
for flag in unupdated_flags:
flag_name = f"APSR_{flag}"
self._last_flags[flag] = self.regfile.read(flag_name)
self._last_flags.update(flags)
def commit_flags(self):
# XXX: capstone incorrectly sets .update_flags for adc
if self.instruction.mnemonic == "adc":
return
for flag, val in self._last_flags.items():
flag_name = f"APSR_{flag}"
self.regfile.write(flag_name, val)
    def _shift(cpu, value, _type, amount, carry):
        """See Shift() and Shift_C() in the ARM manual.
        Returns the (shifted value, carry out) pair; a zero amount passes
        value and carry through unchanged."""
        assert cs.arm.ARM_SFT_INVALID < _type <= cs.arm.ARM_SFT_RRX_REG
        # XXX: Capstone should set the value of an RRX shift to 1, which is
        # asserted in the manual, but it sets it to 0, so we have to check
        if _type in (cs.arm.ARM_SFT_RRX, cs.arm.ARM_SFT_RRX_REG) and amount != 1:
            amount = 1
        elif _type in range(cs.arm.ARM_SFT_ASR_REG, cs.arm.ARM_SFT_RRX_REG + 1):
            # Register-specified shift amounts only use the bottom byte.
            amount = Operators.EXTRACT(amount, 0, 8)
        if amount == 0:
            return value, carry
        width = cpu.address_bit_size
        if _type in (cs.arm.ARM_SFT_ASR, cs.arm.ARM_SFT_ASR_REG):
            return ASR_C(value, amount, width)
        elif _type in (cs.arm.ARM_SFT_LSL, cs.arm.ARM_SFT_LSL_REG):
            return LSL_C(value, amount, width)
        elif _type in (cs.arm.ARM_SFT_LSR, cs.arm.ARM_SFT_LSR_REG):
            return LSR_C(value, amount, width)
        elif _type in (cs.arm.ARM_SFT_ROR, cs.arm.ARM_SFT_ROR_REG):
            return ROR_C(value, amount, width)
        elif _type in (cs.arm.ARM_SFT_RRX, cs.arm.ARM_SFT_RRX_REG):
            return RRX_C(value, carry, width)
        raise NotImplementedError("Bad shift value")
    # TODO add to abstract cpu, and potentially remove stacksub/add from it?
    def stack_push(self, data, nbytes=None):
        """Push *data* (int, BitVec, or str of raw bytes) and return the new SP."""
        if isinstance(data, int):
            nbytes = nbytes or self.address_bit_size // 8
            self.SP -= nbytes
            self.write_int(self.SP, data, nbytes * 8)
        elif isinstance(data, BitVec):
            # NOTE(review): BitVec is not in this module's explicit smtlib
            # imports -- presumably it arrives via `from .bitwise import *`;
            # confirm.
            self.SP -= data.size // 8
            self.write_int(self.SP, data, data.size)
        elif isinstance(data, str):
            self.SP -= len(data)
            self.write(self.SP, data)
        else:
            raise NotImplementedError("unsupported type for stack push data")
        return self.SP
    def stack_peek(self, nbytes=4):
        """Return *nbytes* from the top of the stack without popping."""
        return self.read(self.SP, nbytes)
def stack_pop(self, nbytes=4):
# TODO is the distinction between load and read really in the op size?
nbits = nbytes * 8
if nbits == self.address_bit_size:
val = self.read_int(self.SP, nbits)
else:
val = self.read(self.SP, nbytes)
self.SP += nbytes
return val
    def read(self, addr, nbytes):
        """Read *nbytes* raw bytes from memory at *addr*."""
        return self.read_bytes(addr, nbytes)
    def write(self, addr, data):
        """Write raw bytes *data* to memory at *addr*."""
        return self.write_bytes(addr, data)
    def set_arm_tls(self, data):
        """Set the TLS pointer exposed via the modeled CP15 c13 register (see MRC)."""
        self.regfile.write("P15_C13", data)
@staticmethod
def canonicalize_instruction_name(instr):
name = instr.insn_name().upper()
# XXX bypass a capstone bug that incorrectly labels some insns as mov
if name == "MOV":
if instr.mnemonic.startswith("lsr"):
return "LSR"
elif instr.mnemonic.startswith("lsl"):
return "LSL"
elif instr.mnemonic.startswith("asr"):
return "ASR"
return OP_NAME_MAP.get(name, name)
    def _wrap_operands(self, ops):
        """Wrap raw capstone operands in Armv7Operand accessors."""
        return [Armv7Operand(self, op) for op in ops]
def should_commit_flags(cpu):
# workaround for a capstone bug (issue #980);
# the bug has been fixed the 'master' and 'next' branches of capstone as of 2017-07-31
if cpu.instruction.id == cs.arm.ARM_INS_UADD8:
return True
return cpu.instruction.update_flags
    def should_execute_conditional(cpu):
        """Evaluate the current instruction's condition; may return a
        symbolic boolean (handled by the `instruction` decorator)."""
        # for the IT instruction, the cc applies to the subsequent instructions,
        # so the IT instruction should be executed regardless of its cc
        if cpu.instruction.id == cs.arm.ARM_INS_IT:
            return True
        # support for the it[x[y[z]]] <op> instructions
        if cpu._it_conditional:
            # Consume one queued IT-block truth value (FIFO order).
            return cpu._it_conditional.pop(0)
        cc = cpu.instruction.cc
        return cpu._evaluate_conditional(cc)
def _evaluate_conditional(cpu, cc):
C = cpu.regfile.read("APSR_C")
N = cpu.regfile.read("APSR_N")
V = cpu.regfile.read("APSR_V")
Z = cpu.regfile.read("APSR_Z")
if cc == cs.arm.ARM_CC_AL:
ret = True
elif cc == cs.arm.ARM_CC_EQ:
ret = Z
elif cc == cs.arm.ARM_CC_NE:
ret = Operators.NOT(Z)
elif cc == cs.arm.ARM_CC_HS:
ret = C
elif cc == cs.arm.ARM_CC_LO:
ret = Operators.NOT(C)
elif cc == cs.arm.ARM_CC_MI:
ret = N
elif cc == cs.arm.ARM_CC_PL:
ret = Operators.NOT(N)
elif cc == cs.arm.ARM_CC_VS:
ret = V
elif cc == cs.arm.ARM_CC_VC:
ret = Operators.NOT(V)
elif cc == cs.arm.ARM_CC_HI:
ret = Operators.AND(C, Operators.NOT(Z))
elif cc == cs.arm.ARM_CC_LS:
ret = Operators.OR(Operators.NOT(C), Z)
elif cc == cs.arm.ARM_CC_GE:
ret = N == V
elif cc == cs.arm.ARM_CC_LT:
ret = N != V
elif cc == cs.arm.ARM_CC_GT:
ret = Operators.AND(Operators.NOT(Z), N == V)
elif cc == cs.arm.ARM_CC_LE:
ret = Operators.OR(Z, N != V)
else:
raise NotImplementedError("Bad conditional tag")
return ret
@instruction
def IT(cpu):
cc = cpu.instruction.cc
true_case = cpu._evaluate_conditional(cc)
# this is incredibly hacky--how else does capstone expose this?
# TODO: find a better way than string parsing the mnemonic -GR, 2017-07-13
for c in cpu.instruction.mnemonic[1:]:
if c == "t":
cpu._it_conditional.append(true_case)
elif c == "e":
cpu._it_conditional.append(not true_case)
    @instruction
    def UADD8(cpu, dest, src, op):
        """Unsigned byte-wise add: sum each of the four byte lanes
        independently and stage the per-lane carries into APSR_GE."""
        op1 = src.read()
        op2 = op.read()
        sums = list()
        carries = list()
        for i in range(4):
            # Widen each byte lane to 9 bits so the lane carry is observable.
            uo1 = UInt(Operators.ZEXTEND(Operators.EXTRACT(op1, (8 * i), 8), 9), 9)
            uo2 = UInt(Operators.ZEXTEND(Operators.EXTRACT(op2, (8 * i), 8), 9), 9)
            byte = uo1 + uo2
            carry = Operators.EXTRACT(byte, 8, 1)
            sums.append(Operators.EXTRACT(byte, 0, 8))
            carries.append(carry)
        # CONCAT takes most-significant first, hence the reversal.
        dest.write(Operators.CONCAT(32, *reversed(sums)))
        cpu.set_flags(GE=Operators.CONCAT(4, *reversed(carries)))
    @instruction
    def SEL(cpu, dest, op1, op2):
        """Select bytes: for each byte lane, take op1's byte when the
        corresponding APSR_GE bit is set, else op2's byte."""
        op1val = op1.read()
        op2val = op2.read()
        result = list()
        GE = cpu.regfile.read("APSR_GE")
        for i in range(4):
            bit = Operators.EXTRACT(GE, i, 1)
            result.append(
                Operators.ITEBV(
                    8, bit, Operators.EXTRACT(op1val, i * 8, 8), Operators.EXTRACT(op2val, i * 8, 8)
                )
            )
        # CONCAT takes most-significant first, hence the reversal.
        dest.write(Operators.CONCAT(32, *reversed(result)))
    @instruction
    def MOV(cpu, dest, src):
        """
        Implement the MOV{S} instruction.
        Note: If src operand is PC, temporarily release our logical PC
        view and conform to the spec, which dictates PC = curr instr + 8
        :param Armv7Operand dest: The destination operand; register.
        :param Armv7Operand src: The source operand; register or immediate.
        """
        if cpu.mode == cs.CS_MODE_ARM:
            # ARM mode: the operand may be a shifted register / expanded
            # immediate, which produces a carry to stage.
            result, carry_out = src.read(with_carry=True)
            dest.write(result)
            cpu.set_flags(C=carry_out, N=HighBit(result), Z=(result == 0))
        else:
            # thumb mode cannot do wonky things to the operand, so no carry calculation
            result = src.read()
            dest.write(result)
            cpu.set_flags(N=HighBit(result), Z=(result == 0))
@instruction
def MOVT(cpu, dest, src):
"""
MOVT writes imm16 to Rd[31:16]. The write does not affect Rd[15:0].
:param Armv7Operand dest: The destination operand; register
:param Armv7Operand src: The source operand; 16-bit immediate
"""
assert src.type == "immediate"
imm = src.read()
low_halfword = dest.read() & Mask(16)
dest.write((imm << 16) | low_halfword)
@instruction
def MRC(cpu, coprocessor, opcode1, dest, coprocessor_reg_n, coprocessor_reg_m, opcode2):
"""
MRC moves to ARM register from coprocessor.
:param Armv7Operand coprocessor: The name of the coprocessor; immediate
:param Armv7Operand opcode1: coprocessor specific opcode; 3-bit immediate
:param Armv7Operand dest: the destination operand: register
:param Armv7Operand coprocessor_reg_n: the coprocessor register; immediate
:param Armv7Operand coprocessor_reg_m: the coprocessor register; immediate
:param Armv7Operand opcode2: coprocessor specific opcode; 3-bit immediate
"""
assert coprocessor.type == "coprocessor"
assert opcode1.type == "immediate"
assert opcode2.type == "immediate"
assert dest.type == "register"
imm_coprocessor = coprocessor.read()
imm_opcode1 = opcode1.read()
imm_opcode2 = opcode2.read()
coprocessor_n_name = coprocessor_reg_n.read()
coprocessor_m_name = coprocessor_reg_m.read()
if 15 == imm_coprocessor: # MMU
if 0 == imm_opcode1:
if 13 == coprocessor_n_name:
if 3 == imm_opcode2:
dest.write(cpu.regfile.read("P15_C13"))
return
raise NotImplementedError(
"MRC: unimplemented combination of coprocessor, opcode, and coprocessor register"
)
    @instruction
    def LDRD(cpu, dest1, dest2, src, offset=None):
        """Loads double width data from memory."""
        assert dest1.type == "register"
        assert dest2.type == "register"
        assert src.type == "memory"
        mem1 = cpu.read_int(src.address(), 32)
        mem2 = cpu.read_int(src.address() + 4, 32)
        # Writeback address is computed before the register writes occur.
        writeback = cpu._compute_writeback(src, offset)
        dest1.write(mem1)
        dest2.write(mem2)
        cpu._cs_hack_ldr_str_writeback(src, offset, writeback)
    @instruction
    def STRD(cpu, src1, src2, dest, offset=None):
        """Writes the contents of two registers to memory."""
        assert src1.type == "register"
        assert src2.type == "register"
        assert dest.type == "memory"
        val1 = src1.read()
        val2 = src2.read()
        # Writeback address is computed before the stores occur.
        writeback = cpu._compute_writeback(dest, offset)
        cpu.write_int(dest.address(), val1, 32)
        cpu.write_int(dest.address() + 4, val2, 32)
        cpu._cs_hack_ldr_str_writeback(dest, offset, writeback)
    @instruction
    def LDREX(cpu, dest, src, offset=None):
        """
        LDREX loads data from memory.
        * If the physical address has the shared TLB attribute, LDREX
          tags the physical address as exclusive access for the current
          processor, and clears any exclusive access tag for this
          processor for any other physical address.
        * Otherwise, it tags the fact that the executing processor has
          an outstanding tagged physical address.
        :param Armv7Operand dest: the destination register; register
        :param Armv7Operand src: the source operand: register
        """
        # TODO: add lock mechanism to underlying memory --GR, 2017-06-06
        # Modeled as a plain 32-bit load (no exclusivity tracking).
        cpu._LDR(dest, src, 32, False, offset)
    @instruction
    def STREX(cpu, status, *args):
        """
        STREX performs a conditional store to memory.
        :param Armv7Operand status: the destination register for the returned status; register
        """
        # TODO: implement conditional return with appropriate status --GR, 2017-06-06
        # Modeled as always succeeding: status 0 means the store happened.
        status.write(0)
        return cpu._STR(cpu.address_bit_size, *args)
def _UXT(cpu, dest, src, src_width):
"""
Helper for UXT* family of instructions.
:param ARMv7Operand dest: the destination register; register
:param ARMv7Operand dest: the source register; register
:param int src_width: bits to consider of the src operand
"""
val = GetNBits(src.read(), src_width)
word = Operators.ZEXTEND(val, cpu.address_bit_size)
dest.write(word)
@instruction
def UXTB(cpu, dest, src):
"""
UXTB extracts an 8-bit value from a register, zero-extends
it to the size of the register, and writes the result to the destination register.
:param ARMv7Operand dest: the destination register; register
:param ARMv7Operand dest: the source register; register
"""
cpu._UXT(dest, src, 8)
@instruction
def UXTH(cpu, dest, src):
"""
UXTH extracts an 16-bit value from a register, zero-extends
it to the size of the register, and writes the result to the destination register.
:param ARMv7Operand dest: the destination register; register
:param ARMv7Operand dest: the source register; register
"""
cpu._UXT(dest, src, 16)
    @instruction
    def PLD(cpu, addr, offset=None):
        """PLD instructs the cpu that the address at addr might be loaded soon."""
        # Prefetch hint: modeled as a no-op.
def _compute_writeback(cpu, operand, offset):
if offset:
off = offset.read()
else:
off = operand.get_mem_offset()
wbaddr = operand.get_mem_base_addr() + off
return wbaddr
    def _cs_hack_ldr_str_writeback(cpu, operand, offset, val):
        """Apply base-register writeback (*val*) for LDR/STR forms."""
        # capstone bug doesn't set writeback correctly for postindex reg
        if cpu.instruction.writeback or offset:
            operand.writeback(val)
    def _STR(cpu, width, src, dest, offset=None):
        """Store *width* bits of src into the memory operand dest, then
        apply any base-register writeback."""
        val = src.read()
        writeback = cpu._compute_writeback(dest, offset)
        cpu.write_int(dest.address(), val, width)
        cpu._cs_hack_ldr_str_writeback(dest, offset, writeback)
    @instruction
    def STR(cpu, *args):
        """Store a word (address_bit_size bits); see _STR."""
        return cpu._STR(cpu.address_bit_size, *args)
    @instruction
    def STRB(cpu, *args):
        """Store a byte; see _STR."""
        return cpu._STR(8, *args)
    @instruction
    def STRH(cpu, *args):
        """Store a halfword (16 bits); see _STR."""
        return cpu._STR(16, *args)
    def _LDR(cpu, dest, src, width, is_signed, offset):
        """Shared LDR* implementation: load *width* bits, sign- or
        zero-extend, handle a load into PC, apply writeback."""
        mem = cpu.read_int(src.address(), width)
        writeback = cpu._compute_writeback(src, offset)
        if is_signed:
            word = Operators.SEXTEND(mem, width, cpu.address_bit_size)
        else:
            word = Operators.ZEXTEND(mem, cpu.address_bit_size)
        if dest.reg in ("PC", "R15"):
            # A load into PC is an interworking branch: bit 0 selects
            # ARM/THUMB mode and is cleared from the branch target.
            cpu._set_mode_by_val(word)
            word &= ~0x1
            logger.debug(f"LDR writing 0x{word:x} -> PC")
        dest.write(word)
        cpu._cs_hack_ldr_str_writeback(src, offset, writeback)
    @instruction
    def LDR(cpu, dest, src, offset=None):
        """Load a 32-bit word; see _LDR."""
        cpu._LDR(dest, src, 32, False, offset)
    @instruction
    def LDRH(cpu, dest, src, offset=None):
        """Load a zero-extended halfword; see _LDR."""
        cpu._LDR(dest, src, 16, False, offset)
    @instruction
    def LDRSH(cpu, dest, src, offset=None):
        """Load a sign-extended halfword; see _LDR."""
        cpu._LDR(dest, src, 16, True, offset)
    @instruction
    def LDRB(cpu, dest, src, offset=None):
        """Load a zero-extended byte; see _LDR."""
        cpu._LDR(dest, src, 8, False, offset)
    @instruction
    def LDRSB(cpu, dest, src, offset=None):
        """Load a sign-extended byte; see _LDR."""
        cpu._LDR(dest, src, 8, True, offset)
    def _ADD(cpu, _op1, _op2, carry=0):
        """Shared adder for ADD/ADC/SUB/RSB/CMP/...: compute
        _op1 + _op2 + carry in double width so both the unsigned carry-out
        and the signed overflow are recoverable; stage C/V/N/Z via
        set_flags and return (result, carry_out, overflow)."""
        W = cpu.address_bit_size
        # masking to 32 because sometimes capstone's op.imm field is negative.
        # this converts it back to unsigned
        _op2 = Operators.ZEXTEND(_op2, W)
        uo1 = UInt(_op1, W * 2)
        uo2 = UInt(_op2, W * 2)
        c = UInt(carry, W * 2)
        unsigned_sum = uo1 + uo2 + c
        so1 = SInt(Operators.SEXTEND(_op1, W, W * 2), W * 2)
        so2 = SInt(Operators.SEXTEND(_op2, W, W * 2), W * 2)
        signed_sum = so1 + so2 + c
        result = GetNBits(unsigned_sum, W)
        # Carry out: the true unsigned sum does not fit in W bits.
        carry_out = UInt(result, W * 2) != unsigned_sum
        # Overflow: the true signed sum does not fit in W bits.
        overflow = SInt(Operators.SEXTEND(result, W, W * 2), W * 2) != signed_sum
        cpu.set_flags(C=carry_out, V=overflow, N=HighBit(result), Z=result == 0)
        return result, carry_out, overflow
@instruction
def ADC(cpu, dest, op1, op2=None):
carry = cpu.regfile.read("APSR_C")
if op2 is not None:
result, carry, overflow = cpu._ADD(op1.read(), op2.read(), carry)
else:
result, carry, overflow = cpu._ADD(dest.read(), op1.read(), carry)
dest.write(result)
return result, carry, overflow
@instruction
def ADD(cpu, dest, src, add=None):
if add is not None:
result, carry, overflow = cpu._ADD(src.read(), add.read())
else:
# support for the thumb mode version of adds <dest>, <immediate>
result, carry, overflow = cpu._ADD(dest.read(), src.read())
dest.write(result)
return result, carry, overflow
@instruction
def RSB(cpu, dest, src, add):
inv_src = GetNBits(~src.read(), cpu.address_bit_size)
result, carry, overflow = cpu._ADD(inv_src, add.read(), 1)
dest.write(result)
return result, carry, overflow
@instruction
def RSC(cpu, dest, src, add):
carry = cpu.regfile.read("APSR_C")
inv_src = GetNBits(~src.read(), cpu.address_bit_size)
result, carry, overflow = cpu._ADD(inv_src, add.read(), carry)
dest.write(result)
return result, carry, overflow
@instruction
def SUB(cpu, dest, src, add=None):
if add is not None:
result, carry, overflow = cpu._ADD(src.read(), ~add.read(), 1)
else:
# support for the thumb mode version of sub <dest>, <immediate>
result, carry, overflow = cpu._ADD(dest.read(), ~src.read(), 1)
dest.write(result)
return result, carry, overflow
@instruction
def SBC(cpu, dest, op1, op2=None):
carry = cpu.regfile.read("APSR_C")
if op2 is not None:
result, carry, overflow = cpu._ADD(op1.read(), ~op2.read(), carry)
else:
result, carry, overflow = cpu._ADD(dest.read(), ~op1.read(), carry)
dest.write(result)
return result, carry, overflow
    @instruction
    def ADR(cpu, dest, src):
        """
        Address to Register adds an immediate value to the PC value, and writes the result to the destination register.
        :param ARMv7Operand dest: Specifies the destination register.
        :param ARMv7Operand src:
            Specifies the label of an instruction or literal data item whose address is to be loaded into
            <Rd>. The assembler calculates the required value of the offset from the Align(PC,4)
            value of the ADR instruction to this label.
        """
        # Align(PC, 4): word-aligned address of this instruction + 4.
        aligned_pc = (cpu.instruction.address + 4) & 0xFFFFFFFC
        dest.write(aligned_pc + src.read())
    @instruction
    def ADDW(cpu, dest, src, add):
        """
        This instruction adds an immediate value to a register value, and writes the result to the destination register.
        It doesn't update the condition flags.
        :param ARMv7Operand dest: Specifies the destination register. If omitted, this register is the same as src.
        :param ARMv7Operand src:
            Specifies the register that contains the first operand. If the SP is specified for dest, see ADD (SP plus
            immediate). If the PC is specified for dest, see ADR.
        :param ARMv7Operand add:
            Specifies the immediate value to be added to the value obtained from src. The range of allowed values is
            0-4095.
        """
        aligned_pc = (cpu.instruction.address + 4) & 0xFFFFFFFC
        # A PC source uses the aligned PC value rather than the register read.
        if src.type == "register" and src.reg in ("PC", "R15"):
            src = aligned_pc
        else:
            src = src.read()
        dest.write(src + add.read())
    @instruction
    def SUBW(cpu, dest, src, add):
        """
        This instruction subtracts an immediate value from a register value, and writes the result to the destination
        register. It can optionally update the condition flags based on the result.
        :param ARMv7Operand dest: Specifies the destination register. If omitted, this register is the same as src.
        :param ARMv7Operand src:
            Specifies the register that contains the first operand. If the SP is specified for dest, see ADD (SP plus
            immediate). If the PC is specified for dest, see ADR.
        :param ARMv7Operand add:
            Specifies the immediate value to be subtracted from the value obtained from src. The range of allowed
            values is 0-4095.
        """
        aligned_pc = (cpu.instruction.address + 4) & 0xFFFFFFFC
        # A PC source uses the aligned PC value rather than the register read.
        if src.type == "register" and src.reg in ("PC", "R15"):
            src = aligned_pc
        else:
            src = src.read()
        dest.write(src - add.read())
    @instruction
    def B(cpu, dest):
        """Branch: conditionality is handled by the `instruction` decorator."""
        cpu.PC = dest.read()
@instruction
def BX(cpu, dest):
dest_val = dest.read()
cpu._set_mode_by_val(dest_val)
cpu.PC = dest_val & ~1
@instruction
def BLE(cpu, dest):
cpu.PC = Operators.ITEBV(
cpu.address_bit_size, cpu.regfile.read("APSR_Z"), dest.read(), cpu.PC
)
    @instruction
    def CBZ(cpu, op, dest):
        """
        Compare and Branch on Zero compares the value in a register with zero, and conditionally branches forward
        a constant value. It does not affect the condition flags.
        :param ARMv7Operand op: Specifies the register that contains the first operand.
        :param ARMv7Operand dest:
            Specifies the label of the instruction that is to be branched to. The assembler calculates the
            required value of the offset from the PC value of the CBZ instruction to this label, then
            selects an encoding that will set imm32 to that offset. Allowed offsets are even numbers in
            the range 0 to 126.
        """
        # ITEBV condition is the raw register value: nonzero keeps PC
        # (fall through), zero selects the branch target.
        cpu.PC = Operators.ITEBV(cpu.address_bit_size, op.read(), cpu.PC, dest.read())
    @instruction
    def CBNZ(cpu, op, dest):
        """
        Compare and Branch on Non-Zero compares the value in a register with zero, and conditionally branches
        forward a constant value. It does not affect the condition flags.
        :param ARMv7Operand op: Specifies the register that contains the first operand.
        :param ARMv7Operand dest:
            Specifies the label of the instruction that is to be branched to. The assembler calculates the
            required value of the offset from the PC value of the CBNZ instruction to this label, then
            selects an encoding that will set imm32 to that offset. Allowed offsets are even numbers in
            the range 0 to 126.
        """
        # ITEBV condition is the raw register value: nonzero selects the
        # branch target, zero keeps PC (fall through).
        cpu.PC = Operators.ITEBV(cpu.address_bit_size, op.read(), dest.read(), cpu.PC)
@instruction
def BL(cpu, label):
next_instr_addr = cpu.regfile.read("PC")
if cpu.mode == cs.CS_MODE_THUMB:
cpu.regfile.write("LR", next_instr_addr + 1)
else:
cpu.regfile.write("LR", next_instr_addr)
cpu.regfile.write("PC", label.read())
    @instruction
    def BLX(cpu, dest):
        """Branch with link and exchange: like BL, but may switch between
        ARM and THUMB mode depending on the destination form."""
        address = cpu.PC
        target = dest.read()
        next_instr_addr = cpu.regfile.read("PC")
        if cpu.mode == cs.CS_MODE_THUMB:
            # Thumb return addresses keep the low bit set.
            cpu.regfile.write("LR", next_instr_addr + 1)
        else:
            cpu.regfile.write("LR", next_instr_addr)
        cpu.regfile.write("PC", target & ~1)
        # The `blx <label>` form of this instruction forces a mode swap
        # Otherwise check the lsb of the destination and set the mode
        if dest.type == "immediate":
            logger.debug(f"swapping mode due to BLX at inst 0x{address:x}")
            cpu._swap_mode()
        elif dest.type == "register":
            cpu._set_mode_by_val(dest.read())
    @instruction
    def TBB(cpu, dest):
        """
        Table Branch Byte causes a PC-relative forward branch using a table of single byte offsets. A base register
        provides a pointer to the table, and a second register supplies an index into the table. The branch length is
        twice the value of the byte returned from the table.
        :param ARMv7Operand dest: see below; register
        """
        # Capstone merges the two registers values into one operand, so we need to extract them back
        # Specifies the base register. This contains the address of the table of branch lengths. This
        # register is allowed to be the PC. If it is, the table immediately follows this instruction.
        base_addr = dest.get_mem_base_addr()
        if dest.mem.base in ("PC", "R15"):
            base_addr = cpu.PC
        # Specifies the index register. This contains an integer pointing to a single byte within the
        # table. The offset within the table is the value of the index.
        offset = cpu.read_int(base_addr + dest.get_mem_offset(), 8)
        offset = Operators.ZEXTEND(offset, cpu.address_bit_size)
        # Branch length is twice the table entry.
        cpu.PC += offset << 1
    @instruction
    def TBH(cpu, dest):
        """
        Table Branch Halfword causes a PC-relative forward branch using a table of single halfword offsets. A base
        register provides a pointer to the table, and a second register supplies an index into the table. The branch
        length is twice the value of the halfword returned from the table.
        :param ARMv7Operand dest: see below; register
        """
        # Capstone merges the two registers values into one operand, so we need to extract them back
        # Specifies the base register. This contains the address of the table of branch lengths. This
        # register is allowed to be the PC. If it is, the table immediately follows this instruction.
        base_addr = dest.get_mem_base_addr()
        if dest.mem.base in ("PC", "R15"):
            base_addr = cpu.PC
        # Specifies the index register. This contains an integer pointing to a halfword within the table.
        # The offset within the table is twice the value of the index.
        offset = cpu.read_int(base_addr + dest.get_mem_offset(), 16)
        offset = Operators.ZEXTEND(offset, cpu.address_bit_size)
        # Branch length is twice the table entry.
        cpu.PC += offset << 1
@instruction
def CMP(cpu, reg, compare):
    """Compare: compute reg - compare as reg + ~compare + 1 and set the flags."""
    notcmp = ~compare.read() & Mask(cpu.address_bit_size)
    # Subtraction via two's complement reuses the shared _ADD flag logic.
    cpu._ADD(reg.read(), notcmp, 1)
@instruction
def POP(cpu, *regs):
    """
    Pop one word per register off the stack, lowest-numbered register first.
    Popping into PC behaves like BX: bit 0 selects the instruction-set state.
    """
    for reg in regs:
        val = cpu.stack_pop(cpu.address_bit_size // 8)
        if reg.reg in ("PC", "R15"):
            # Bit 0 of the popped value picks ARM/Thumb mode, then is cleared
            # so the PC stays aligned.
            cpu._set_mode_by_val(val)
            val = val & ~0x1
        reg.write(val)
@instruction
def PUSH(cpu, *regs):
    """Push the given registers onto the stack, highest-numbered first."""
    for reg in regs[::-1]:
        cpu.stack_push(reg.read())
@instruction
def CLZ(cpu, dest, src):
    """
    Count Leading Zeros: write to `dest` the number of zero bits above the
    most significant set bit of `src` (the full register width if src == 0).
    """
    # Check if the |pos| bit is 1, pos being the offset from the MSB
    value = src.read()
    msb = cpu.address_bit_size - 1
    # Default covers src == 0: every bit is a leading zero. Derived from the
    # register width (was a hard-coded 32) to stay consistent with the loop.
    result = cpu.address_bit_size
    # Scan LSB to MSB; the last ITEBV that fires corresponds to the highest
    # set bit, leaving result == msb - pos leading zeros.
    for pos in range(cpu.address_bit_size):
        cond = Operators.EXTRACT(value, pos, 1) == 1
        result = Operators.ITEBV(cpu.address_bit_size, cond, msb - pos, result)
    dest.write(result)
@instruction
def NOP(cpu):
    """No Operation: architecturally does nothing."""
@instruction
def REV(cpu, dest, op):
    """Reverse the byte order of a 32-bit word."""
    opval = op.read()
    _bytes = list()
    for i in range(4):
        # Bytes are collected LSB-first; CONCAT takes operands MSB-first,
        # so this reverses their order.
        _bytes.append(Operators.EXTRACT(opval, i * 8, 8))
    dest.write(Operators.CONCAT(32, *_bytes))
@instruction
def SXTH(cpu, dest, op):
    """Sign-extend the low halfword of `op` to 32 bits."""
    _op = op.read()
    dest.write(Operators.SEXTEND(Operators.EXTRACT(_op, 0, 16), 16, 32))
def _LDM(cpu, insn_id, base, regs):
    """
    LDM (Load Multiple) loads a non-empty subset, or possibly all, of the general-purpose registers from
    sequential memory locations. It is useful for block loads, stack operations and procedure exit sequences.

    :param int insn_id: should be one of ARM_INS_LDM, ARM_INS_LDMIB, ARM_INS_LDMDA, ARM_INS_LDMDB
    :param Armv7Operand base: Specifies the base register.
    :param list[Armv7Operand] regs:
        Is a list of registers. It specifies the set of registers to be loaded by the LDM instruction.
        The registers are loaded in sequence, the lowest-numbered register from the lowest memory
        address (start_address), through to the highest-numbered register from the highest memory
        address (end_address). If the PC is specified in the register list (opcode bit[15] is set),
        the instruction causes a branch to the address (data) loaded into the PC.
        It's technically UNKNOWN if you writeback to a register you loaded into, but we let it slide.
    """
    if cpu.instruction.usermode:
        raise NotImplementedError("Use of the S bit is not supported")
    # IA/IB variants walk the address upward, DA/DB downward; the "after"
    # variants adjust the address after each access, the others before.
    increment = insn_id in (cs.arm.ARM_INS_LDM, cs.arm.ARM_INS_LDMIB)
    after = insn_id in (cs.arm.ARM_INS_LDM, cs.arm.ARM_INS_LDMDA)
    address = base.read()
    for reg in regs:
        if not after:
            address += (1 if increment else -1) * (reg.size // 8)
        reg.write(cpu.read_int(address, reg.size))
        if reg.reg in ("PC", "R15"):
            # The general-purpose registers loaded can include the PC. If they do, the word loaded for the PC is
            # treated as an address and a branch occurs to that address. In ARMv5 and above, bit[0] of the loaded
            # value determines whether execution continues after this branch in ARM state or in Thumb state, as
            # though a BX instruction had been executed.
            cpu._set_mode_by_val(cpu.PC)
            cpu.PC = cpu.PC & ~1
        if after:
            address += (1 if increment else -1) * (reg.size // 8)
    if cpu.instruction.writeback:
        base.writeback(address)

@instruction
def LDM(cpu, base, *regs):
    """Load Multiple, Increment After."""
    cpu._LDM(cs.arm.ARM_INS_LDM, base, regs)

@instruction
def LDMIB(cpu, base, *regs):
    """Load Multiple, Increment Before."""
    cpu._LDM(cs.arm.ARM_INS_LDMIB, base, regs)

@instruction
def LDMDA(cpu, base, *regs):
    """Load Multiple, Decrement After."""
    cpu._LDM(cs.arm.ARM_INS_LDMDA, base, regs)

@instruction
def LDMDB(cpu, base, *regs):
    """Load Multiple, Decrement Before."""
    cpu._LDM(cs.arm.ARM_INS_LDMDB, base, regs)
def _STM(cpu, insn_id, base, regs):
    """
    STM (Store Multiple) stores a non-empty subset (or possibly all) of the general-purpose registers to
    sequential memory locations.

    :param int insn_id: should be one of ARM_INS_STM, ARM_INS_STMIB, ARM_INS_STMDA, ARM_INS_STMDB
    :param Armv7Operand base: Specifies the base register.
    :param list[Armv7Operand] regs:
        Is a list of registers. It specifies the set of registers to be stored by the STM instruction.
        The registers are stored in sequence, the lowest-numbered register to the lowest
        memory address (start_address), through to the highest-numbered register to the
        highest memory address (end_address).
    """
    if cpu.instruction.usermode:
        raise NotImplementedError("Use of the S bit is not supported")
    # IA/IB variants walk the address upward, DA/DB downward; the "after"
    # variants adjust the address after each access, the others before.
    increment = insn_id in (cs.arm.ARM_INS_STM, cs.arm.ARM_INS_STMIB)
    after = insn_id in (cs.arm.ARM_INS_STM, cs.arm.ARM_INS_STMDA)
    address = base.read()
    for reg in regs:
        if not after:
            address += (1 if increment else -1) * (reg.size // 8)
        cpu.write_int(address, reg.read(), reg.size)
        if after:
            address += (1 if increment else -1) * (reg.size // 8)
    if cpu.instruction.writeback:
        base.writeback(address)

@instruction
def STM(cpu, base, *regs):
    """Store Multiple, Increment After."""
    cpu._STM(cs.arm.ARM_INS_STM, base, regs)

@instruction
def STMIB(cpu, base, *regs):
    """Store Multiple, Increment Before."""
    cpu._STM(cs.arm.ARM_INS_STMIB, base, regs)

@instruction
def STMDA(cpu, base, *regs):
    """Store Multiple, Decrement After."""
    cpu._STM(cs.arm.ARM_INS_STMDA, base, regs)

@instruction
def STMDB(cpu, base, *regs):
    """Store Multiple, Decrement Before."""
    cpu._STM(cs.arm.ARM_INS_STMDB, base, regs)
def _bitwise_instruction(cpu, operation, dest, op1, *op2):
    """
    Shared helper for bitwise instructions: apply a unary or binary
    `operation`, write the result to `dest` (unless dest is None, e.g.
    for TEQ) and set the N/Z/C flags. The carry comes from the flexible
    operand's shifter (read(with_carry=True)).
    """
    if op2:
        op2_val, carry = op2[0].read(with_carry=True)
        result = operation(op1.read(), op2_val)
    else:
        op1_val, carry = op1.read(with_carry=True)
        result = operation(op1_val)
    if dest is not None:
        dest.write(result)
    cpu.set_flags(C=carry, N=HighBit(result), Z=(result == 0))
@instruction
def ORR(cpu, dest, op1, op2=None):
    """Bitwise OR; the two-operand form uses `dest` as the first source."""
    if op2 is not None:
        cpu._bitwise_instruction(lambda x, y: x | y, dest, op1, op2)
    else:
        cpu._bitwise_instruction(lambda x, y: x | y, dest, dest, op1)

@instruction
def ORN(cpu, dest, op1, op2=None):
    """Bitwise OR with the second operand inverted (OR NOT)."""
    if op2 is not None:
        cpu._bitwise_instruction(lambda x, y: x | ~y, dest, op1, op2)
    else:
        cpu._bitwise_instruction(lambda x, y: x | ~y, dest, dest, op1)

@instruction
def EOR(cpu, dest, op1, op2=None):
    """Bitwise exclusive OR."""
    if op2 is not None:
        cpu._bitwise_instruction(lambda x, y: x ^ y, dest, op1, op2)
    else:
        cpu._bitwise_instruction(lambda x, y: x ^ y, dest, dest, op1)

@instruction
def AND(cpu, dest, op1, op2=None):
    """Bitwise AND."""
    if op2 is not None:
        cpu._bitwise_instruction(lambda x, y: x & y, dest, op1, op2)
    else:
        cpu._bitwise_instruction(lambda x, y: x & y, dest, dest, op1)
@instruction
def TEQ(cpu, *operands):
    """Test Equivalence: set flags for op1 ^ op2 without writing a result."""
    cpu._bitwise_instruction(lambda x, y: x ^ y, None, *operands)
    cpu.commit_flags()

@instruction
def TST(cpu, Rn, Rm):
    """Test: set N/Z/C for Rn & Rm without writing a result."""
    shifted, carry = Rm.read(with_carry=True)
    result = Rn.read() & shifted
    cpu.set_flags(N=HighBit(result), Z=(result == 0), C=carry)
@instruction
def SVC(cpu, op):
    """Supervisor Call: raise Interruption(0) to signal a system call."""
    if op.read() != 0:
        # Only `svc 0` is expected here; log anything else but still trap.
        logger.warning(f"Bad SVC number: {op.read():08}")
    raise Interruption(0)
@instruction
def CMN(cpu, src, add):
    """Compare Negative: set flags for src + add; the sum itself is discarded."""
    result, carry, overflow = cpu._ADD(src.read(), add.read())
    return result, carry, overflow
def _SR(cpu, insn_id, dest, op, *rest):
    """
    Shared implementation of the ASR/LSL/LSR shift instructions.

    Notes on Capstone behavior:
    - In ARM mode, _SR reg has `rest`, but _SR imm does not, its baked into `op`.
    - In ARM mode, `lsr r1, r2` will have a `rest[0]`
    - In Thumb mode, `lsr r1, r2` will have an empty `rest`
    - In ARM mode, something like `lsr r1, 3` will not have `rest` and op will be
      the immediate.
    """
    assert insn_id in (cs.arm.ARM_INS_ASR, cs.arm.ARM_INS_LSL, cs.arm.ARM_INS_LSR)
    # Pick the shift type matching the mnemonic; *_REG variants take the
    # amount from a register instead of an immediate.
    if insn_id == cs.arm.ARM_INS_ASR:
        if rest and rest[0].type == "immediate":
            srtype = cs.arm.ARM_SFT_ASR
        else:
            srtype = cs.arm.ARM_SFT_ASR_REG
    elif insn_id == cs.arm.ARM_INS_LSL:
        if rest and rest[0].type == "immediate":
            srtype = cs.arm.ARM_SFT_LSL
        else:
            srtype = cs.arm.ARM_SFT_LSL_REG
    elif insn_id == cs.arm.ARM_INS_LSR:
        if rest and rest[0].type == "immediate":
            srtype = cs.arm.ARM_SFT_LSR
        else:
            srtype = cs.arm.ARM_SFT_LSR_REG
    carry = cpu.regfile.read("APSR_C")
    if rest and rest[0].type == "register":
        # FIXME we should make Operand.op private (and not accessible)
        src_reg = cpu.instruction.reg_name(rest[0].op.reg).upper()
        amount = cpu.regfile.read(src_reg)
        result, carry = cpu._shift(op.read(), srtype, amount, carry)
    elif rest and rest[0].type == "immediate":
        amount = rest[0].read()
        result, carry = cpu._shift(op.read(), srtype, amount, carry)
    elif cpu.mode == cs.CS_MODE_THUMB:
        # Thumb two-operand form: dest is also the first source and `op`
        # holds the shift amount.
        amount = op.read()
        result, carry = cpu._shift(dest.read(), srtype, amount, carry)
    else:
        # ARM-mode immediate form: per the note above, the shift is baked
        # into `op`, so reading with carry applies it.
        result, carry = op.read(with_carry=True)
    dest.write(result)
    cpu.set_flags(N=HighBit(result), Z=(result == 0), C=carry)
@instruction
def ASR(cpu, dest, op, *rest):
    """Arithmetic Shift Right; see _SR for operand handling."""
    cpu._SR(cs.arm.ARM_INS_ASR, dest, op, *rest)

@instruction
def LSL(cpu, dest, op, *rest):
    """Logical Shift Left; see _SR for operand handling."""
    cpu._SR(cs.arm.ARM_INS_LSL, dest, op, *rest)

@instruction
def LSR(cpu, dest, op, *rest):
    """Logical Shift Right; see _SR for operand handling."""
    cpu._SR(cs.arm.ARM_INS_LSR, dest, op, *rest)
@instruction
def UMULL(cpu, rdlo, rdhi, rn, rm):
    """
    Unsigned Multiply Long: rdhi:rdlo = rn * rm as an unsigned
    double-width product. N is the top bit of the full product, Z is set
    when the whole product is zero.
    """
    result = UInt(rn.read(), cpu.address_bit_size * 2) * UInt(
        rm.read(), cpu.address_bit_size * 2
    )
    rdhi.write(Operators.EXTRACT(result, cpu.address_bit_size, cpu.address_bit_size))
    rdlo.write(GetNBits(result, cpu.address_bit_size))
    # N is the MSB of the 2*width product; derive the bit index from the
    # register width instead of hard-coding 63, matching the widths above.
    cpu.set_flags(N=Bit(result, cpu.address_bit_size * 2 - 1), Z=(result == 0))
@instruction
def MUL(cpu, dest, src1, src2):
    """Multiply: dest = (src1 * src2) truncated to register width; sets N/Z."""
    width = cpu.address_bit_size
    op1 = SInt(src1.read(), width)
    op2 = SInt(src2.read(), width)
    result = op1 * op2
    # Only the low `width` bits are architecturally visible.
    dest.write(result & Mask(width))
    cpu.set_flags(N=HighBit(result), Z=(result == 0))
@instruction
def MVN(cpu, dest, op):
    """Move Not: dest = ~op; N/Z/C come from the shared bitwise helper."""
    cpu._bitwise_instruction(lambda x: ~x, dest, op)
@instruction
def MLA(cpu, dest, op1, op2, addend):
    """Multiply Accumulate: dest = op1 * op2 + addend (truncated); sets N/Z."""
    width = cpu.address_bit_size
    op1_val = SInt(op1.read(), width)
    op2_val = SInt(op2.read(), width)
    add_val = SInt(addend.read(), width)
    result = op1_val * op2_val + add_val
    # Only the low `width` bits are architecturally visible.
    dest.write(result & Mask(cpu.address_bit_size))
    cpu.set_flags(N=HighBit(result), Z=(result == 0))
@instruction
def BIC(cpu, dest, op1, op2=None):
    """
    Bit Clear: dest = op1 & ~op2. The two-operand form uses `dest` as the
    first source. Sets N/Z.
    """
    if op2 is not None:
        result = (op1.read() & ~op2.read()) & Mask(cpu.address_bit_size)
    else:
        result = (dest.read() & ~op1.read()) & Mask(cpu.address_bit_size)
    dest.write(result)
    cpu.set_flags(N=HighBit(result), Z=(result == 0))
def _VSTM(cpu, address, *regs):
    """Store each register sequentially starting at `address`; return the
    address one past the last store."""
    for reg in regs:
        cpu.write_int(address, reg.read(), reg.size)
        address += reg.size // 8
    return address

@instruction
def VSTMIA(cpu, base, *regs):
    """Vector Store Multiple, Increment After."""
    updated_address = cpu._VSTM(base.read(), *regs)
    if cpu.instruction.writeback:
        base.writeback(updated_address)

@instruction
def VSTMDB(cpu, base, *regs):
    """Vector Store Multiple, Decrement Before."""
    # NOTE(review): the pre-decrement assumes each register is
    # address_bit_size wide, while _VSTM advances by reg.size; these can
    # disagree for 64-bit D registers -- confirm against the VFP spec.
    address = base.read() - cpu.address_bit_size // 8 * len(regs)
    updated_address = cpu._VSTM(address, *regs)
    if cpu.instruction.writeback:
        base.writeback(updated_address)
@instruction
def VLDMIA(cpu, base, *regs):
    """Vector Load Multiple, Increment After: reuses the LDM logic."""
    cpu._LDM(cs.arm.ARM_INS_LDM, base, regs)
@instruction
def STCL(cpu, *operands):
    """Store Coprocessor: a no-op under Manticore's execution model."""

@instruction
def DMB(cpu, *operands):
    """
    Used by the the __kuser_dmb ARM Linux user-space handler. This is a nop
    under Manticore's memory and execution model.
    """

@instruction
def LDCL(cpu, *operands):
    """Occasionally used in glibc (longjmp in ld.so). Nop under our execution model."""
@instruction
def UQSUB8(cpu, dest, op1, op2):
    """
    Unsigned saturating byte-wise subtraction: each result byte is
    op1_byte - op2_byte, saturated at zero.
    """
    src1 = op1.read()
    src2 = op2.read()
    result = []
    # Walk the bytes MSB-first because CONCAT expects operands high-to-low.
    for offset in reversed(range(0, op1.size, 8)):
        byte1 = Operators.EXTRACT(src1, offset, 8)
        byte2 = Operators.EXTRACT(src2, offset, 8)
        byte_diff = byte1 - byte2
        # Saturate: a negative difference clamps to 0.
        result.append(Operators.ITEBV(8, byte_diff < 0, 0, byte_diff))
    dest.write(Operators.CONCAT(dest.size, *result))
| {
"repo_name": "montyly/manticore",
"path": "manticore/native/cpu/arm.py",
"copies": "1",
"size": "57246",
"license": "apache-2.0",
"hash": 1055243491017341300,
"line_mean": 34.358863496,
"line_max": 120,
"alpha_frac": 0.5630437061,
"autogenerated": false,
"ratio": 3.6071833648393197,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9666772122605238,
"avg_score": 0.000690989666816142,
"num_lines": 1619
} |
from functools import wraps
import logging
import sys
from etcd import client
from conf.appconfig import HEALTH_OK, HEALTH_FAILED, TOTEM_ETCD_SETTINGS
from deployer.services.storage.factory import get_store
from deployer.tasks.common import ping
from deployer.util import timeout
HEALTH_TIMEOUT_SECONDS = 10
log = logging.getLogger(__name__)
def _check(func):
    """
    Wrapper that creates a dictionary response containing 'status' and
    'details'.
    where status can be
        'ok': If wrapped function returns successfully.
        'failed': If wrapped function throws error.
    details is:
        returned value from the wrapped function if no exception is thrown
        else string representation of exception when exception is thrown

    :param func: Function to be wrapped
    :return: dictionary output containing keys 'status' and 'details'
    :rtype: dict
    """
    @wraps(func)
    def inner(*args, **kwargs):
        try:
            return {
                'status': HEALTH_OK,
                'details': func(*args, **kwargs)
            }
        except Exception as exc:
            # Catch Exception (not a bare except) so SystemExit /
            # KeyboardInterrupt are not swallowed by a health probe.
            log.exception('Health check failed')
            return {
                'status': HEALTH_FAILED,
                'details': str(exc)
            }
    return inner
@timeout(HEALTH_TIMEOUT_SECONDS)
@_check
def _check_etcd():
    """
    Checks etcd connectivity by listing the cluster machines.

    :return: dict holding the reachable etcd machines.
    :rtype: dict
    """
    etcd_cl = client.Client(
        host=TOTEM_ETCD_SETTINGS['host'],
        port=TOTEM_ETCD_SETTINGS['port'])
    return {
        'machines': etcd_cl.machines
    }
@timeout(HEALTH_TIMEOUT_SECONDS)
@_check
def _check_store():
    """
    Checks health of default store
    """
    return get_store().health()
@timeout(HEALTH_TIMEOUT_SECONDS)
@_check
def _check_celery():
    """
    Checks health for celery integration using ping-pong task output.
    """
    # Round-trips a task through the broker; bounded by the same timeout
    # as the surrounding decorator.
    output = ping.delay().get(timeout=HEALTH_TIMEOUT_SECONDS)
    return 'Celery ping:%s' % output
def get_health(check_celery=True):
    """
    Gets the health of the all the external services.

    :param bool check_celery: include the (slower) celery round-trip check.
    :return: dictionary with
        key: service name like etcd, celery, elasticsearch
        value: dictionary of health status
    :rtype: dict
    """
    health_status = {
        'etcd': _check_etcd(),
        'store': _check_store()
    }
    if check_celery:
        health_status['celery'] = _check_celery()
    return health_status
| {
"repo_name": "totem/cluster-deployer",
"path": "deployer/services/health.py",
"copies": "1",
"size": "2353",
"license": "mit",
"hash": -8482059612215861000,
"line_mean": 24.3010752688,
"line_max": 74,
"alpha_frac": 0.6298342541,
"autogenerated": false,
"ratio": 4.0017006802721085,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5131534934372108,
"avg_score": null,
"num_lines": null
} |
from functools import wraps
import logging
import threading
import logging.config
import abc
import os
from amberdriver.common import drivermsg_pb2
from amberdriver.common.amber_pipes import AmberPipes
__author__ = 'paoolo'
LOGGER_NAME = 'MessageHandler'
pwd = os.path.dirname(os.path.abspath(__file__))
logging.config.fileConfig('%s/amber.ini' % pwd)
class MessageHandler(object):
    """
    Base class for amber driver message handlers: dispatches messages
    arriving through AmberPipes and tracks the client IDs subscribed to
    periodic data updates.
    """

    def __init__(self, pipe_in, pipe_out):
        self.__amber_pipes = AmberPipes(self, pipe_in, pipe_out)
        # Subscribed client IDs; every access goes through __subscribers_lock.
        self.__subscribers = []
        self.__subscribers_lock = threading.Lock()
        self.__logger = logging.getLogger(LOGGER_NAME)

    def __call__(self, *args, **kwargs):
        # Delegate execution to the underlying pipes object.
        self.__amber_pipes(*args, **kwargs)

    def is_alive(self):
        return self.__amber_pipes.is_alive()

    def get_pipes(self):
        return self.__amber_pipes

    @abc.abstractmethod
    def handle_data_message(self, header, message):
        """Handle a DATA message addressed to this driver."""
        pass

    @abc.abstractmethod
    def handle_subscribe_message(self, header, message):
        """Handle a SUBSCRIBE request from a client."""
        pass

    @abc.abstractmethod
    def handle_unsubscribe_message(self, header, message):
        """Handle an UNSUBSCRIBE request from a client."""
        pass

    @abc.abstractmethod
    def handle_client_died_message(self, client_id):
        """React to notification that `client_id` disconnected."""
        pass

    def fill_subscription_response(self, response_message):
        # Hook for subclasses to populate the periodic DATA message.
        # NOTE(review): send_subscribers_message uses this method's return
        # value, but this base implementation returns None -- subclasses
        # appear expected to return the filled message; confirm.
        pass

    def send_subscribers_message(self):
        """Broadcast a DATA message to all currently subscribed clients."""
        subscribers = self.__get_subscribers()
        if len(subscribers) > 0:
            response_header = drivermsg_pb2.DriverHdr()
            response_message = drivermsg_pb2.DriverMsg()
            response_message.type = drivermsg_pb2.DriverMsg.DATA
            # ackNum 0: unsolicited push, not a reply to a request.
            response_message.ackNum = 0
            response_header.clientIDs.extend(subscribers)
            response_message = self.fill_subscription_response(response_message)
            self.get_pipes().write_header_and_message_to_pipe(response_header, response_message)

    def __get_subscribers(self):
        # Snapshot the list under the lock so callers can iterate safely.
        self.__subscribers_lock.acquire()
        try:
            return list(self.__subscribers)
        finally:
            self.__subscribers_lock.release()

    def add_subscribers(self, client_ids):
        self.__subscribers_lock.acquire()
        try:
            self.__subscribers.extend(client_ids)
        finally:
            self.__subscribers_lock.release()

    def remove_subscriber(self, client_id):
        self.__subscribers_lock.acquire()
        try:
            self.__subscribers.remove(client_id)
        except ValueError:
            # Unsubscribing a client that never subscribed is not fatal.
            self.__logger.warning('Client %d does not registered as subscriber', client_id)
        finally:
            self.__subscribers_lock.release()

    def is_any_subscriber(self):
        self.__subscribers_lock.acquire()
        try:
            return len(self.__subscribers) > 0
        finally:
            self.__subscribers_lock.release()

    @staticmethod
    def handle_and_response(func):
        """Decorator: build a DATA reply echoing the request's synNum and
        client IDs, let `func` fill it, then write it to the pipe."""
        @wraps(func)
        def wrapped(inst, received_header, received_message):
            response_header = drivermsg_pb2.DriverHdr()
            response_message = drivermsg_pb2.DriverMsg()
            response_message.type = drivermsg_pb2.DriverMsg.DATA
            response_message.ackNum = received_message.synNum
            response_header.clientIDs.extend(received_header.clientIDs)
            response_header, response_message = func(inst, received_header, received_message,
                                                    response_header, response_message)
            inst.get_pipes().write_header_and_message_to_pipe(response_header, response_message)
        return wrapped
"repo_name": "showmen15/testEEE",
"path": "src/amberdriver/common/message_handler.py",
"copies": "1",
"size": "3571",
"license": "mit",
"hash": -4160240774888401400,
"line_mean": 29.2711864407,
"line_max": 96,
"alpha_frac": 0.6317558107,
"autogenerated": false,
"ratio": 4.133101851851852,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5264857662551852,
"avg_score": null,
"num_lines": null
} |
from functools import wraps
import logging
import threading
import logging.config
import time
import abc
import os
from amberdriver.common import drivermsg_pb2
from amberdriver.common.amber_pipes import AmberPipes
__author__ = 'paoolo'
pwd = os.path.dirname(os.path.abspath(__file__))
logging.config.fileConfig('%s/amber.ini' % pwd)
LOGGER_NAME = 'MessageHandler'
class MessageHandler(object):
    """
    Base class for amber driver message handlers: dispatches messages
    arriving through AmberPipes and tracks the client IDs subscribed to
    periodic data updates.
    """

    def __init__(self, pipe_in, pipe_out):
        self.__amber_pipes = AmberPipes(self, pipe_in, pipe_out)
        # Subscribed client IDs; every access goes through __subscribers_lock.
        self.__subscribers = []
        self.__subscribers_lock = threading.Lock()
        self.__logger = logging.getLogger(LOGGER_NAME)

    def __call__(self, *args, **kwargs):
        # Delegate execution to the underlying pipes object.
        self.__amber_pipes(*args, **kwargs)

    def run(self):
        self.__amber_pipes.run()

    def is_alive(self):
        return self.__amber_pipes.is_alive()

    def get_pipes(self):
        return self.__amber_pipes

    @abc.abstractmethod
    def handle_data_message(self, header, message):
        """Handle a DATA message addressed to this driver."""
        pass

    @abc.abstractmethod
    def handle_subscribe_message(self, header, message):
        """Handle a SUBSCRIBE request from a client."""
        pass

    @abc.abstractmethod
    def handle_unsubscribe_message(self, header, message):
        """Handle an UNSUBSCRIBE request from a client."""
        pass

    @abc.abstractmethod
    def handle_client_died_message(self, client_id):
        """React to notification that `client_id` disconnected."""
        pass

    def fill_subscription_response(self, response_message):
        # Hook for subclasses to populate the periodic DATA message.
        # Return the message unchanged so send_subscribers_message never
        # ends up writing None when a subclass forgets to override this.
        return response_message

    def send_subscribers_message(self):
        """Broadcast a DATA message to all currently subscribed clients."""
        subscribers = self.__get_subscribers()
        if len(subscribers) > 0:
            response_header = drivermsg_pb2.DriverHdr()
            response_message = drivermsg_pb2.DriverMsg()
            response_message.type = drivermsg_pb2.DriverMsg.DATA
            # ackNum 0: unsolicited push, not a reply to a request.
            response_message.ackNum = 0
            response_header.clientIDs.extend(subscribers)
            response_message = self.fill_subscription_response(response_message)
            self.get_pipes().write_header_and_message_to_pipe(response_header, response_message)

    def sending_loop(self):
        """Push subscriber updates every 100 ms while the pipes are alive."""
        while self.is_alive():
            self.send_subscribers_message()
            time.sleep(0.1)

    def __get_subscribers(self):
        # Snapshot the list under the lock so callers can iterate safely.
        with self.__subscribers_lock:
            return list(self.__subscribers)

    def add_subscribers(self, client_ids):
        with self.__subscribers_lock:
            self.__subscribers.extend(client_ids)

    def remove_subscriber(self, client_id):
        with self.__subscribers_lock:
            try:
                self.__subscribers.remove(client_id)
            except ValueError:
                # Unsubscribing a client that never subscribed is not fatal.
                self.__logger.warning('Client %d is not registered as subscriber', client_id)

    def is_any_subscriber(self):
        with self.__subscribers_lock:
            return len(self.__subscribers) > 0

    @staticmethod
    def handle_and_response(func):
        """Decorator: build a DATA reply echoing the request's synNum and
        client IDs, let `func` fill it, then write it to the pipe."""
        @wraps(func)
        def wrapped(inst, received_header, received_message):
            response_header = drivermsg_pb2.DriverHdr()
            response_message = drivermsg_pb2.DriverMsg()
            response_message.type = drivermsg_pb2.DriverMsg.DATA
            response_message.ackNum = received_message.synNum
            response_header.clientIDs.extend(received_header.clientIDs)
            response_header, response_message = func(inst, received_header, received_message,
                                                    response_header, response_message)
            inst.get_pipes().write_header_and_message_to_pipe(response_header, response_message)
        return wrapped
"repo_name": "project-capo/amber-python-drivers",
"path": "src/amberdriver/common/message_handler.py",
"copies": "1",
"size": "3769",
"license": "mit",
"hash": -7773805269012487000,
"line_mean": 28.453125,
"line_max": 96,
"alpha_frac": 0.6272220748,
"autogenerated": false,
"ratio": 4.105664488017429,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5232886562817429,
"avg_score": null,
"num_lines": null
} |
from functools import wraps
import logging
import time
from fabric.tasks import WrappedCallableTask
from dogapi import dog_http_api
logger = logging.getLogger("fabric")
MAX_ARGS_LEN = 256
def setup(api_key, application_key=None):
    """
    Configure the shared dogapi HTTP client with Datadog credentials.

    :param api_key: Datadog API key (required for submitting events).
    :param application_key: optional Datadog application key.
    """
    global dog_http_api
    dog_http_api.api_key = api_key
    if application_key is not None:
        dog_http_api.application_key = application_key
def _human_duration(d):
def pluralize(quantity, noun):
if quantity >= 2:
return "{0} {1}s".format(quantity, noun)
else:
return "{0} {1}".format(quantity, noun)
if d < 1:
return "less than 1 second"
elif d < 60:
return "{0}".format(pluralize(int(d), "second"))
elif d >= 61 and d < 3600:
return "{0}".format(pluralize(d/60, "minute"))
else:
return "{0} {1}".format(pluralize(d/3600, "hour"), pluralize(d % 3600, "minute"))
def _task_details(t):
return "%s.%s" % (t.__module__, t.__name__)
def _format_args(args, kwargs):
    """
    Render positional and keyword task arguments as a single
    comma-separated string, truncated to MAX_ARGS_LEN characters.

    NOTE(review): relies on the Python 2 `unicode` builtin; this module is
    py2-only as written.
    """
    serialized_args = u", ".join(map(unicode, args)+[u"{0}={1}".format(k, kwargs[k]) for k in kwargs])
    if len(serialized_args) > MAX_ARGS_LEN:
        return serialized_args[:MAX_ARGS_LEN] + u"..."
    else:
        return serialized_args
def _text(t, args, kwargs, duration, output, error):
    """
    Build the Datadog event body for a task run: one summary sentence
    (success or failure) plus the captured command output, if any.
    """
    if error:
        text = "{0}({1}) failed after {2} because of {3}.".format(_task_details(t), _format_args(args, kwargs), _human_duration(duration), error)
    else:
        text = "{0}({1}) ran for {2}.".format(_task_details(t), _format_args(args, kwargs), _human_duration(duration))
    if output:
        # %%% / @@@ wrap the output in Datadog's raw-text markup.
        text += (u'\nOutput:\n'
                 u'%%%\n'
                 u'@@@\n'
                 u'{0}\n'
                 u'@@@').format(output)
    return text
def _title(t, args, kwargs, error):
    """Event title: the fully qualified task name."""
    return _task_details(t)
def _aggregation_key(t, args, kwargs, error):
    """Aggregate events for the same task under one key (the task name)."""
    return _task_details(t)
def _tags(t, args, kwargs, error):
return []
def notify(t):
    """Decorates a fabric task: run it, time it, capture its output, and
    post a success/failure event to Datadog. Re-raises any task error
    after the event is sent."""
    @wraps(t)
    def wrapper(*args, **kwargs):
        start = time.time()
        error = None
        output = None
        try:
            r = t(*args, **kwargs)
            if r:
                # Normalize to a list of fabric result objects and join
                # command / stdout / stderr for the event body.
                if not isinstance(r, list):
                    r = [r]
                output = '\n\n'.join(['%s\n%s\n%s' %
                                      (res.command, res.stdout, res.stderr) for res in r]
                                     )
        except Exception as e:
            error = e
        end = time.time()
        duration = end - start
        try:
            dog_http_api.event(_title(t, args, kwargs, error),
                               _text(t, args, kwargs, duration, output, error),
                               source_type_name="fabric",
                               alert_type="error" if error else "success",
                               priority="normal",
                               aggregation_key=_aggregation_key(t, args, kwargs, error),
                               tags=_tags(t, args, kwargs, error))
        except Exception as e:
            # Notification failures must never mask the task result.
            logger.warn("Datadog notification on task {0} failed with {1}".format(t.__name__, e))
        if error:
            raise error
        else:
            return r
    return WrappedCallableTask(wrapper)
| {
"repo_name": "DataDog/dogapi",
"path": "src/dogapi/fab.py",
"copies": "1",
"size": "3259",
"license": "bsd-3-clause",
"hash": -8636410532286773000,
"line_mean": 30.9509803922,
"line_max": 145,
"alpha_frac": 0.5323718932,
"autogenerated": false,
"ratio": 3.6130820399113084,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4645453933111308,
"avg_score": null,
"num_lines": null
} |
from functools import wraps
import logging
from collections import Counter
from celery import shared_task
from django.contrib.auth.models import User
from django.core.cache import cache
from django.db import transaction
from django.template.loader import render_to_string
from pontoon.base.models import (
Entity,
Locale,
Resource,
TranslatedResource,
)
from pontoon.sync.changeset import ChangeSet
from pontoon.sync.vcs.models import VCSProject
from pontoon.sync.utils import locale_directory_path
log = logging.getLogger(__name__)
def update_originals(db_project, now, full_scan=False):
    """
    Sync source-locale resources and entities from VCS into the database.

    :param db_project: Project model instance to update.
    :param now: timestamp passed to the ChangeSet.
    :param full_scan: scan every file instead of only changed ones.
    :return: tuple (changeset changes, removed resource paths, added paths).
    """
    vcs_project = VCSProject(db_project, locales=[], full_scan=full_scan)
    # Resource and entity updates must land together or not at all.
    with transaction.atomic():
        removed_paths, added_paths = update_resources(db_project, vcs_project)
        changeset = ChangeSet(db_project, vcs_project, now)
        update_entities(db_project, vcs_project, changeset)
        changeset.execute()
    return changeset.changes, removed_paths, added_paths
def serial_task(timeout, lock_key="", on_error=None, **celery_args):
    """
    Decorator ensures that there's only one running task with given task_name.
    Decorated tasks are bound tasks, meaning their first argument is always their Task instance

    :param timeout: time after which lock is released.
    :param lock_key: allows to define different lock for respective parameters of task.
    :param on_error: callback to be executed if an error is raised.
    :param celery_args: argument passed to celery's shared_task decorator.
    """
    def wrapper(func):
        @shared_task(bind=True, **celery_args)
        @wraps(func)
        def wrapped_func(self, *args, **kwargs):
            lock_name = "serial_task.{}[{}]".format(self.name, lock_key.format(*args, **kwargs))
            # Acquire the lock: cache.add is atomic and fails if the key
            # already exists, i.e. another instance is still running.
            if not cache.add(lock_name, True, timeout=timeout):
                error = RuntimeError("Can't execute task '{}' because the previously called"
                                     " task is still running.".format(lock_name))
                if callable(on_error):
                    on_error(error, *args, **kwargs)
                raise error
            try:
                return func(self, *args, **kwargs)
            finally:
                # release the lock
                cache.delete(lock_name)
        return wrapped_func
    return wrapper
def collect_entities(db_project, vcs_project, unsynced_locales=None):
    """
    Find all the entities in the database and on the filesystem and
    match them together, yielding tuples of the form
    (entity_key, database_entity, vcs_entity).

    When a match isn't found, the missing entity will be None.
    """
    # With unsynced locales we must look at every resource, not only the
    # files VCS reports as changed.
    changed_resources = None if unsynced_locales else vcs_project.changed_files
    db_entities = get_db_entities(db_project, changed_resources)
    vcs_entities = get_vcs_entities(vcs_project)
    entity_keys = set().union(db_entities.keys(), vcs_entities.keys())
    for key in entity_keys:
        yield key, db_entities.get(key, None), vcs_entities.get(key, None)
def update_entities(db_project, vcs_project, changeset):
    """Queue entity creations/updates/obsoletions on `changeset` by
    diffing database entities against VCS entities."""
    for key, db_entity, vcs_entity in collect_entities(db_project, vcs_project):
        if vcs_entity is None:
            if db_entity is None:
                # This should never happen. What? Hard abort.
                raise ValueError(u'No entities found for key `{0}`'.format(key))
            else:
                # VCS no longer has the entity, obsolete it.
                changeset.obsolete_db_entity(db_entity)
        elif db_entity is None:
            # New VCS entities are added to Pontoon.
            changeset.create_db_entity(vcs_entity)
        else:
            changeset.update_db_source_entity(db_entity, vcs_entity)
def update_resources(db_project, vcs_project):
    """Update the database on what resource files exist in VCS.

    :return: tuple (removed resource paths, added resource paths).
    """
    log.debug('Scanning {}'.format(vcs_project.source_directory_path))
    _, vcs_removed_files = vcs_project.changed_source_files
    removed_resources = db_project.resources.filter(path__in=vcs_removed_files)
    # Materialize the paths before deleting the rows: the values_list
    # queryset is lazy, and evaluating it only after .delete() would
    # yield an empty result.
    removed_paths = list(removed_resources.values_list('path', flat=True))
    added_paths = []
    log.debug('Removed files: {}'.format(', '.join(removed_paths) or 'None'))
    removed_resources.delete()
    for relative_path, vcs_resource in vcs_project.resources.items():
        resource, created = db_project.resources.get_or_create(path=relative_path)
        resource.format = Resource.get_path_format(relative_path)
        resource.total_strings = len(vcs_resource.entities)
        resource.save()
        if created:
            added_paths.append(relative_path)
    log.debug('Added files: {}'.format(', '.join(added_paths) or 'None'))
    return removed_paths, added_paths
def update_translations(db_project, vcs_project, locale, changeset):
    """Queue translation syncs on `changeset` for one locale, reconciling
    database translations with what's in VCS."""
    for key, db_entity, vcs_entity in collect_entities(db_project, vcs_project, db_project.unsynced_locales):
        # If we don't have both the db_entity and vcs_entity we can't
        # do anything with the translations.
        if db_entity is None or vcs_entity is None:
            continue
        if not vcs_entity.has_translation_for(locale.code):
            # VCS lacks an entity for this locale, so we can't
            # pull updates nor edit it. Skip it!
            continue
        if db_entity.has_changed(locale):
            # Pontoon changes overwrite whatever VCS has.
            changeset.update_vcs_entity(locale, db_entity, vcs_entity)
        else:
            # If Pontoon has nothing or has not changed, and the VCS
            # still has the entity, update Pontoon with whatever may
            # have changed.
            changeset.update_db_entity(locale, db_entity, vcs_entity)
def update_translated_resources(db_project, vcs_project, locale):
    """Update the TranslatedResource entries in the database."""
    for resource in db_project.resources.all():
        # We only want to create/update the TranslatedResource object if the
        # resource exists in the current locale, UNLESS the file is asymmetric.
        vcs_resource = vcs_project.resources.get(resource.path, None)
        if vcs_resource is not None:
            resource_exists = vcs_resource.files.get(locale) is not None
            if resource_exists or resource.is_asymmetric:
                translatedresource, _ = TranslatedResource.objects.get_or_create(resource=resource, locale=locale)
                # Refresh cached translation statistics for this pairing.
                translatedresource.calculate_stats()
def get_vcs_entities(vcs_project):
    """Map entity_key -> VCS entity for every entity in the VCS project."""
    return {entity_key(entity): entity for entity in vcs_project.entities}
def get_changed_entities(db_project, changed_resources):
    """
    Queryset of the project's non-obsolete entities, optionally narrowed
    to those living in `changed_resources` paths (None means all).
    """
    entities = (Entity.objects
                .select_related('resource')
                .prefetch_related('changed_locales')
                .filter(resource__project=db_project, obsolete=False))
    if changed_resources is not None:
        entities = entities.filter(resource__path__in=changed_resources)
    return entities
def get_db_entities(db_project, changed_resources=None):
    """Map entity_key -> database entity, optionally restricted to entities
    in `changed_resources`."""
    return {entity_key(entity): entity for entity in get_changed_entities(db_project, changed_resources)}
def entity_key(entity):
    """
    Generate a key for the given entity that is unique within the
    project: '<resource path>:<entity key or source string>'.
    """
    name = entity.key if entity.key else entity.string
    return '{0}:{1}'.format(entity.resource.path, name)
def pull_changes(db_project, source_only=False):
    """
    Update the local files with changes from the VCS. Returns True
    if any of the updated repos have changed since the last sync.

    :param source_only: pull only the source repository.
    :return: tuple (changed flag, {repo pk: locales pulled from that repo}).
    """
    changed = False
    repositories = [db_project.source_repository] if source_only else db_project.repositories.all()
    repo_locales = {}
    skip_locales = []  # Skip already pulled locales
    for repo in repositories:
        repo_revisions = repo.pull(skip_locales)
        repo_locales[repo.pk] = Locale.objects.filter(code__in=repo_revisions.keys())
        skip_locales += repo_revisions.keys()
        # If any revision is None, we can't be sure if a change
        # happened or not, so we default to assuming it did.
        unsure_change = None in repo_revisions.values()
        if unsure_change or repo_revisions != repo.last_synced_revisions:
            changed = True
    return changed, repo_locales
def commit_changes(db_project, vcs_project, changeset, locale):
    """Commit the changes we've made back to the VCS."""
    authors = changeset.commit_authors_per_locale.get(locale.code, [])
    # Credit the most prolific translator of this batch as the commit
    # author; fall back to the fake Pontoon user when nobody translated.
    if authors:
        commit_author = Counter(authors).most_common(1)[0][0]
    else:
        commit_author = User(first_name="Mozilla Pontoon", email="pontoon@mozilla.com")
    message_context = {
        'locale': locale,
        'project': db_project,
        'authors': set(authors)
    }
    commit_message = render_to_string('sync/commit_message.jinja', message_context)
    locale_path = locale_directory_path(vcs_project.checkout_path, locale.code)
    repo = db_project.repository_for_path(locale_path)
    repo.commit(commit_message, commit_author, locale_path)
| {
"repo_name": "participedia/pontoon",
"path": "pontoon/sync/core.py",
"copies": "1",
"size": "9151",
"license": "bsd-3-clause",
"hash": -1823828266417518300,
"line_mean": 38.2746781116,
"line_max": 114,
"alpha_frac": 0.6669216479,
"autogenerated": false,
"ratio": 3.996069868995633,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5162991516895633,
"avg_score": null,
"num_lines": null
} |
from functools import wraps
import logging
from django.conf import settings
from django.contrib.gis.geos import Point
from molly.conf import get_app
from molly.utils import haversine
from molly.geolocation.models import Geocode
__all__ = ['geocode', 'reverse_geocode']
logger = logging.getLogger(__name__)
def _cached(getargsfunc):
    """
    Decorator factory that caches geocoding results in the ``Geocode`` model.

    ``getargsfunc`` normalises the wrapped function's arguments into a dict
    used both as the cache key and as keyword arguments for the provider
    call. A ``local_name`` entry, if present, selects the geolocation app
    instance and is popped from the dict before lookup.
    """
    def g(f):
        @wraps(f)
        def h(*args, **kwargs):
            args = getargsfunc(*args, **kwargs)
            app = get_app('molly.geolocation', args.pop('local_name', None))
            try:
                # Cache hit: return the stored results without querying
                # any provider.
                geocode = Geocode.recent.get(local_name=app.local_name, **args)
                logger.debug('Found cached geocode')
                return geocode.results
            except Geocode.DoesNotExist:
                logger.debug('Geocode not found in cache')
                pass
            except Geocode.MultipleObjectsReturned:
                # Duplicate cache rows — drop them all and recompute below.
                Geocode.recent.filter(local_name=app.local_name, **args).delete()
            results = f(providers=app.providers, **args)
            # Deduplicate: remove any result whose name matches an earlier
            # result located within 100 (presumably metres — TODO confirm
            # haversine's unit) of it.
            i = 0
            while i < len(results):
                loc, name = Point(results[i]['location']), results[i]['name']
                if any((r['name'] == name and haversine(Point(r['location']), loc) < 100) for r in results[:i]):
                    results[i:i+1] = []
                else:
                    i += 1
            # Optionally keep only results near a configured point, but only
            # if that filter leaves at least one result.
            if hasattr(app, 'prefer_results_near'):
                point = Point(app.prefer_results_near[:2])
                distance = app.prefer_results_near[2]
                filtered_results = [
                    result for result in results if
                    haversine(Point(result['location']), point) <= distance]
                if filtered_results:
                    results = filtered_results
            # Store (or refresh) the cache entry, clearing duplicates first
            # if a race created several rows for the same key.
            try:
                geocode, created = Geocode.objects.get_or_create(
                    local_name=app.local_name, **args)
            except Geocode.MultipleObjectsReturned:
                Geocode.objects.filter(local_name=app.local_name, **args).delete()
                geocode, created = Geocode.objects.get_or_create(
                    local_name=app.local_name, **args)
            geocode.results = results
            geocode.save()
            return results
        return h
    return g
@_cached(lambda query,local_name=None: {'query':query, 'local_name':local_name})
def geocode(query, providers):
    """Run ``query`` through every provider and concatenate all results."""
    combined = []
    for provider in providers:
        combined.extend(provider.geocode(query))
    return combined
@_cached(lambda lon,lat,local_name=None: {'lon': lon, 'lat':lat, 'local_name':local_name})
def reverse_geocode(lon, lat, providers):
    """Reverse-geocode the coordinate with every provider, concatenated."""
    combined = []
    for provider in providers:
        combined.extend(provider.reverse_geocode(lon, lat))
    return combined
| {
"repo_name": "mollyproject/mollyproject",
"path": "molly/geolocation/__init__.py",
"copies": "1",
"size": "2807",
"license": "apache-2.0",
"hash": -4829905433721100000,
"line_mean": 35.9342105263,
"line_max": 112,
"alpha_frac": 0.5632347702,
"autogenerated": false,
"ratio": 4.134020618556701,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.020759926099639548,
"num_lines": 76
} |
from functools import wraps
import logging
from django.conf.urls import include, patterns, url
from django.contrib import auth, messages
from django.contrib.auth.forms import AuthenticationForm
from django.core.exceptions import ValidationError
from django.core.urlresolvers import get_callable, reverse
from django.forms.models import ModelForm, inlineformset_factory
from django.http import HttpResponseRedirect
from django.shortcuts import render
from django.utils.translation import get_language, ugettext as _
import plata
from plata.shop import forms as shop_forms
logger = logging.getLogger('plata.shop.views')
def cart_not_empty(order, shop, request, **kwargs):
    """Redirect to cart if later in checkout process and cart empty"""
    if order and order.items.count():
        # Cart has items: no objection, let the checkout step proceed.
        return None
    messages.warning(request, _('Cart is empty.'))
    return shop.redirect('plata_shop_cart')
def order_already_confirmed(order, shop, request, **kwargs):
    """
    Redirect to confirmation or already paid view if the order is already
    confirmed
    """
    if not (order and order.status >= order.CONFIRMED):
        # Nothing confirmed yet -- no redirect necessary.
        return None
    if not order.balance_remaining:
        return shop.redirect('plata_order_success')
    messages.warning(request, _(
        'You have already confirmed this order earlier, but it is not'
        ' fully paid for yet.'))
    return HttpResponseRedirect(
        shop.reverse_url('plata_shop_confirmation') + '?confirmed=1')
def order_cart_validates(order, shop, request, **kwargs):
    """
    Redirect to cart if stock is insufficient and display an error message

    Only runs on GET requests so POST submissions (e.g. cart edits) are
    not interrupted by a validation redirect.
    """
    if request.method != 'GET':
        return
    try:
        order.validate(order.VALIDATE_CART)
    # `except X as e` replaces the Python-2-only `except X, e` form; it is
    # valid on Python 2.6+ and required on Python 3.
    except ValidationError as e:
        # Surface every individual validation problem to the user.
        for message in e.messages:
            messages.error(request, message)
        return HttpResponseRedirect(
            shop.reverse_url('plata_shop_cart') + '?e=1')
def order_cart_warnings(order, shop, request, **kwargs):
    """Show warnings in cart, but don't redirect (meant as a replacement for
    ``order_cart_validates``, but usable on the cart view itself)

    Skipped on non-GET requests, when the ``e`` flag is set (the user was
    already redirected here with errors) and when there is no order.
    """
    if request.method != 'GET' or request.GET.get('e') or not order:
        return
    try:
        order.validate(order.VALIDATE_CART)
    # Modernized from the Python-2-only `except X, e` syntax.
    except ValidationError as e:
        for message in e.messages:
            messages.warning(request, message)
def checkout_process_decorator(*checks):
    """
    Calls all passed checkout process decorators in turn::
        @checkout_process_decorator(order_already_confirmed,
            order_cart_validates)
    All checkout process decorators are called with the order, the shop
    instance and the request as keyword arguments. In the future, additional
    keywords might be added, your decorators should accept ``**kwargs`` as
    well for future compatibility.
    """
    def decorator(view):
        def wrapped(request, *args, **kwargs):
            shop = plata.shop_instance()
            order = shop.order_from_request(request)
            # The first check returning a truthy response short-circuits
            # the view.
            for check in checks:
                response = check(order=order, shop=shop, request=request)
                if response:
                    return response
            return view(request, order=order, *args, **kwargs)
        return wraps(view)(wrapped)
    return decorator
class Shop(object):
    """
    Plata's view and shop processing logic is contained inside this class.
    Shop needs a few model classes with relations between them:
    - Contact model linking to Django's auth.user
    - Order model with order items and an applied discount model
    - Discount model
    - Default currency for the shop (if you do not override default_currency
    in your own Shop subclass)
    Example::
        shop_instance = Shop(Contact, Order, Discount)
        urlpatterns = patterns('',
            url(r'^shop/', include(shop_instance.urls)),
        )
    """
    #: The base template used in all default checkout templates
    base_template = 'base.html'
    #: Templates for the individual checkout steps; override these (or pass
    #: them as keyword arguments to ``__init__``) to customize the markup.
    cart_template = 'plata/shop_cart.html'
    checkout_template = 'plata/shop_checkout.html'
    discount_template = 'plata/shop_discounts.html'
    confirmation_template = 'plata/shop_confirmation.html'
    success_template = 'plata/shop_order_success.html'
    failure_template = 'plata/shop_order_payment_failure.html'
    def __init__(self, contact_model, order_model, discount_model,
            default_currency=None, **kwargs):
        """
        Store the model classes, register this shop instance globally and
        apply any extra keyword arguments as attribute overrides.
        Raises ``TypeError`` for a keyword that does not match an existing
        attribute.
        """
        self.contact_model = contact_model
        self.order_model = order_model
        # The order item model is derived from the order model's ``items``
        # reverse relation, so callers never pass it explicitly.
        self.orderitem_model = self.order_model.items.related.model
        self.discount_model = discount_model
        self._default_currency = default_currency
        # Globally register the instance so that it can be accessed from
        # everywhere using plata.shop_instance()
        plata.register(self)
        for key, value in kwargs.items():
            if not hasattr(self, key):
                raise TypeError('%s() received an invalid keyword %r' % (
                    self.__class__.__name__, key))
            setattr(self, key, value)
    @property
    def urls(self):
        """Property offering access to the Shop-managed URL patterns"""
        return self.get_urls()
    def get_urls(self):
        """Return shop and payment-module URL patterns combined."""
        return self.get_shop_urls() + self.get_payment_urls()
    def get_cart_url(self):
        """URL pattern for the shopping cart view."""
        return url(r'^cart/$', checkout_process_decorator(
            order_already_confirmed
        )(self.cart), name='plata_shop_cart')
    def get_checkout_url(self):
        """URL pattern for the checkout (address) step."""
        return url(r'^checkout/$', checkout_process_decorator(
            cart_not_empty, order_already_confirmed, order_cart_validates,
        )(self.checkout), name='plata_shop_checkout')
    def get_discounts_url(self):
        """URL pattern for the discount code entry step."""
        return url(r'^discounts/$', checkout_process_decorator(
            cart_not_empty, order_already_confirmed, order_cart_validates,
        )(self.discounts), name='plata_shop_discounts')
    def get_confirmation_url(self):
        """URL pattern for the order confirmation step."""
        return url(r'^confirmation/$', checkout_process_decorator(
            cart_not_empty, order_cart_validates,
        )(self.confirmation), name='plata_shop_confirmation')
    def get_success_url(self):
        """URL pattern for the order success page."""
        return url(
            r'^order/success/$',
            self.order_success,
            name='plata_order_success'
        )
    def get_failure_url(self):
        """URL pattern for the payment failure page."""
        return url(
            r'^order/payment_failure/$',
            self.order_payment_failure,
            name='plata_order_payment_failure'
        )
    def get_new_url(self):
        """URL pattern for forcibly starting a new order."""
        return url(r'^order/new/$', self.order_new, name='plata_order_new')
    def get_shop_urls(self):
        """Assemble all core shop URL patterns."""
        return patterns(
            '',
            self.get_cart_url(),
            self.get_checkout_url(),
            self.get_discounts_url(),
            self.get_confirmation_url(),
            self.get_success_url(),
            self.get_failure_url(),
            self.get_new_url(),
        )
    def get_payment_urls(self):
        """Collect the URL patterns of every configured payment module."""
        urls = [
            url(r'', include(module.urls))
            for module in self.get_payment_modules()
        ]
        return patterns('', *urls)
    def get_payment_modules(self, request=None):
        """
        Import and return all payment modules defined in
        ``PLATA_PAYMENT_MODULES``
        If request is given only applicable modules are loaded.
        """
        all_modules = [
            get_callable(module)(self)
            for module in plata.settings.PLATA_PAYMENT_MODULES]
        if not request:
            return all_modules
        return [
            module for module in all_modules
            if module.enabled_for_request(request)]
    def default_currency(self, request=None):
        """
        Return the default currency for instantiating new orders
        Override this with your own implementation if you have a
        multi-currency shop with auto-detection of currencies.
        """
        return self._default_currency or plata.settings.CURRENCIES[0]
    def price_includes_tax(self, request=None):
        """
        Return if the shop should show prices including tax
        This returns the PLATA_PRICE_INCLUDES_TAX settings by default
        and is meant to be overridden by subclassing the Shop.
        """
        # Prefer the current order's setting when one exists, so that
        # existing orders keep their original tax display mode.
        if request:
            order = self.order_from_request(request)
            if order:
                return order.price_includes_tax
        return plata.settings.PLATA_PRICE_INCLUDES_TAX
    def set_order_on_request(self, request, order):
        """
        Helper method encapsulating the process of setting the current order
        in the session. Pass ``None`` if you want to remove any defined order
        from the session.
        """
        if order:
            request.session['shop_order'] = order.pk
        elif 'shop_order' in request.session:
            del request.session['shop_order']
    def order_from_request(self, request, create=False):
        """
        Instantiate the order instance for the current session. Optionally
        creates a new order instance if ``create=True``.
        Returns ``None`` if unable to find an order.
        """
        try:
            order_pk = request.session.get('shop_order')
            if order_pk is None:
                raise ValueError("no order in session")
            return self.order_model.objects.get(pk=order_pk)
        except AttributeError:
            # request has no session
            return None
        except (ValueError, self.order_model.DoesNotExist):
            if create:
                # Inherit currency/user from the contact when one exists.
                contact = self.contact_from_user(request.user)
                order = self.order_model.objects.create(
                    currency=getattr(
                        contact,
                        'currency',
                        self.default_currency(request)),
                    user=getattr(
                        contact,
                        'user',
                        request.user if request.user.is_authenticated()
                        else None),
                    language_code=get_language(),
                )
                self.set_order_on_request(request, order)
                return order
            return None
    def contact_from_user(self, user):
        """
        Return the contact object bound to the current user if the user is
        authenticated. Returns ``None`` if no contact exists.
        """
        if not user.is_authenticated():
            return None
        try:
            return self.contact_model.objects.get(user=user)
        except self.contact_model.DoesNotExist:
            return None
    def get_context(self, request, context, **kwargs):
        """
        Helper method returning a context dict. Override this if you
        need additional context variables.
        """
        ctx = {
            'base_template': self.base_template,
        }
        ctx.update(context)
        ctx.update(kwargs)
        return ctx
    def render(self, request, template, context):
        """
        Helper which just passes everything on to ``django.shortcuts.render``
        """
        return render(request, template, context)
    def reverse_url(self, url_name, *args, **kwargs):
        """
        Hook for customizing the reverse function
        """
        return reverse(url_name, *args, **kwargs)
    def redirect(self, url_name, *args, **kwargs):
        """
        Hook for customizing the redirect function when used as application
        content
        """
        return HttpResponseRedirect(
            self.reverse_url(url_name, *args, **kwargs))
    def cart(self, request, order):
        """Shopping cart view"""
        if not order or not order.items.count():
            return self.render_cart_empty(request, {
                'progress': 'cart',
            })
        OrderItemFormset = inlineformset_factory(
            self.order_model,
            self.orderitem_model,
            form=getattr(self, 'form', ModelForm),
            extra=0,
            fields=('quantity',),
        )
        if request.method == 'POST':
            formset = OrderItemFormset(request.POST, instance=order)
            if formset.is_valid():
                changed = False
                # We cannot directly save the formset, because the additional
                # checks in modify_item must be performed.
                for form in formset.forms:
                    if not form.instance.product_id:
                        # Product vanished from the inventory: drop the item
                        # and tell the user.
                        form.instance.delete()
                        messages.warning(request, _(
                            '%(name)s has been removed from the inventory'
                            ' and from your cart as well.') % {
                                'name': form.instance.name,
                            })
                        changed = True
                    elif (formset.can_delete
                            and formset._should_delete_form(form)):
                        if order.is_confirmed():
                            raise ValidationError(_(
                                'Cannot modify order once'
                                ' it has been confirmed.'),
                                code='order_sealed')
                        form.instance.delete()
                        changed = True
                    elif form.has_changed():
                        order.modify_item(
                            form.instance.product,
                            absolute=form.cleaned_data['quantity'],
                            recalculate=False,
                            item=form.instance,
                        )
                        changed = True
                if changed:
                    # Recalculate only once after all items were processed.
                    order.recalculate_total()
                    messages.success(request, _('The cart has been updated.'))
                if 'checkout' in request.POST:
                    return self.redirect('plata_shop_checkout')
                return HttpResponseRedirect('.')
        else:
            formset = OrderItemFormset(instance=order)
        return self.render_cart(request, {
            'order': order,
            'orderitemformset': formset,
            'progress': 'cart',
        })
    def render_cart_empty(self, request, context):
        """Renders a cart-is-empty page"""
        context.update({'empty': True})
        return self.render(
            request, self.cart_template, self.get_context(request, context))
    def render_cart(self, request, context):
        """Renders the shopping cart"""
        return self.render(
            request, self.cart_template, self.get_context(request, context))
    def checkout_form(self, request, order):
        """Returns the address form used in the first checkout step"""
        # Only import plata.contact if necessary and if this method isn't
        # overridden
        from plata.contact.forms import CheckoutForm
        return CheckoutForm
    def get_authentication_form(self, **kwargs):
        """Returns the login form shown next to the checkout form."""
        return AuthenticationForm(**kwargs)
    def checkout(self, request, order):
        """Handles the first step of the checkout process"""
        if not request.user.is_authenticated():
            if request.method == 'POST' and '_login' in request.POST:
                loginform = self.get_authentication_form(
                    data=request.POST,
                    prefix='login')
                if loginform.is_valid():
                    # Bind the freshly logged-in user to the current order.
                    user = loginform.get_user()
                    auth.login(request, user)
                    order.user = user
                    order.save()
                    return HttpResponseRedirect('.')
            else:
                loginform = self.get_authentication_form(prefix='login')
        else:
            loginform = None
        if order.status < order.CHECKOUT:
            order.update_status(order.CHECKOUT, 'Checkout process started')
        OrderForm = self.checkout_form(request, order)
        orderform_kwargs = {
            'prefix': 'order',
            'instance': order,
            'request': request,
            'shop': self,
        }
        if request.method == 'POST' and '_checkout' in request.POST:
            orderform = OrderForm(request.POST, **orderform_kwargs)
            if orderform.is_valid():
                orderform.save()
                # Skip the discount step entirely when no discounts exist.
                if self.include_discount_step(request):
                    return self.redirect('plata_shop_discounts')
                else:
                    return self.redirect('plata_shop_confirmation')
        else:
            orderform = OrderForm(**orderform_kwargs)
        return self.render_checkout(request, {
            'order': order,
            'loginform': loginform,
            'orderform': orderform,
            'progress': 'checkout',
        })
    def render_checkout(self, request, context):
        """Renders the checkout page"""
        return self.render(
            request,
            self.checkout_template,
            self.get_context(request, context)
        )
    def include_discount_step(self, request):
        """Whether the discount entry step should appear in the checkout."""
        return self.discount_model.objects.exists()
    def discounts_form(self, request, order):
        """Returns the discount form"""
        return shop_forms.DiscountForm
    def discounts(self, request, order):
        """Handles the discount code entry page"""
        if not self.include_discount_step(request):
            return self.redirect('plata_shop_confirmation')
        DiscountForm = self.discounts_form(request, order)
        kwargs = {
            'order': order,
            'discount_model': self.discount_model,
            'request': request,
            'shop': self,
        }
        if request.method == 'POST':
            form = DiscountForm(request.POST, **kwargs)
            if form.is_valid():
                form.save()
                if 'proceed' in request.POST:
                    return self.redirect('plata_shop_confirmation')
                return HttpResponseRedirect('.')
        else:
            form = DiscountForm(**kwargs)
        # Refresh totals so the page reflects any newly applied discount.
        order.recalculate_total()
        return self.render_discounts(request, {
            'order': order,
            'form': form,
            'progress': 'discounts',
        })
    def render_discounts(self, request, context):
        """Renders the discount code entry page"""
        return self.render(
            request,
            self.discount_template,
            self.get_context(request, context)
        )
    def confirmation_form(self, request, order):
        """Returns the confirmation and payment module selection form"""
        return shop_forms.ConfirmationForm
    def confirmation(self, request, order):
        """
        Handles the order confirmation and payment module selection checkout
        step
        Hands off processing to the selected payment module if confirmation
        was successful.
        """
        order.recalculate_total()
        ConfirmationForm = self.confirmation_form(request, order)
        kwargs = {
            'order': order,
            'request': request,
            'shop': self,
        }
        if request.method == 'POST':
            form = ConfirmationForm(request.POST, **kwargs)
            if form.is_valid():
                # The form hands off to the selected payment module here.
                return form.process_confirmation()
        else:
            form = ConfirmationForm(**kwargs)
        return self.render_confirmation(request, {
            'order': order,
            'form': form,
            # Whether the order had already been confirmed.
            'confirmed': request.GET.get('confirmed', False),
            'progress': 'confirmation',
        })
    def render_confirmation(self, request, context):
        """Renders the confirmation page"""
        return self.render(
            request,
            self.confirmation_template,
            self.get_context(request, context)
        )
    def order_success(self, request):
        """
        Handles order successes (e.g. when an order has been successfully
        paid for)
        """
        order = self.order_from_request(request)
        if not order:
            return self.order_new(request)
        if not order.balance_remaining:
            # Create a new, empty order right away. It makes no sense
            # to keep the completed order around anymore.
            self.set_order_on_request(request, order=None)
        return self.render(
            request,
            self.success_template,
            self.get_context(
                request, {
                    'order': order,
                    'progress': 'success',
                }
            )
        )
    def order_payment_failure(self, request):
        """Handles order payment failures"""
        order = self.order_from_request(request)
        logger.warn('Order payment failure for %s' % order.order_id)
        if plata.settings.PLATA_STOCK_TRACKING:
            # Release stock that was reserved for the failed payment.
            StockTransaction = plata.stock_model()
            for transaction in order.stock_transactions.filter(
                    type=StockTransaction.PAYMENT_PROCESS_RESERVATION):
                transaction.delete()
        order.payments.pending().delete()
        if order.payments.authorized().exists():
            # There are authorized order payments around!
            messages.warning(request, _('Payment failed, please try again.'))
            logger.warn(
                'Order %s is already partially paid, but payment'
                ' failed anyway!' % order.order_id)
        elif order.status > order.CHECKOUT and order.status < order.PAID:
            order.update_status(
                order.CHECKOUT,
                'Order payment failure, going back to checkout')
            messages.info(request, _(
                'Payment failed; you can continue editing your order and'
                ' try again.'))
        return self.render(
            request,
            self.failure_template,
            self.get_context(
                request, {
                    'order': order,
                    'progress': 'failure',
                }
            )
        )
    def order_new(self, request):
        """
        Forcibly create a new order and redirect user either to the frontpage
        or to the URL passed as ``next`` GET parameter
        """
        self.set_order_on_request(request, order=None)
        next = request.GET.get('next')
        if next:
            return HttpResponseRedirect(next)
        return HttpResponseRedirect('/')
| {
"repo_name": "armicron/plata",
"path": "plata/shop/views.py",
"copies": "3",
"size": "22496",
"license": "bsd-3-clause",
"hash": 1394221042862476300,
"line_mean": 32.6766467066,
"line_max": 78,
"alpha_frac": 0.5683677098,
"autogenerated": false,
"ratio": 4.64026402640264,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.6708631736202639,
"avg_score": null,
"num_lines": null
} |
from functools import wraps
import logging
from django.contrib.auth.models import User
from django.core.paginator import Paginator, InvalidPage, EmptyPage
from django.db.models import Count
from django.http import HttpResponse, Http404
from django.shortcuts import render_to_response
from django.template import RequestContext
try:
from django.views.decorators.csrf import csrf_exempt
except ImportError:
from django.contrib.csrf.middleware import csrf_exempt
from giraffe.publisher.models import Subscription, Asset
from giraffe.publisher import tasks
def test_mq(request):
    """
    Ping the task queue and report the outcome as plain text.

    Returns ``'OK'`` when the round-trip succeeds, otherwise the exception
    type and message.
    """
    try:
        publ = tasks.ping.get_publisher(connect_timeout=10)
        res = tasks.ping.apply_async(args=(), publisher=publ)
        # We only care that the round-trip completes; the result value
        # itself is discarded.
        res.get()
    # `except X as e` replaces the Python-2-only comma form; valid 2.6+/3.
    except Exception as exc:
        return HttpResponse('%s: %s' % (type(exc).__name__, str(exc)),
            content_type='text/plain')
    return HttpResponse('OK', content_type='text/plain')
def index(request, page=1, template=None, content_type=None):
    """Render one page of the blog owner's top-level, public assets."""
    blogger = User.objects.all().order_by('id')[0].person
    assets = (
        Asset.objects.all().order_by('-published')
        .filter(author=blogger)
        .filter(in_reply_to=None)
        # TODO: get the assets that the user is allowed to see
        .filter(private_to=None)
        .annotate(comment_count=Count('replies_in_thread'))
    )
    pager = Paginator(assets, 10)
    try:
        assets_page = pager.page(page)
    except (EmptyPage, InvalidPage):
        raise Http404
    if template is None:
        template = 'publisher/index.html'
    data = {
        'assets': assets_page,
    }
    return render_to_response(template, data,
        context_instance=RequestContext(request), mimetype=content_type)
def asset(request, slug, template=None):
    """Render a single published asset looked up by its slug."""
    annotated = Asset.objects.annotate(comment_count=Count('replies_in_thread'))
    try:
        asset = annotated.get(slug=slug)
    except Asset.DoesNotExist:
        raise Http404
    # TODO: let users who are allowed to see the asset see it
    if asset.private_to.count():
        raise Http404
    if template is None:
        template = 'publisher/asset.html'
    data = {
        'asset': asset,
    }
    return render_to_response(template, data,
        context_instance=RequestContext(request))
def oops(func):
    """
    View decorator that logs any exception raised by the wrapped view
    before re-raising it unchanged.
    """
    @wraps(func)
    def otherfunc(request, *args, **kwargs):
        try:
            return func(request, *args, **kwargs)
        # `except X as e` replaces the Python-2-only `except X, e` form.
        except Exception as exc:
            logging.exception(exc)
            raise
    return otherfunc
@csrf_exempt
@oops
def subscribe(request):
    """
    PubSubHubbub subscription endpoint.

    Accepts POSTed ``hub.*`` parameters, validates them, and verifies the
    (un)subscription either asynchronously (202) or synchronously (204).
    Returns a plain-text 4xx/405 response for invalid requests.
    """
    log = logging.getLogger("%s.subscribe" % __name__)
    if request.method != 'POST':
        return HttpResponse('POST required', status=405, content_type='text/plain')
    try:
        callback = request.POST['hub.callback']
        mode = request.POST['hub.mode']
        topic = request.POST['hub.topic']
    # `except X as e` replaces the Python-2-only comma form; valid 2.6+/3.
    except KeyError as exc:
        log.debug("Parameter %s required", str(exc))
        return HttpResponse('Parameter %s required' % str(exc), status=400, content_type='text/plain')
    verify = request.POST.getlist('hub.verify')
    if not verify:
        log.debug("Parameter verify required")
        return HttpResponse('Parameter verify required', status=400, content_type='text/plain')
    lease_secs = request.POST.get('hub.lease_seconds')
    secret = request.POST.get('hub.secret')
    verify_token = request.POST.get('hub.verify_token')
    try:
        sub = Subscription.objects.get(callback=callback)
    except Subscription.DoesNotExist:
        if mode == 'unsubscribe':
            # Already gone!
            return HttpResponse('', status=204)
        # NOTE(review): ``sub`` is never saved or read afterwards — the
        # verification task presumably creates the record; confirm before
        # removing.
        sub = Subscription(callback=callback)
    kwargs = {
        'callback': callback,
        'mode': mode,
        'topic': topic,
        'lease_seconds': lease_secs,
        'secret': secret,
        'verify_token': verify_token,
    }
    if mode not in ('subscribe', 'unsubscribe'):
        log.debug("Unknown mode %r", mode)
        return HttpResponse('Unknown mode %r' % mode, status=400, content_type='text/plain')
    task = tasks.verify_subscription
    if 'async' in verify:
        task.delay(**kwargs)
        return HttpResponse('', status=202, content_type='text/plain')
    elif 'sync' in verify:
        try:
            task(**kwargs)
        except Exception as exc:
            log.debug("%s: %s", type(exc).__name__, str(exc))
            return HttpResponse('%s: %s' % (type(exc).__name__, str(exc)), status=400, content_type='text/plain')
        return HttpResponse('', status=204)
    log.debug("This should not have happened")
    return HttpResponse("No supported verification modes ('async' and 'sync') in %r" % verify, status=400, content_type='text/plain')
| {
"repo_name": "markpasc/giraffe",
"path": "giraffe/publisher/views.py",
"copies": "1",
"size": "4753",
"license": "mit",
"hash": 4608632564520997000,
"line_mean": 31.1148648649,
"line_max": 133,
"alpha_frac": 0.6482221755,
"autogenerated": false,
"ratio": 3.9575353871773524,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.0008700931989456313,
"num_lines": 148
} |
from functools import wraps
import logging
from django import forms
from django.contrib import auth, messages
from django.contrib.auth.forms import AuthenticationForm
from django.contrib.auth.models import User
from django.core.exceptions import ValidationError
from django.core.urlresolvers import get_callable, reverse
from django.forms.models import inlineformset_factory
from django.http import HttpResponseRedirect
from django.shortcuts import redirect, render
from django.utils.translation import ugettext as _
import plata
from plata.shop import forms as shop_forms
from plata.shop import signals
logger = logging.getLogger('plata.shop.views')
def cart_not_empty(order, request, **kwargs):
    """Redirect to cart if later in checkout process and cart empty"""
    if order and order.items.count():
        # Cart has items: allow the checkout step to proceed.
        return None
    messages.warning(request, _('Cart is empty.'))
    return HttpResponseRedirect(reverse('plata_shop_cart'))
def order_already_confirmed(order, request, **kwargs):
    """Redirect to confirmation or already paid view if the order is already confirmed"""
    if not (order and order.status >= order.CONFIRMED):
        # Order missing or not confirmed yet: no redirect needed.
        return None
    if not order.balance_remaining:
        return redirect('plata_order_success')
    messages.warning(request,
        _('You have already confirmed this order earlier, but it is not fully paid for yet.'))
    return HttpResponseRedirect(reverse('plata_shop_confirmation') + '?confirmed=1')
def order_cart_validates(order, request, **kwargs):
    """Redirect to cart if stock is insufficient and display an error message

    Only runs on GET requests so POST submissions are not interrupted.
    """
    if request.method != 'GET':
        return
    try:
        order.validate(order.VALIDATE_CART)
    # `except X as e` replaces the Python-2-only `except X, e` form; it is
    # valid on Python 2.6+ and required on Python 3.
    except ValidationError as e:
        for message in e.messages:
            messages.error(request, message)
        return HttpResponseRedirect(reverse('plata_shop_cart'))
def checkout_process_decorator(*checks):
    """
    Calls all passed checkout process decorators in turn::
        @checkout_process_decorator(order_already_confirmed, order_cart_validates)
        def mymethod(self...):
            # Whatever
    """
    def decorator(view):
        def wrapped(request, *args, **kwargs):
            shop = plata.shop_instance()
            order = shop.order_from_request(request)
            # The first check returning a truthy response short-circuits
            # the view.
            for check in checks:
                response = check(order=order, shop=shop, request=request)
                if response:
                    return response
            return view(request, order=order, *args, **kwargs)
        return wraps(view)(wrapped)
    return decorator
class Shop(object):
"""
Plata's view and shop processing logic is contained inside this class.
Shop needs a few model classes with relations between them:
- Contact model linking to Django's auth.user
- Order model with order items and an applied discount model
- Discount model
- Default currency for the shop (if you do not override default_currency
in your own Shop subclass)
Example::
shop_instance = Shop(Contact, Order, Discount)
urlpatterns = patterns('',
url(r'^shop/', include(shop_instance.urls)),
)
"""
#: The base template used in all default checkout templates
base_template = 'base.html'
def __init__(self, contact_model, order_model, discount_model,
default_currency=None, **kwargs):
self.contact_model = contact_model
self.order_model = order_model
self.orderitem_model = self.order_model.items.related.model
self.discount_model = discount_model
self._default_currency = default_currency
# Globally register the instance so that it can be accessed from
# everywhere using plata.shop_instance()
plata.register(self)
for key, value in kwargs.items():
if not hasattr(self, key):
raise TypeError('%s() received an invalid keyword %r' % (
self.__class__.__name__, key))
setattr(self, key, value)
@property
def urls(self):
"""Property offering access to the Shop-managed URL patterns"""
return self.get_urls()
def get_urls(self):
return self.get_shop_urls() + self.get_payment_urls()
def get_shop_urls(self):
from django.conf.urls.defaults import patterns, url
return patterns('',
url(r'^cart/$',
checkout_process_decorator(order_already_confirmed)(self.cart),
name='plata_shop_cart'),
url(r'^checkout/$', checkout_process_decorator(
cart_not_empty, order_already_confirmed, order_cart_validates,
)(self.checkout), name='plata_shop_checkout'),
url(r'^discounts/$', checkout_process_decorator(
cart_not_empty, order_already_confirmed, order_cart_validates,
)(self.discounts), name='plata_shop_discounts'),
url(r'^confirmation/$', checkout_process_decorator(
cart_not_empty, order_cart_validates,
)(self.confirmation), name='plata_shop_confirmation'),
url(r'^order/success/$',
self.order_success, name='plata_order_success'),
url(r'^order/payment_failure/$',
self.order_payment_failure, name='plata_order_payment_failure'),
url(r'^order/new/$',
self.order_new, name='plata_order_new'),
)
def get_payment_urls(self):
from django.conf.urls.defaults import patterns, url, include
urls = [url(r'', include(module.urls)) for module in self.get_payment_modules()]
return patterns('', *urls)
def get_payment_modules(self, request=None):
"""
Import and return all payment modules defined in ``PLATA_PAYMENT_MODULES``
If request is given only aplicable modules are loaded.
"""
all_modules = [get_callable(module)(self) for module in plata.settings.PLATA_PAYMENT_MODULES]
if not request:
return all_modules
return filter(lambda item: item.enabled_for_request(request), all_modules)
def default_currency(self, request=None):
"""
Return the default currency for instantiating new orders
Override this with your own implementation if you have a multi-currency
shop with auto-detection of currencies.
"""
return self._default_currency or plata.settings.CURRENCIES[0]
def set_order_on_request(self, request, order):
"""
Helper method encapsulating the process of setting the current order
in the session. Pass ``None`` if you want to remove any defined order
from the session.
"""
if order:
request.session['shop_order'] = order.pk
elif 'shop_order' in request.session:
del request.session['shop_order']
def order_from_request(self, request, create=False):
"""
Instantiate the order instance for the current session. Optionally creates
a new order instance if ``create=True``.
Returns ``None`` if unable to find an offer.
"""
try:
order_pk = request.session.get('shop_order')
if order_pk is None:
raise ValueError("no order in session")
return self.order_model.objects.get(pk=order_pk)
except (ValueError, self.order_model.DoesNotExist):
if create:
contact = self.contact_from_user(request.user)
order = self.order_model.objects.create(
currency=getattr(contact, 'currency', self.default_currency(request)),
user=getattr(contact, 'user',
request.user if request.user.is_authenticated() else None),
language_code=getattr(request, 'LANGUAGE_CODE', ''),
)
self.set_order_on_request(request, order)
return order
return None
def contact_from_user(self, user):
    """
    Return the contact object bound to the current user if the user is
    authenticated. Returns ``None`` if no contact exists.
    """
    if user.is_authenticated():
        try:
            return self.contact_model.objects.get(user=user)
        except self.contact_model.DoesNotExist:
            pass
    return None
def get_context(self, request, context, **kwargs):
    """
    Helper method returning a context dict. Override this if you
    need additional context variables.
    """
    # Later updates win: ``context`` may override ``base_template`` and
    # ``kwargs`` may override anything from ``context``.
    result = dict(base_template=self.base_template)
    result.update(context)
    result.update(kwargs)
    return result
def render(self, request, template, context):
    """
    Helper which just passes everything on to ``django.shortcuts.render``
    """
    # The bare ``render`` below resolves to the module-level import of
    # django.shortcuts.render, not to this method. Override this method
    # to customize response rendering globally.
    return render(request, template, context)
def cart(self, request, order):
    """Shopping cart view"""
    # No order or an order without items renders the "empty cart" page.
    if not order or not order.items.count():
        return self.render_cart_empty(request, {
            'progress': 'cart',
        })
    # Inline formset over the order's items; only the quantity is editable.
    OrderItemFormset = inlineformset_factory(
        self.order_model,
        self.orderitem_model,
        extra=0,
        fields=('quantity',),
    )
    if request.method == 'POST':
        formset = OrderItemFormset(request.POST, instance=order)
        if formset.is_valid():
            changed = False
            # We cannot directly save the formset, because the additional
            # checks in modify_item must be performed.
            for form in formset.forms:
                if formset.can_delete and formset._should_delete_form(form):
                    # Deletion is expressed as setting the quantity to 0.
                    order.modify_item(form.instance.product,
                        absolute=0,
                        recalculate=False)
                    changed = True
                elif form.has_changed():
                    order.modify_item(form.instance.product,
                        absolute=form.cleaned_data['quantity'],
                        recalculate=False)
                    changed = True
            # Totals are recalculated once at the end, not per item.
            if changed:
                order.recalculate_total()
                messages.success(request, _('The cart has been updated.'))
            if 'checkout' in request.POST:
                return redirect('plata_shop_checkout')
            # Redirect back to the cart page (PRG pattern).
            return HttpResponseRedirect('.')
    else:
        formset = OrderItemFormset(instance=order)
    return self.render_cart(request, {
        'order': order,
        'orderitemformset': formset,
        'progress': 'cart',
    })
def render_cart_empty(self, request, context):
    """Renders a cart-is-empty page"""
    # The ``empty`` flag lets the cart template switch to its empty state.
    context['empty'] = True
    ctx = self.get_context(request, context)
    return self.render(request, 'plata/shop_cart.html', ctx)
def render_cart(self, request, context):
    """Renders the shopping cart"""
    ctx = self.get_context(request, context)
    return self.render(request, 'plata/shop_cart.html', ctx)
def checkout_form(self, request, order):
    """Returns the address form used in the first checkout step"""
    # Only import plata.contact if necessary and if this method isn't overridden
    # (deferring the import avoids a hard dependency on plata.contact for
    # shops that override this hook).
    from plata.contact.forms import CheckoutForm
    return CheckoutForm
def checkout(self, request, order):
    """Handles the first step of the checkout process"""
    # Anonymous visitors may log in inline; a successful login binds the
    # current order to the authenticated user.
    if not request.user.is_authenticated():
        if request.method == 'POST' and '_login' in request.POST:
            loginform = AuthenticationForm(data=request.POST, prefix='login')
            if loginform.is_valid():
                user = loginform.get_user()
                auth.login(request, user)
                order.user = user
                order.save()
                # Redirect back so the page is re-rendered for the
                # now-authenticated user (PRG pattern).
                return HttpResponseRedirect('.')
        else:
            loginform = AuthenticationForm(prefix='login')
    else:
        loginform = None
    # Move the order into the CHECKOUT state exactly once.
    if order.status < order.CHECKOUT:
        order.update_status(order.CHECKOUT, 'Checkout process started')
    OrderForm = self.checkout_form(request, order)
    orderform_kwargs = {
        'prefix': 'order',
        'instance': order,
        'request': request,
        'shop': self,
    }
    # ``_checkout`` distinguishes the address form submit from the
    # inline login submit handled above.
    if request.method == 'POST' and '_checkout' in request.POST:
        orderform = OrderForm(request.POST, **orderform_kwargs)
        if orderform.is_valid():
            orderform.save()
            return redirect('plata_shop_discounts')
    else:
        orderform = OrderForm(**orderform_kwargs)
    return self.render_checkout(request, {
        'order': order,
        'loginform': loginform,
        'orderform': orderform,
        'progress': 'checkout',
    })
def render_checkout(self, request, context):
    """Renders the checkout page"""
    ctx = self.get_context(request, context)
    return self.render(request, 'plata/shop_checkout.html', ctx)
def discounts_form(self, request, order):
    """Returns the discount form"""
    # Hook: override to swap in a custom discount-code form class.
    return shop_forms.DiscountForm
def discounts(self, request, order):
    """Handles the discount code entry page"""
    DiscountForm = self.discounts_form(request, order)
    kwargs = {
        'order': order,
        'discount_model': self.discount_model,
        'request': request,
        'shop': self,
    }
    if request.method == 'POST':
        form = DiscountForm(request.POST, **kwargs)
        if form.is_valid():
            # Applying the discount happens inside the form's save().
            form.save()
            if 'proceed' in request.POST:
                return redirect('plata_shop_confirmation')
            # Otherwise stay on this page to allow entering more codes.
            return HttpResponseRedirect('.')
    else:
        form = DiscountForm(**kwargs)
    # Recalculate so the displayed total reflects applied discounts.
    order.recalculate_total()
    return self.render_discounts(request, {
        'order': order,
        'form': form,
        'progress': 'discounts',
    })
def render_discounts(self, request, context):
    """Renders the discount code entry page"""
    ctx = self.get_context(request, context)
    return self.render(request, 'plata/shop_discounts.html', ctx)
def confirmation_form(self, request, order):
    """Returns the confirmation and payment module selection form"""
    # Hook: override to customize the final confirmation form class.
    return shop_forms.ConfirmationForm
def confirmation(self, request, order):
    """
    Handles the order confirmation and payment module selection checkout step

    Hands off processing to the selected payment module if confirmation was
    successful.
    """
    # Make sure the confirmed total is up to date before showing it.
    order.recalculate_total()
    ConfirmationForm = self.confirmation_form(request, order)
    kwargs = {
        'order': order,
        'request': request,
        'shop': self,
    }
    if request.method == 'POST':
        form = ConfirmationForm(request.POST, **kwargs)
        if form.is_valid():
            # The form returns the payment module's response
            # (typically a redirect to the payment provider).
            return form.process_confirmation()
    else:
        form = ConfirmationForm(**kwargs)
    return self.render_confirmation(request, {
        'order': order,
        'form': form,
        'confirmed': request.GET.get('confirmed', False), # Whether the order had
            # already been confirmed
        'progress': 'confirmation',
    })
def render_confirmation(self, request, context):
    """Renders the confirmation page"""
    ctx = self.get_context(request, context)
    return self.render(request, 'plata/shop_confirmation.html', ctx)
def order_success(self, request):
    """Handles order successes (e.g. when an order has been successfully paid for)"""
    order = self.order_from_request(request)
    # No current order: start a fresh one and redirect away.
    if not order:
        return self.order_new(request)
    if not order.balance_remaining:
        # Create a new, empty order right away. It makes no sense
        # to keep the completed order around anymore.
        self.set_order_on_request(request, order=None)
    return self.render(request, 'plata/shop_order_success.html',
        self.get_context(request, {
            'order': order,
            'progress': 'success',
        }))
def order_payment_failure(self, request):
    """Handles order payment failures

    Releases stock reservations made for the failed payment attempt,
    deletes pending payments and, unless the order is already (partially)
    paid, moves it back to the CHECKOUT state so the user can retry.
    """
    order = self.order_from_request(request)
    # ``Logger.warn`` is a deprecated alias; use ``warning`` with lazy
    # %-style arguments instead of eager string interpolation.
    logger.warning('Order payment failure for %s', order.order_id)
    if plata.settings.PLATA_STOCK_TRACKING:
        # Release stock that was reserved for the payment attempt.
        for transaction in order.stock_transactions.filter(
                type=order.stock_transactions.model.PAYMENT_PROCESS_RESERVATION):
            transaction.delete()
    order.payments.pending().delete()
    if order.payments.authorized().exists():
        # There are authorized order payments around!
        messages.warning(request, _('Payment failed, please try again.'))
        logger.warning(
            'Order %s is already partially paid, but payment failed anyway!',
            order.order_id)
    elif order.status > order.CHECKOUT and order.status < order.PAID:
        order.update_status(order.CHECKOUT, 'Order payment failure, going back to checkout')
        messages.info(request, _('Payment failed; you can continue editing your order and try again.'))
    return self.render(request, 'plata/shop_order_payment_failure.html',
        self.get_context(request, {
            'order': order,
            'progress': 'failure',
        }))
def order_new(self, request):
    """
    Forcibly create a new order and redirect user either to the frontpage
    or to the URL passed as ``next`` GET parameter
    """
    self.set_order_on_request(request, order=None)
    # Renamed from ``next`` to avoid shadowing the builtin.
    next_url = request.GET.get('next')
    if next_url:
        # NOTE(review): ``next`` is used unvalidated -- this is an
        # open-redirect vector; consider restricting to same-origin paths.
        return HttpResponseRedirect(next_url)
    return HttpResponseRedirect('/')
| {
"repo_name": "allink/plata",
"path": "plata/shop/views.py",
"copies": "1",
"size": "18228",
"license": "bsd-3-clause",
"hash": 8086783992078865000,
"line_mean": 35.3832335329,
"line_max": 107,
"alpha_frac": 0.5895874479,
"autogenerated": false,
"ratio": 4.575301204819277,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.0014335571070806118,
"num_lines": 501
} |
from functools import wraps
import logging
from flask import abort
from flask import Flask
from flask import jsonify
from flask import make_response
from flask import request
from flask import Response
from flask import url_for
from flask.views import MethodView
from peewee import *
from playhouse.flask_utils import get_object_or_404
from playhouse.flask_utils import PaginatedQuery
from werkzeug.exceptions import NotFound
from scout.constants import PROTECTED_KEYS
from scout.constants import RANKING_CHOICES
from scout.constants import SEARCH_BM25
from scout.exceptions import error
from scout.models import database
from scout.models import Attachment
from scout.models import BlobData
from scout.models import Document
from scout.models import Index
from scout.models import IndexDocument
from scout.models import Metadata
from scout.search import DocumentSearch
from scout.serializers import AttachmentSerializer
from scout.serializers import DocumentSerializer
from scout.serializers import IndexSerializer
from scout.validator import RequestValidator
# Module-level singletons shared by all views below.
attachment_serializer = AttachmentSerializer()
document_serializer = DocumentSerializer()
index_serializer = IndexSerializer()
engine = DocumentSearch()  # full-text search helper
validator = RequestValidator()  # request payload parsing/validation
logger = logging.getLogger('scout')
def register_views(app):
    """Register all Scout API views on *app*, honoring URL_PREFIX config."""
    prefix = app.config.get('URL_PREFIX') or ''
    if prefix:
        # Normalize to a single leading slash, no trailing slash.
        prefix = '/%s' % prefix.strip('/')
    # Register views and request handlers.
    index_view = IndexView(app)
    index_view.register('index_view', '%s/' % prefix)
    document_view = DocumentView(app)
    document_view.register('document_view', '%s/documents/' % prefix)
    attachment_view = AttachmentView(app)
    # ``path`` converter lets attachment filenames contain slashes.
    attachment_view.register(
        'attachment_view',
        '%s/documents/<document_id>/attachments/' % prefix,
        'path')
    # Download endpoint is a plain function view, wrapped with auth.
    app.add_url_rule(
        '%s/documents/<document_id>/attachments/<path:pk>/download/' % prefix,
        view_func=authentication(app)(attachment_download))
def authentication(app):
    """Decorator factory enforcing the app's configured API key.

    When no ``AUTHENTICATION`` key is configured the wrapped view is
    called unconditionally; otherwise the key must be supplied via the
    ``key`` request header or query-string parameter.
    """
    def decorator(fn):
        @wraps(fn)
        def inner(*args, **kwargs):
            api_key = app.config.get('AUTHENTICATION')
            if api_key:
                # Check headers and request.args for `key=<key>`.
                key = request.headers.get('key') or request.args.get('key')
                if key != api_key:
                    logger.info('Authentication failure for key: %s', key)
                    return 'Invalid API key', 401
            return fn(*args, **kwargs)
        return inner
    return decorator
class ScoutView(object):
    """Base class for Scout API views.

    Subclasses implement detail/list_view/create/update/delete;
    :meth:`register` wires them onto the Flask app behind API-key auth.
    """
    def __init__(self, app):
        self.app = app
        # Default page size, overridable via the PAGINATE_BY app config.
        self.paginate_by = app.config.get('PAGINATE_BY') or 50
    def register(self, name, url, pk_type=None):
        """Add list/create routes at *url* and detail/update/delete routes
        at ``url + '<pk>/'`` (optionally using the *pk_type* URL converter,
        e.g. ``'path'`` for slash-containing keys)."""
        auth = authentication(self.app)
        base_views = (
            (self.list_view, 'GET', name),
            (self.create, 'POST', name + '_create'))
        for view, method, view_name in base_views:
            self.app.add_url_rule(url, view_name, view_func=auth(view),
                                  methods=[method])
        if pk_type is None:
            detail_url = url + '<pk>/'
        else:
            detail_url = url + '<%s:pk>/' % pk_type
        name += '_detail'
        detail_views = (
            (self.detail, ['GET'], name),
            (self.update, ['POST', 'PUT'], name + '_update'),
            (self.delete, ['DELETE'], name + '_delete'))
        for view, methods, view_name in detail_views:
            self.app.add_url_rule(detail_url, view_name, view_func=auth(view),
                                  methods=methods)
    def paginated_query(self, query, paginate_by=None):
        """Wrap *query* in a PaginatedQuery using the configured page size."""
        return PaginatedQuery(
            query,
            paginate_by=paginate_by or self.paginate_by,
            check_bounds=False)
    def detail(self):
        raise NotImplementedError
    def list_view(self):
        raise NotImplementedError
    def create(self):
        raise NotImplementedError
    def update(self):
        raise NotImplementedError
    def delete(self):
        raise NotImplementedError
    def _search_response(self, index, allow_blank, document_count):
        """Run a search restricted to *index* and build the common paginated
        response dict. With ``allow_blank`` a missing ``q`` searches all
        documents (``'*'``); otherwise it is an error."""
        ranking = request.args.get('ranking') or SEARCH_BM25
        if ranking not in RANKING_CHOICES:
            error('Unrecognized "ranking" value. Valid options are %s' %
                  ', '.join(RANKING_CHOICES))
        ordering = request.args.getlist('ordering')
        # Remaining GET parameters become metadata filters.
        filters = validator.extract_get_params()
        q = request.args.get('q', '').strip()
        if not q and not allow_blank:
            error('Search term is required.')
        query = engine.search(q or '*', index, ranking, ordering, **filters)
        pq = self.paginated_query(query)
        response = {
            'document_count': document_count,
            'documents': document_serializer.serialize_query(
                pq.get_object_list(),
                include_score=True if q else False),
            'filtered_count': query.count(),
            'filters': filters,
            'ordering': ordering,
            'page': pq.get_page(),
            'pages': pq.get_page_count(),
        }
        # Ranking/search-term info only makes sense for a real query.
        if q:
            response.update(
                ranking=ranking,
                search_term=q)
        return response
#
# Views.
#
class IndexView(ScoutView):
    """CRUD + search endpoints for search indexes."""
    def detail(self, pk):
        """Show one index (looked up by name) plus a search over it."""
        index = get_object_or_404(Index, Index.name == pk)
        document_count = index.documents.count()
        response = {'name': index.name, 'id': index.id}
        # allow_blank=True: a blank query lists all documents in the index.
        response.update(self._search_response(index, True, document_count))
        return jsonify(response)
    def list_view(self):
        """List all indexes with their document counts, sortable via
        the ``ordering`` query parameter (default: by name)."""
        query = (Index
                 .select(
                     Index,
                     fn.COUNT(IndexDocument.id).alias('document_count'))
                 .join(IndexDocument, JOIN.LEFT_OUTER)
                 .group_by(Index))
        ordering = request.args.getlist('ordering')
        query = engine.apply_sorting(query, ordering, {
            'name': Index.name,
            'document_count': SQL('document_count'),
            'id': Index.id}, 'name')
        pq = self.paginated_query(query)
        return jsonify({
            'indexes': [index_serializer.serialize(index)
                        for index in pq.get_object_list()],
            'ordering': ordering,
            'page': pq.get_page(),
            'pages': pq.get_page_count()})
    def create(self):
        """Create an index from POSTed ``name``; 'already exists' on dupes."""
        data = validator.parse_post(['name'])
        with database.atomic():
            try:
                index = Index.create(name=data['name'])
            except IntegrityError:
                # Unique constraint on name.
                error('"%s" already exists.' % data['name'])
            else:
                logger.info('Created new index "%s"' % index.name)
        return self.detail(index.name)
    def update(self, pk):
        """Rename an index; rejects names already in use."""
        index = get_object_or_404(Index, Index.name == pk)
        data = validator.parse_post(['name'])
        index.name = data['name']
        with database.atomic():
            try:
                index.save()
            except IntegrityError:
                error('"%s" is already in use.' % index.name)
            else:
                logger.info('Updated index "%s"' % index.name)
        return self.detail(index.name)
    def delete(self, pk):
        """Delete an index and unlink (not delete) its documents."""
        index = get_object_or_404(Index, Index.name == pk)
        with database.atomic():
            # Remove only the index-document links; documents survive.
            ndocs = (IndexDocument
                     .delete()
                     .where(IndexDocument.index == index)
                     .execute())
            index.delete_instance()
        logger.info('Deleted index "%s" and unlinked %s associated documents.',
                    index.name, ndocs)
        return jsonify({'success': True})
class _FileProcessingView(ScoutView):
    """Shared helpers for views that resolve documents and attach files."""
    def _get_document(self, pk):
        """Resolve *pk* to a Document: numeric pks are tried as primary key
        first, then any pk is tried as the document identifier (404 if
        neither matches)."""
        if isinstance(pk, int) or (pk and pk.isdigit()):
            query = Document.all().where(Document._meta.primary_key == pk)
            try:
                return query.get()
            except Document.DoesNotExist:
                # Fall through to the identifier lookup below.
                pass
        return get_object_or_404(Document.all(), Document.identifier == pk)
    def attach_files(self, document):
        """Attach every uploaded file on the current request to *document*;
        returns the created attachment objects."""
        attachments = []
        for identifier in request.files:
            file_obj = request.files[identifier]
            attachments.append(
                document.attach(file_obj.filename, file_obj.read()))
            logger.info('Attached %s to document id = %s',
                        file_obj.filename, document.get_id())
        return attachments
class DocumentView(_FileProcessingView):
    """CRUD + search endpoints for documents."""
    def detail(self, pk):
        """Serialize a single document (pk or identifier)."""
        document = self._get_document(pk)
        return jsonify(document_serializer.serialize(document))
    def list_view(self):
        """Search/list documents, optionally restricted to the indexes
        named in the ``index`` query parameter."""
        # Allow filtering by index.
        idx_list = request.args.getlist('index')
        if idx_list:
            indexes = Index.select(Index.id).where(Index.name << idx_list)
        else:
            indexes = None
        document_count = Document.select().count()
        return jsonify(self._search_response(indexes, True, document_count))
    def create(self):
        """Create a document (content required; identifier/index(es)/metadata
        optional). An existing identifier turns this into an update."""
        data = validator.parse_post(
            ['content'],
            ['identifier', 'index', 'indexes', 'metadata'])
        indexes = validator.validate_indexes(data)
        if indexes is None:
            error('You must specify either an "index" or "indexes".')
        if data.get('identifier'):
            try:
                document = self._get_document(data['identifier'])
            except NotFound:
                # No existing document -- proceed with creation.
                pass
            else:
                # Idempotent create: an existing identifier updates in place.
                return self.update(data['identifier'])
        document = Document.create(
            content=data['content'],
            identifier=data.get('identifier'))
        if data.get('metadata'):
            document.metadata = data['metadata']
        logger.info('Created document with id=%s', document.get_id())
        for index in indexes:
            index.add_to_index(document)
            logger.info('Added document %s to index %s',
                        document.get_id(), index.name)
        if len(request.files):
            self.attach_files(document)
        return self.detail(document.get_id())
    def update(self, pk):
        """Partially update content/identifier/metadata/attachments and,
        if index data is supplied, replace the document's index links."""
        document = self._get_document(pk)
        data = validator.parse_post([], [
            'content',
            'identifier',
            'index',
            'indexes',
            'metadata'])
        save_document = False
        if data.get('content'):
            document.content = data['content']
            save_document = True
        if data.get('identifier'):
            document.identifier = data['identifier']
            save_document = True
        if save_document:
            document.save()
            logger.info('Updated document with id = %s', document.get_id())
        if 'metadata' in data:
            # Replace metadata wholesale; an explicit empty value clears it.
            del document.metadata
            if data['metadata']:
                document.metadata = data['metadata']
        if len(request.files):
            self.attach_files(document)
        indexes = validator.validate_indexes(data, required=False)
        if indexes is not None:
            # Replace all index links atomically.
            with database.atomic():
                (IndexDocument
                 .delete()
                 .where(IndexDocument.document == document)
                 .execute())
                if indexes:
                    IndexDocument.insert_many([
                        {'index': index, 'document': document}
                        for index in indexes]).execute()
        return self.detail(document.get_id())
    def delete(self, pk):
        """Delete a document along with its index links, attachments
        and metadata."""
        document = self._get_document(pk)
        with database.atomic():
            (IndexDocument
             .delete()
             .where(IndexDocument.document == document)
             .execute())
            (Attachment
             .delete()
             .where(Attachment.document == document)
             .execute())
            Metadata.delete().where(Metadata.document == document).execute()
            document.delete_instance()
            logger.info('Deleted document with id = %s', document.get_id())
        return jsonify({'success': True})
class AttachmentView(_FileProcessingView):
    """CRUD endpoints for attachments of a single document."""
    def _get_attachment(self, document, pk):
        # Attachments are addressed by filename within their document.
        return get_object_or_404(
            document.attachments,
            Attachment.filename == pk)
    def detail(self, document_id, pk):
        """Serialize one attachment of *document_id* by filename."""
        document = self._get_document(document_id)
        attachment = self._get_attachment(document, pk)
        return jsonify(attachment_serializer.serialize(attachment))
    def list_view(self, document_id):
        """List a document's attachments, sortable via ``ordering``
        (default: by filename)."""
        document = self._get_document(document_id)
        # Join the blob data eagerly so serialization avoids N+1 queries.
        query = (Attachment
                 .select(Attachment, BlobData)
                 .join(
                     BlobData,
                     on=(Attachment.hash == BlobData.hash).alias('_blob'))
                 .where(Attachment.document == document))
        ordering = request.args.getlist('ordering')
        query = engine.apply_rank_and_sort(query, None, ordering, {
            'document': Attachment.document,
            'hash': Attachment.hash,
            'filename': Attachment.filename,
            'mimetype': Attachment.mimetype,
            'timestamp': Attachment.timestamp,
            'id': Attachment.id,
        }, 'filename')
        pq = self.paginated_query(query)
        return jsonify({
            'attachments': [attachment_serializer.serialize(attachment)
                            for attachment in pq.get_object_list()],
            'ordering': ordering,
            'page': pq.get_page(),
            'pages': pq.get_page_count()})
    def create(self, document_id):
        """Attach the uploaded file(s) to the document; errors when the
        request carries no files."""
        document = self._get_document(document_id)
        validator.parse_post([], [])  # Ensure POST data is clean.
        if len(request.files):
            attachments = self.attach_files(document)
        else:
            error('No file attachments found.')
        return jsonify({'attachments': [
            attachment_serializer.serialize(attachment)
            for attachment in attachments]})
    def update(self, document_id, pk):
        """Replace one attachment with exactly one uploaded file.

        NOTE(review): the closing detail() uses the OLD attachment's
        filename -- presumably the replacement upload uses the same
        filename; verify with callers.
        """
        document = self._get_document(document_id)
        attachment = self._get_attachment(document, pk)
        validator.parse_post([], [])  # Ensure POST data is clean.
        nfiles = len(request.files)
        if nfiles == 1:
            attachment.delete_instance()
            self.attach_files(document)
        elif nfiles > 1:
            error('Only one attachment permitted when performing update.')
        else:
            error('No file attachment found.')
        return self.detail(document.get_id(), attachment.filename)
    def delete(self, document_id, pk):
        """Delete one attachment of the document."""
        document = self._get_document(document_id)
        attachment = self._get_attachment(document, pk)
        attachment.delete_instance()
        return jsonify({'success': True})
def attachment_download(document_id, pk):
    """Stream an attachment's raw blob inline with its stored mimetype.

    Unlike the class-based views, this looks up the document strictly by
    primary key (no identifier fallback).
    """
    document = get_object_or_404(
        Document.all(),
        Document._meta.primary_key == document_id)
    attachment = get_object_or_404(
        document.attachments,
        Attachment.filename == pk)
    response = make_response(attachment.blob.data)
    response.headers['Content-Type'] = attachment.mimetype
    response.headers['Content-Length'] = attachment.length
    response.headers['Content-Disposition'] = 'inline; filename=%s' % (
        attachment.filename)
    return response
| {
"repo_name": "coleifer/scout",
"path": "scout/views.py",
"copies": "1",
"size": "15537",
"license": "mit",
"hash": 262707142590612600,
"line_mean": 31.9173728814,
"line_max": 79,
"alpha_frac": 0.5770097187,
"autogenerated": false,
"ratio": 4.387743575261226,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5464753293961225,
"avg_score": null,
"num_lines": null
} |
from functools import wraps
import logging
from flask import Blueprint, render_template, request, redirect, url_for, flash, g, jsonify
from flask.ext.login import login_required
# Project 5: adjusted imports to the minimal subset after model separation.
from talent_match import db
from ..models.talentInfo import Category
from ..forms import EditCategoryForm
logger = logging.getLogger(__name__)
# Blueprint serving all /categories/ admin pages.
app = Blueprint(
    'categories', __name__, template_folder="templates", url_prefix="/categories")
def admin_required(f):
    """View decorator: require an authenticated admin user.

    Non-admin (but logged-in) users are redirected to the login page;
    anonymous users are handled by the stacked ``login_required``.
    """
    @wraps(f)
    @login_required
    def decorated(*args, **kwargs):
        if not g.user.is_admin:
            return redirect(url_for('auth.login'))
        return f(*args, **kwargs)
    return decorated
@app.route('/', methods=['GET', 'POST'])
@admin_required
def listCategories():
    """Admin list of all categories with their skill counts.

    The skill count is computed in Python rather than via an ORM
    aggregate, because the aggregate query mis-counted categories with
    no skills (original fix for issue #6). Dead commented-out code from
    earlier iterations has been removed.
    """
    form = None  # the template expects a ``form`` variable; none is used here
    categories = []
    for cat in Category.query.all():
        # ``skillList`` may be None or empty; both count as zero.
        skill_count = len(cat.skillList) if cat.skillList else 0
        categories.append(dict(
            id=cat.id, name=cat.name, description=cat.description,
            count=skill_count, deleted=cat.deleted))
    categories.sort(key=lambda category: category['name'])
    return render_template(
        "categories.html", form=form, categories=categories, user=g.user)
@app.route('/delete', methods=['GET', 'POST'])
@admin_required
def deleteCategory():
    """Soft-delete the category identified by the ``id`` request value,
    then return to the category list."""
    category_id = request.values.get('id')
    category = Category.query.get(category_id) if category_id else None
    if category:
        category.deleted = True
        db.session.commit()
    return redirect('/categories')
@app.route('/restore', methods=['GET', 'POST'])
@admin_required
def restoreCategory():
    """Clear the soft-delete flag on the category identified by ``id``,
    then return to the category list."""
    category_id = request.values.get('id')
    category = Category.query.get(category_id) if category_id else None
    if category:
        category.deleted = False
        db.session.commit()
    return redirect('/categories')
@app.route('/edit', methods=['GET', 'POST'])
@admin_required
def editCategory():
    """Create a new category or edit an existing one.

    A valid POST either creates a category (empty ``id``) or updates the
    existing one; GET (or an invalid POST) renders the edit form.

    Bug fix: the GET edit branch now pre-populates the name field as well
    as the description, so editing no longer shows an empty name.
    """
    isAddTalent = True  # assume add to start
    form = EditCategoryForm()
    # Validate the submitted data
    if form.validate_on_submit():
        logger.info(form.data)
        logger.info(form.name.data)
        logger.info(form.description.data)
        isCreate = (form.id.data == '')
        if isCreate:
            # Reject duplicate names on creation.
            category = Category.query.filter_by(
                name=form.name.data).limit(1).first()
            if category is not None:
                logger.info('existing category error')
                flash('Category already exists', 'error')
                return render_template(
                    "edit_category.html", editCategory=None, form=form,
                    isAddTalent=True)
            category = Category(form.name.data, form.description.data)
            db.session.add(category)
            db.session.commit()
        else:
            category = Category.query.get(form.id.data)
            category.description = form.description.data
            category.name = form.name.data
            db.session.commit()
        return redirect('/categories')
    else:
        categoryID = request.values.get('id')
        category = None
        if categoryID is not None:
            isAddTalent = False
            category = Category.query.get(categoryID)
            # Pre-populate all editable fields (name was missing before).
            form.name.data = category.name
            form.description.data = category.description
            form.id.data = categoryID
        else:
            isAddTalent = True
            form.id.data = None
    return render_template(
        "edit_category.html", editCategory=category, form=form,
        isAddTalent=isAddTalent)
| {
"repo_name": "jordan-wright/talent-match",
"path": "talent_match/views/categories.py",
"copies": "1",
"size": "4347",
"license": "mit",
"hash": 1486204595815537400,
"line_mean": 33.5,
"line_max": 111,
"alpha_frac": 0.637221072,
"autogenerated": false,
"ratio": 4.0664172123479885,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5203638284347989,
"avg_score": null,
"num_lines": null
} |
from functools import wraps
import logging
from flask import request, Response, Flask, jsonify, render_template
from tsg.search import search
from tsg.config import DICTIONARY_PATH, INDEXINFO_PATH, RANKER_K
from tsg.frontend.base import generate_detailed_list
# WSGI application serving both the JSON API and the HTML frontend.
app = Flask(__name__)
def check_auth(username, password):
    """Return True when the username/password pair matches the admin
    credentials.

    NOTE(review): credentials are hard-coded in source ("no worries if
    this is on GH for now" per the original author) -- they should be
    moved to configuration/environment.
    """
    expected = ('admin', 'brianmoritzmiguel')
    return (username, password) == expected
def authenticate():
    """Sends a 401 response that enables basic auth"""
    # The WWW-Authenticate header makes browsers show the login prompt.
    return Response(
        'Could not verify your access level for that URL.\n'
        'You have to login with proper credentials', 401,
        {'WWW-Authenticate': 'Basic realm="Login Required"'})
def requires_auth(f):
    """View decorator enforcing HTTP Basic auth via check_auth()."""
    @wraps(f)
    def decorated(*args, **kwargs):
        auth = request.authorization
        # Missing or invalid credentials trigger the 401 challenge.
        if not auth or not check_auth(auth.username, auth.password):
            return authenticate()
        return f(*args, **kwargs)
    return decorated
@app.route('/api/search')
@requires_auth
def api_search():
    """JSON search endpoint: returns raw results for the ``query`` arg."""
    query = request.args.get('query', '')
    logging.info('Searching for \'{}\''.format(query))
    results = search(query, DICTIONARY_PATH, INDEXINFO_PATH)
    return jsonify(results=results)
@app.route('/')
@app.route('/index.html')
@requires_auth
def index():
    'show front page'
    return render_template('index.html')
@app.route('/search.html')
@requires_auth
def html_search():
    'show results'
    query = request.args.get('query', '')
    # Pagination window: result offset and page length.
    start = int(request.args.get('start', '0'))
    length = int(request.args.get('length', '20'))
    logging.info('Searching for \'{}\''.format(query))
    results = search(query, DICTIONARY_PATH, INDEXINFO_PATH)
    detailed_list = generate_detailed_list(results, query, start, length)
    count = len(results)
    # The ranker caps results at RANKER_K, so a full page means the true
    # total is unknown -- report it as a lower bound.
    if count >= RANKER_K:
        count = 'more than {}'.format(count)
    return render_template('search.html',
                           query=query,
                           results=detailed_list,
                           start=start,
                           length=length,
                           count=count
                           )
| {
"repo_name": "moritzschaefer/the-search-engine",
"path": "tsg/frontend/__init__.py",
"copies": "1",
"size": "2362",
"license": "mit",
"hash": -8730286575539771000,
"line_mean": 27.8048780488,
"line_max": 73,
"alpha_frac": 0.6265876376,
"autogenerated": false,
"ratio": 3.9630872483221475,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5089674885922147,
"avg_score": null,
"num_lines": null
} |
from functools import wraps
import logging
from . import process
from .utils import which, tempdir
from ._compat import *
def ensure_git(return_value=None):
    """Decorator factory: run the wrapped function only if ``git`` is on
    PATH; otherwise log an error and return *return_value*."""
    def decorator(func):
        @wraps(func)
        def wrapper(*args, **kwargs):
            if not which('git'):
                logging.error('git is not installed')
                return return_value
            return func(*args, **kwargs)
        return wrapper
    return decorator
@ensure_git()
def clone(url, dest=None, depth=None):
    """Clone *url* into *dest* (a fresh temp dir by default) and return
    the destination path.

    NOTE(review): ``os`` is presumably provided by the ``_compat`` star
    import -- confirm. Likewise, *depth* looks like it must be a string
    since it is passed straight into the command list; verify callers.
    """
    if dest and os.path.exists(dest):
        raise FileExistsError('Destination already exists: %s' % dest)
    dest = dest if dest else tempdir()
    cmd = ['git', 'clone', url, dest]
    if depth:
        # Shallow clone with the requested history depth.
        cmd += ['--depth', depth]
    process.call(cmd)
    return dest
class Repository(object):
    """Thin wrapper around git commands executed in a working directory.

    With ``autopull`` (a ``(remote, branch)`` tuple) the repo is rebased
    on construction; with ``autopush`` every commit is pushed immediately.
    All commands silently no-op (per ``ensure_git``) when git is missing.
    """
    def __init__(self, path, autopull=None, autopush=None):
        self.path = path
        self.autopush = autopush
        self.autopull = autopull
        if autopull:
            self.pull_rebase(*autopull)
    @ensure_git()
    def init(self):
        """Initialize a new git repository at ``self.path``."""
        cmd = ['git', 'init', self.path]
        process.call(cmd)
    @ensure_git()
    def pull_rebase(self, remote='origin', branch='master'):
        """Fetch and rebase onto *remote*/*branch*."""
        cmd = ['git', 'pull', '--rebase', remote, branch]
        process.call(cmd, cwd=self.path)
    @ensure_git()
    def push(self, remote='origin', branch='master'):
        """Push the current branch to *remote*/*branch*."""
        cmd = ['git', 'push', remote, branch]
        process.call(cmd, cwd=self.path)
    @ensure_git()
    def add(self, all=False):
        """Stage changes; with ``all=True`` also stage deletions."""
        if all is True:
            cmd = ['git', 'add', '--all', '.']
        else:
            cmd = ['git', 'add', '.']
        process.call(cmd, cwd=self.path)
    @ensure_git()
    def commit(self, message, add=True):
        """Commit with *message*, staging everything first by default;
        pushes afterwards when ``autopush`` is set."""
        if add:
            self.add(all=True)
        cmd = ['git', 'commit', '-m', message]
        process.call(cmd, cwd=self.path)
        if self.autopush:
            self.push()
    @ensure_git(return_value=[])
    def commit_list(self):
        """Return commit subjects, oldest first ([] when git is missing)."""
        cmd = ['git', 'log', '--reverse', '--pretty=format:%s']
        output, _ = process.call(cmd, cwd=self.path)
        return output.splitlines()
    @ensure_git(return_value=[])
    def sha_list(self):
        """Return abbreviated commit hashes, oldest first."""
        cmd = ['git', 'log', '--reverse', '--pretty=format:%h']
        output, _ = process.call(cmd, cwd=self.path)
        return output.splitlines()
    @ensure_git()
    def reset(self, to_index):
        """Hard-reset to the commit at position *to_index* in sha_list();
        logs and does nothing when the index is out of range."""
        try:
            sha = self.sha_list()[to_index]
            cmd = ['git', 'reset', '--hard', sha]
            process.call(cmd, cwd=self.path)
        except IndexError:
            logging.info('commit on index "{}" not found'.format(to_index))
| {
"repo_name": "eiginn/passpie",
"path": "passpie/history.py",
"copies": "1",
"size": "2665",
"license": "mit",
"hash": -454008621583621900,
"line_mean": 27.3510638298,
"line_max": 75,
"alpha_frac": 0.5500938086,
"autogenerated": false,
"ratio": 3.6657496561210454,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.47158434647210457,
"avg_score": null,
"num_lines": null
} |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.