text stringlengths 0 1.05M | meta dict |
|---|---|
from functools import wraps
from os.path import join as path_join
from urllib.parse import urljoin, urlparse, urlunparse
from flask import current_app, request
def jsonp(func):
    """Wraps JSONified output for JSONP requests.

    When a ``callback`` query argument is present, the view's JSON body is
    wrapped in ``callback(...)`` and served as JavaScript; otherwise the
    view's response is returned unchanged.
    """
    @wraps(func)
    def decorated(*args, **kwargs):
        callback = request.args.get('callback', False)
        if callback:
            # Response.data is bytes on Python 3 (this file imports
            # urllib.parse, so it targets py3); str() on bytes would embed
            # a literal "b'...'" in the JSONP payload. Decode instead.
            data = func(*args, **kwargs).get_data(as_text=True)
            content = str(callback) + '(' + data + ')'
            mimetype = 'application/javascript'
            return current_app.response_class(content, mimetype=mimetype)
        else:
            return func(*args, **kwargs)
    return decorated
def absolute_url(path: str = '') -> str:
    """Join *path* onto the configured BASE_URL, made absolute against the
    current request's base URL when a request context is active."""
    # ensure that "path" (see urlparse result) part of url has both leading and trailing slashes
    # NOTE(review): os.path.join is applied to a URL path component; on
    # Windows this would produce backslashes -- confirm POSIX-only deployment
    # or switch to posixpath.join.
    conf_base_url = urlunparse([(x if i != 2 else path_join('/', x, ''))
                                for i, x in enumerate(urlparse(current_app.config.get('BASE_URL', '/')))])
    try:
        base_url = urljoin(request.base_url, conf_base_url)
    except RuntimeError:  # Working outside of request context
        base_url = conf_base_url
    # lstrip('/') so the joined path is appended under base_url instead of
    # replacing its path entirely.
    return urljoin(base_url, path.lstrip('/'))
| {
"repo_name": "guardian/alerta",
"path": "alerta/utils/response.py",
"copies": "1",
"size": "1196",
"license": "apache-2.0",
"hash": -1715189264333980700,
"line_mean": 37.5806451613,
"line_max": 106,
"alpha_frac": 0.6237458194,
"autogenerated": false,
"ratio": 4.081911262798635,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5205657082198635,
"avg_score": null,
"num_lines": null
} |
from functools import wraps
from os.path import join
import docker
from requests.exceptions import ReadTimeout
from flask import current_app, request, Response, Blueprint
from werkzeug import secure_filename
from .exceptions import BuildException
from .utils import docker_build, check_auth, check_image, mktmpdir
# Module-level docker client talking to the local daemon socket, and the
# blueprint that hosts the build endpoints plus the bundled static UI.
c = docker.Client(base_url='unix://var/run/docker.sock')
build = Blueprint('build', __name__, static_folder='../static')
def requires_auth(f):
    """Guard a view with HTTP Basic auth, validated by check_auth()."""
    @wraps(f)
    def decorated(*args, **kwargs):
        credentials = request.authorization
        if credentials and check_auth(credentials.username, credentials.password):
            return f(*args, **kwargs)
        # Missing or wrong credentials: challenge the client.
        return Response('Bad username or password', 401,
                        {'WWW-Authenticate': 'Basic realm="Login Required"'})
    return decorated
@build.route('/', methods=['GET'])
def index():
    # Serve the single-page UI from the blueprint's static folder.
    return build.send_static_file('index.html')
@build.route('/', methods=['POST'])
@build.route('/<image>/<output>', methods=['POST'])
@requires_auth
def run_build(image=None, output=None):
    """Build *output* inside docker image *image* from the uploaded input.

    image/output may come from the URL or the form. The uploaded file is
    saved into a temp dir, handed to docker_build, and the resulting file
    is streamed back as an attachment. Build failures map to 400/500.
    """
    image = request.form.get('image', image)
    output = request.form.get('output', output)
    # BUG FIX: indexing request.files['input'] raised KeyError before the
    # explicit "Missing input file" check below could ever run.
    inputfile = request.files.get('input')
    if not image:
        return 'Missing image name', 400
    if not output:
        return 'Missing output file', 400
    if not inputfile:
        return 'Missing input file', 400
    if not check_image(request.authorization.username, image):
        return 'Image not allowed', 403
    with mktmpdir() as tempdir:
        inputfile.save(join(tempdir, secure_filename(inputfile.filename)))
        try:
            response = Response(docker_build(c, tempdir, output, image))
            cd = 'attachment; filename="%s"' % output
            response.headers['Content-Disposition'] = cd
            response.mimetype = 'application/octet-stream'
            return response
        except docker.errors.APIError as e:
            if 'could not find the file' in e.explanation.lower():
                err = 'Could not find file %s' % output
                return err, 400
            else:
                return e.explanation, 500
        except ReadTimeout:
            err = ('Timeout: Build did not complete after %d seconds' %
                   current_app.config['CONTAINER_TIMEOUT'])
            return err, 400
        except BuildException as e:
            return str(e), 400
| {
"repo_name": "Mattias-/mym",
"path": "mym/build.py",
"copies": "1",
"size": "2389",
"license": "mit",
"hash": -2094096206689067300,
"line_mean": 34.1323529412,
"line_max": 74,
"alpha_frac": 0.6358308916,
"autogenerated": false,
"ratio": 4.205985915492958,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5341816807092958,
"avg_score": null,
"num_lines": null
} |
from functools import wraps
from pastry.models import User
from flask import request, abort, jsonify
def parse_api_key():
    """Return the api_key from the query string or form body, else None."""
    return (request.args.get('api_key')
            or request.form.get('api_key')
            or None)
def login_required(f):
    """Require an Auth-Token header or an api_key parameter.

    CORS preflight (OPTIONS) requests pass through unauthenticated. When
    both credentials are supplied, the api_key takes precedence.
    """
    @wraps(f)
    def decorated(*args, **kwargs):
        if request.method != 'OPTIONS':
            token = request.headers.get('Auth-Token')
            api_key = parse_api_key()
            # Neither credential present at all.
            if not token and not api_key:
                abort(401)
            if api_key:
                if not User.verify_api_key(api_key):
                    response = jsonify({
                        'message': 'Invalid API Key',
                        'invalid_api_key': True,
                    })
                    response.status_code = 401
                    return response
            elif not User.verify_auth_token(token):
                response = jsonify({
                    'message': 'Expired Token',
                    'expired_token': True,
                })
                response.status_code = 401
                return response
        return f(*args, **kwargs)
    return decorated
| {
"repo_name": "cloughrm/Flask-Angular-Template",
"path": "backend/pastry/resources/auth.py",
"copies": "1",
"size": "1383",
"license": "mit",
"hash": 2225794611409529900,
"line_mean": 29.0652173913,
"line_max": 53,
"alpha_frac": 0.4873463485,
"autogenerated": false,
"ratio": 4.625418060200669,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5612764408700669,
"avg_score": null,
"num_lines": null
} |
from functools import wraps
from path import path as ppath
import flask
from flaskext.openid import OpenID, COMMON_PROVIDERS
from sqlitedict import SqliteDict
# OpenID extension instance and the blueprint carrying the auth routes;
# both are bound to the app in register() below.
oid = OpenID()
auth = flask.Blueprint('auth', __name__)
def _get_users_db():
    # The SqliteDict of users is stashed on the app by register() below.
    return flask.current_app.extensions['auth-users']['usersdb']
def lookup_current_user():
    """before_request hook: populate flask.g.user / g.user_id from the
    OpenID URL stored in the session (both None when logged out)."""
    flask.g.user = None
    flask.g.user_id = flask.session.get('openid_url')
    if flask.g.user_id is not None:
        flask.g.user = _get_users_db().get(flask.g.user_id)
@auth.route('/login', methods=['GET', 'POST'])
@oid.loginhandler
def login():
    # Already signed in: bounce straight back to where the user came from.
    if flask.g.user is not None:
        return flask.redirect(oid.get_next_url())
    # Kick off the OpenID handshake with Google as the provider.
    return oid.try_login(COMMON_PROVIDERS['google'],
                         ask_for=['email', 'fullname', 'nickname'])
@oid.after_login
def update_user(resp):
    """OpenID success callback: store/refresh the user's profile keyed by
    identity URL, then redirect to the post-login destination."""
    users_db = _get_users_db()
    flask.g.user_id = flask.session['openid_url'] = resp.identity_url
    # Existing profile or a fresh one for first-time logins.
    flask.g.user = users_db.get(flask.g.user_id) or {}
    flask.g.user.update({
        'name': resp.fullname or resp.nickname or u"",
        'email': resp.email,
    })
    save_user()
    return flask.redirect(oid.get_next_url())
def save_user():
    """Persist flask.g.user into the users DB under flask.g.user_id.

    The guards were ``assert`` statements before; assert is stripped when
    running under ``python -O``, which would have silently stored entries
    under a None key, so they are now real runtime checks.
    """
    if not flask.g.user_id or not flask.g.user:
        raise RuntimeError('save_user() called without a logged-in user')
    users_db = _get_users_db()
    users_db[flask.g.user_id] = flask.g.user
def require_login(func):
    """View decorator: abort with 403 unless a user is logged in."""
    @wraps(func)
    def wrapper(*args, **kwargs):
        if flask.g.user is not None:
            return func(*args, **kwargs)
        flask.abort(403)
    return wrapper
@auth.route('/logout')
def logout():
    # Dropping the session key is enough: lookup_current_user() sees no
    # openid_url on the next request and leaves g.user as None.
    flask.session.pop('openid_url', None)
    return flask.redirect(oid.get_next_url())
def register(app):
    """Wire the auth blueprint, OpenID store and users DB into *app*."""
    app.register_blueprint(auth)
    oid.init_app(app)
    instance_path = ppath(app.instance_path)
    # File-system store for OpenID association state.
    oid.fs_store_path = instance_path / 'openid-store'
    app.before_request(lookup_current_user)
    # Consumed by _get_users_db() above.
    app.extensions['auth-users'] = {
        'usersdb': SqliteDict(instance_path / 'users.db', autocommit=True),
    }
| {
"repo_name": "eaudeweb/natura2000db",
"path": "auth.py",
"copies": "1",
"size": "2004",
"license": "bsd-3-clause",
"hash": -5015305180744380000,
"line_mean": 24.3670886076,
"line_max": 73,
"alpha_frac": 0.6412175649,
"autogenerated": false,
"ratio": 3.2115384615384617,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4352756026438462,
"avg_score": null,
"num_lines": null
} |
from functools import wraps
from plumeria import config
from plumeria.config import set_of
from plumeria.command import AuthorizationError, CommandError
# Config entry: set of user id strings granted full owner rights; read on
# every call via owner_ids() so config changes take effect immediately.
owner_ids = config.create("perms", "admin_users", set_of(str), fallback="")
config.add(owner_ids)
def has_role_in(user, roles):
    """Return True when *user* holds at least one role named in *roles*."""
    return any(role.name in roles for role in user.roles)
def is_owner(user):
    # owner_ids() re-reads the configured owner set on every call.
    return user.id in owner_ids()
def is_server_admin(user):
    """Return True when *user* carries the special 'bot-admin' role."""
    return any(role.name == "bot-admin" for role in user.roles)
def owners_only(f):
    """Restrict a command to configured bot owners."""
    @wraps(f)
    async def wrapper(message, *args, **kwargs):
        # Guard clause: non-owners are rejected before the command runs.
        if not is_owner(message.author):
            raise AuthorizationError()
        return await f(message, *args, **kwargs)
    # Marker consulted elsewhere (e.g. help/command listings).
    wrapper.owners_only = True
    return wrapper
def server_admins_only(f):
    """Restrict a command to bot owners or server 'bot-admin' role holders."""
    @wraps(f)
    async def wrapper(message, *args, **kwargs):
        # Guard clause: reject unless owner or server admin.
        if not (is_owner(message.author) or is_server_admin(message.author)):
            raise AuthorizationError()
        return await f(message, *args, **kwargs)
    wrapper.server_admins_only = True
    return wrapper
def roles_only(*roles):
    """Restrict a command to bot owners or users holding one of *roles*."""
    roles = set(roles)

    def inner(f):
        @wraps(f)
        async def wrapper(message, *args, **kwargs):
            if is_owner(message.author) or has_role_in(message.author, roles):
                return await f(message, *args, **kwargs)
            else:
                raise AuthorizationError()
        # NOTE(review): the marker says server_admins_only even though this
        # check is role-based -- looks copy-pasted from server_admins_only();
        # confirm whether a dedicated roles_only marker was intended.
        wrapper.server_admins_only = True
        return wrapper
    return inner
def have_all_perms(*perms):
    """Restrict a command to users holding every channel permission in *perms*.

    Permissions are resolved against the channel the message was sent in;
    unlike the owner/admin decorators, owners get no exemption here.
    """
    def decorator(f):
        @wraps(f)
        async def wrapper(message, *args, **kwargs):
            if not hasattr(message.author, "roles"):
                raise AuthorizationError("Permission data has not been loaded.")
            resolved = message.channel.permissions_for(message.author)
            if not resolved:
                raise AuthorizationError("This command cannot be used here because there is no permission information.")
            missing = {perm for perm in perms if not getattr(resolved, perm)}
            if missing:
                # BUG FIX: the message previously listed *all* required
                # permissions (perms) instead of just the missing ones.
                raise AuthorizationError("Missing the following one or more permissions: {}".format(", ".join(missing)))
            return await f(message, *args, **kwargs)
        # NOTE(review): marker name looks copy-pasted from
        # server_admins_only(); left unchanged in case callers inspect it.
        wrapper.server_admins_only = True
        return wrapper
    return decorator
def direct_only(f):
    """Make sure that the command isn't being run in an alias."""
    @wraps(f)
    async def wrapper(message, *args, **kwargs):
        if message.direct:
            return await f(message, *args, **kwargs)
        raise CommandError("This command cannot be run from an alias.")
    wrapper.direct_only = True
    return wrapper
| {
"repo_name": "sk89q/Plumeria",
"path": "plumeria/perms.py",
"copies": "1",
"size": "2874",
"license": "mit",
"hash": -5195527384829551000,
"line_mean": 26.9029126214,
"line_max": 120,
"alpha_frac": 0.6113430759,
"autogenerated": false,
"ratio": 4.201754385964913,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5313097461864913,
"avg_score": null,
"num_lines": null
} |
from functools import wraps
from pprint import pprint
import json
try:
from urllib.parse import quote as url_quote
except:
from urllib import quote as url_quote
from flask import Blueprint, g, session, render_template, url_for, request, jsonify
from controllers.helpers import (lti, get_assignments_from_request, strip_tags,
get_lti_property, get_course_id, get_assignment_id)
from main import app
import controllers.maze as maze
import controllers.explain as explain
import controllers.blockpy as blockpy
import controllers.corgis as corgis
import controllers.poll as poll
from models.models import (db, Assignment, AssignmentGroup, User, Course)
# All assignment endpoints below are mounted under /assignments.
blueprint_assignments = Blueprint('assignments', __name__, url_prefix='/assignments')
@blueprint_assignments.route('/', methods=['GET', 'POST'])
@blueprint_assignments.route('/load', methods=['GET', 'POST'])
@lti(request='initial')
def load(lti, lti_exception=None):
    """Dispatch the requested assignment(s) to the loader for their type
    (maze / explain / poll / visualizer / blockpy default)."""
    assignments, submissions = get_assignments_from_request()
    if not assignments:
        return jsonify(success=False, message="Assignment not found")
    # NOTE(review): compares against 'True' (capital T) while the default is
    # lowercase 'false' -- confirm which spelling clients actually send.
    embed = request.values.get('embed', 'false') == 'True'
    # Use the proper template; the first assignment's type decides for all.
    if assignments[0].type == 'maze':
        return maze.load(assignments=assignments, submissions=submissions, lti=lti, embed=embed)
    elif assignments[0].type == 'explain':
        return explain.load(assignments=assignments, submissions=submissions, lti=lti, embed=embed)
    elif assignments[0].type == 'poll':
        return poll.load(assignments=assignments, submissions=submissions, lti=lti, embed=embed)
    elif assignments[0].type in ('corgis (visualizer)', 'visualizer'):
        return corgis.redirect_language_index(language='visualizer', assignments=assignments, submissions=submissions, lti=lti, embed=embed)
    else:
        return blockpy.load(assignments=assignments, submissions=submissions, lti=lti, embed=embed)
@blueprint_assignments.route('/new/', methods=['GET', 'POST'])
@blueprint_assignments.route('/new', methods=['GET', 'POST'])
@get_course_id
def new_assignment(course_id, lti=lti):
    """Create a fresh assignment in *course_id* and return its metadata,
    including prebuilt view / LTI-select / edit URLs for the frontend."""
    name = request.values.get('name', None) or None
    level = request.values.get('level', None) or None
    menu = request.values.get('menu', "select")
    # TODO: change "normal" type to "blockpy"
    type = request.values.get('type', "normal")
    assignment = Assignment.new(owner_id=g.user.id, course_id=course_id, type=type, name=name, level=level)
    # LTI return type differs when the assignment is embedded in an iframe.
    launch_type = 'lti_launch_url' if menu != 'embed' else 'iframe'
    endpoint = 'assignments.load'
    return jsonify(success=True,
                   redirect=url_for('assignments.load', assignment_id=assignment.id),
                   id=assignment.id,
                   name=assignment.name,
                   type=type,
                   body=strip_tags(assignment.body)[:255],
                   title=assignment.title(),
                   view=url_for('assignments.load', assignment_id=assignment.id, embed=menu == 'embed'),
                   # Extra query parameters are appended manually because the
                   # consumer expects them outside the quoted launch URL.
                   select=url_quote(url_for(endpoint, assignment_id=assignment.id, _external=True, embed=menu == 'embed')) + "&return_type=" + launch_type + "&title=" + url_quote(assignment.title()) + "&text=BlockPy%20Exercise&width=100%25&height=600",
                   edit=url_for('assignments.load', assignment_id=assignment.id, course_id=assignment.course_id),
                   # strip the leading zero from e.g. " 09:05AM"
                   date_modified=assignment.date_modified.strftime(" %I:%M%p on %a %d, %b %Y").replace(" 0", " "))
@blueprint_assignments.route('/remove/', methods=['GET', 'POST'])
@blueprint_assignments.route('/remove', methods=['GET', 'POST'])
def remove_assignment(lti=None):
    """Delete an assignment; only instructors of its course may do so."""
    assignment_id = request.values.get('assignment_id', None)
    if assignment_id is None:  # idiomatic None test (was == None)
        return jsonify(success=False, message="Need assignment_id.")
    assignment = Assignment.by_id(int(assignment_id))
    if not assignment:
        # Guard: by_id can return nothing for an unknown id (see
        # edit_assignment); previously this crashed with AttributeError.
        return jsonify(success=False, message="Assignment ID not found.")
    if not g.user.is_instructor(assignment.course_id):
        return jsonify(success=False, message="You are not an instructor in this assignment's course.")
    Assignment.remove(assignment.id)
    return jsonify(success=True)
@blueprint_assignments.route('/get/', methods=['GET', 'POST'])
@blueprint_assignments.route('/get', methods=['GET', 'POST'])
def get_assignment(lti=lti):
    '''
    Returns metadata about the assignment.
    '''
    # BUG FIX: assignment_id was referenced without ever being defined, so
    # every request raised NameError. Read it from the request like the
    # sibling endpoints (remove_assignment, move_course) do.
    assignment_id = request.values.get('assignment_id', None)
    if assignment_id is None:
        return jsonify(success=False, message="Need assignment_id.")
    # TODO: Security hole, evil instructors could remove assignments outside of their course
    assignment = Assignment.by_id(int(assignment_id))
    return jsonify(success=True, url=assignment.url, name=assignment.name,
                   body=strip_tags(assignment.body)[:255],
                   on_run=assignment.on_run,
                   title=assignment.title(),
                   answer=assignment.answer, type=assignment.type,
                   visibility=assignment.visibility, disabled=assignment.disabled,
                   mode=assignment.mode, version=assignment.version,
                   id=assignment.id, course_id=assignment.course_id,
                   date_modified=assignment.date_modified.strftime(" %I:%M%p on %a %d, %b %Y").replace(" 0", " "))
@blueprint_assignments.route('/move_course/', methods=['GET', 'POST'])
@blueprint_assignments.route('/move_course', methods=['GET', 'POST'])
def move_course(lti=None):
    """Move an assignment into another course; the user must be an
    instructor in both the source and the destination course."""
    assignment_id = request.values.get('assignment_id', None)
    new_course_id = request.values.get('new_course_id', None)
    if assignment_id is None or new_course_id is None:
        return jsonify(success=False, message="Need assignment_id and new_course_id.")
    assignment = Assignment.by_id(int(assignment_id))
    if not g.user.is_instructor(assignment.course_id):
        return jsonify(success=False, message="You are not an instructor in this assignment's course.")
    if not g.user.is_instructor(int(new_course_id)):
        return jsonify(success=False, message="You are not an instructor in the new course.")
    assignment.move_course(new_course_id)
    return jsonify(success=True)
@blueprint_assignments.route('/edit_assignment/<int:assignment_id>/', methods=['GET', 'POST'])
@blueprint_assignments.route('/edit_assignment/<int:assignment_id>', methods=['GET', 'POST'])
def edit_assignment(assignment_id, lti=lti):
    """Render the edit page for one assignment (instructors only)."""
    assignment = Assignment.by_id(assignment_id)
    if not assignment:
        # NOTE(review): plain-string error here vs jsonify elsewhere --
        # confirm which format the frontend expects.
        return "Assignment ID not found"
    if not g.user.is_instructor(assignment.course_id):
        return jsonify(success=False, message="You are not an instructor in this course.")
    submission = assignment.get_submission(g.user.id)
    return render_template('lti/edit.html',
                           assignment=assignment,
                           submission=submission,
                           user_id=g.user.id,
                           context_id=assignment.course_id)
@blueprint_assignments.route('/batch_edit/', methods=['GET', 'POST'])
@blueprint_assignments.route('/batch_edit', methods=['GET', 'POST'])
def batch_edit(lti=lti):
    """Render a batch-editing page for every assignment in the course."""
    # NOTE(review): ensure_canvas_arguments is neither defined nor imported
    # in this module as shown -- confirm it exists elsewhere, otherwise this
    # raises NameError on every request.
    user, roles, course = ensure_canvas_arguments()
    # NOTE(review): checks g.course.id here but uses course.id below --
    # verify these are guaranteed to refer to the same course.
    if not g.user.is_instructor(g.course.id):
        return jsonify(success=False, message="You are not an instructor in this course.")
    assignments = Assignment.by_course(course.id)
    return render_template('lti/batch.html',
                           assignments=assignments,
                           user_id=user.id,
                           context_id=course.id)
@blueprint_assignments.route('/check_assignments/', methods=['GET', 'POST'])
@blueprint_assignments.route('/check_assignments', methods=['GET', 'POST'])
def check_assignments(lti=lti):
    """ An AJAX endpoint for listing any new assignments.
    Unused.
    """
    # NOTE(review): relies on g.course being populated by earlier
    # middleware; there is no guard if it is missing.
    assignments = Assignment.by_course(g.course.id)
    return jsonify(success=True, assignments=[a.to_dict() for a in assignments])
@blueprint_assignments.route('/select/', methods=['GET', 'POST'])
@blueprint_assignments.route('/select', methods=['GET', 'POST'])
@lti(request='initial')
def select(lti, menu='select'):
    """Let the user select from a list of assignments.

    Also called directly by select_embed(), which passes menu='embed'.
    """
    # Store current user_id and context_id
    assignments = Assignment.get_available()
    groups = AssignmentGroup.query.all()
    return_url = get_lti_property('launch_presentation_return_url')
    course_groups = Course.get_all_groups()
    editable_courses = g.user.get_editable_courses()
    return render_template('lti/select.html', assignments=assignments, groups=groups, return_url=return_url, menu=menu, editable_courses=editable_courses, course_groups=course_groups)
@blueprint_assignments.route('/select_embed/', methods=['GET', 'POST'])
@blueprint_assignments.route('/select_embed', methods=['GET', 'POST'])
@lti(request='initial')
def select_embed(lti):
    """Same as select(), but renders the embed-flavored menu."""
    return select(menu='embed', lti=lti)
def process_assignments(assignments, user_id, course_id):
    """Create Assignment rows from a list of exported assignment dicts.

    Returns a mapping from each dict's original 'id' to the freshly
    assigned database id so callers can fix up cross-references.
    """
    id_map = {}
    for assignment in assignments:
        id = assignment['id']  # id from the export, not the new DB id
        a = Assignment(name=assignment['name'],
                       body=assignment['body'],
                       give_feedback=assignment['on_run'],
                       starting_code=assignment['on_start'],
                       type='blockpy',
                       visibility=assignment['visibility'],
                       disabled=assignment['disabled'],
                       mode=assignment['mode'],
                       owner_id=user_id,
                       course_id=course_id,
                       version=assignment['version'],
                       )
        db.session.add(a)
        # Commit per row so a.id is populated before it is recorded.
        db.session.commit()
        id_map[id] = a.id
    return id_map
@blueprint_assignments.route('/bulk_upload/', methods=['GET', 'POST'])
@blueprint_assignments.route('/bulk_upload', methods=['GET', 'POST'])
def bulk_upload():
    """Upload a JSON export of assignments into a course (instructors only).

    GET renders a minimal upload form; POST feeds the uploaded file through
    process_assignments() and returns the old-id -> new-id mapping as JSON.
    """
    course_id = request.values.get('course_id', None)
    if course_id is None:
        return jsonify(success=False, message="No course id")
    if not g.user.is_instructor(int(course_id)):
        return jsonify(success=False, message="Not an instructor in this course")
    if request.method == 'POST':
        # NOTE(review): flash and redirect are not among this module's flask
        # imports -- both error branches below would raise NameError if hit;
        # confirm and either import them or return JSON errors instead.
        if 'file' not in request.files:
            flash('No file part')
            return redirect(request.url)
        file = request.files['file']
        if file.filename == '':
            flash('No selected file')
            return redirect(request.url)
        if file:
            return json.dumps(process_assignments(json.loads(file.read()), user_id=g.user.id, course_id=course_id))
    else:
        return '''
<!doctype html>
<title>Upload assignments (JSON)</title>
<h1>Upload assignments (JSON)</h1>
<form action="" method=post enctype=multipart/form-data>
<p><input type=file name=file>
<input type=submit value=Upload>
</form>
'''
@blueprint_assignments.route('/images/<path:path>', methods=['GET', 'POST'])
def assignments_static_images(path):
    # Proxy image requests through to the app's static folder.
    return app.send_static_file('images/' + path)
"repo_name": "RealTimeWeb/Blockpy-Server",
"path": "controllers/assignments.py",
"copies": "1",
"size": "11052",
"license": "mit",
"hash": -8451600509893115000,
"line_mean": 46.4377682403,
"line_max": 245,
"alpha_frac": 0.6445892146,
"autogenerated": false,
"ratio": 3.9741100323624594,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5118699246962459,
"avg_score": null,
"num_lines": null
} |
from functools import wraps
# from pprint import pprint
from tornado.web import HTTPError
from rima.exceptions import (
ResourceNotFoundError, ConflictError, MissingParameterError,
UnknownError
)#, FormatError, UnknownError
# def debug(function):
# @wraps(function)
# def debug_wrapper(*args, **kwargs):
# print("IN DEBUG WRAPPER")
# output = {
# "function": function.__name__,
# "args": args,
# "kwargs": kwargs,
# }
# result = function(*args, **kwargs)
# output.update({"result": result})
# print("-"*100)
# pprint(output, indent=4, depth=4)
# print("-"*100)
# return result
# return debug_wrapper
# @debug
def error_handler(method):
    """Decorator for Tornado handler methods: write the method's result to
    the response and map domain exceptions to HTTP status codes.

    400 -- MissingParameterError / KeyError
    404 -- ResourceNotFoundError
    409 -- ConflictError
    500 -- UnknownError and any other unexpected exception
    """
    @wraps(method)
    def error_handler_wrapper(self, *args, **kwargs):
        # Leftover debug print() calls from the original were removed; the
        # unused "as error" bindings were dropped as well.
        try:
            result = method(self, *args, **kwargs)
            self.write(result)
        except (MissingParameterError, KeyError):
            raise HTTPError(400)
        except ResourceNotFoundError:
            raise HTTPError(404)
        except ConflictError:
            raise HTTPError(409)
        except Exception:
            # Covers UnknownError and anything else: same 500 as before.
            raise HTTPError(500)
    return error_handler_wrapper
| {
"repo_name": "zoufou/rima",
"path": "rima/decorators.py",
"copies": "2",
"size": "1534",
"license": "apache-2.0",
"hash": 9144341437423627000,
"line_mean": 25,
"line_max": 64,
"alpha_frac": 0.5697522816,
"autogenerated": false,
"ratio": 4.272980501392758,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5842732782992758,
"avg_score": null,
"num_lines": null
} |
from functools import wraps
from pprint import pprint
import sys
from flask import request, json
from theroot.users_bundle.helpers.current_user_helper import CurrentUserHelper
from theroot.users_bundle.helpers.users_and_roles import get_user_roles
'''
This module provides a function decorator to use with your routing functions that allows to establish
who has access to a given resource. For the sake of clarity I associate some constants to the numbers that represent
each one of the access levels.
'''
# Constants representing the possible parameters passed into the decorator
ADMINISTRATOR_ONLY = 0
CURRENT_USER_ONLY = 1
ADMINISTRATOR_OR_CURRENT_USER = 2
ALL_REGISTERED_USERS = 3

# Constants representing the roles in the table
# (role ids, distinct from the access levels above despite the overlapping
# numbers)
ADMINISTRATOR = 0
USER = 1
# This is how we create function decorators in Python, which are used in order
# to modify the behavior of a function at run type.
# I use this pattern in order to separate the ACL logic from the "front-end" controller,
# making the code more solid.
def router_acl(user_type):
    '''
    Function decorator enforcing access control on routing functions.

    *user_type* is one of the module-level access levels (ADMINISTRATOR_ONLY,
    CURRENT_USER_ONLY, ADMINISTRATOR_OR_CURRENT_USER, ALL_REGISTERED_USERS).

    The previous implementation duplicated the entire decision tree for GET
    and POST; the only per-method difference -- where the target user id is
    read from -- is now isolated in _target_id(), which is evaluated lazily
    so requests missing the id only fail in the branches that actually need
    it (matching the original behavior). Debug print() calls were removed.
    '''
    def router_acl_decorator(fn):
        @wraps(fn)  # it basically updates the context with the new function, variables, etc.
        def func_wrapper(*args, **kwargs):
            current_user = CurrentUserHelper()

            # Only GET and POST were handled before; any other method falls
            # through and returns None, exactly as the original did.
            if request.method not in ('GET', 'POST'):
                return None

            def _fail(status_code=403):
                # e.g. http://localhost:5001/api/user/edit?id=24 with a
                # wrong user id exercises the 403 path.
                response = json.jsonify({"status": "fail"})
                response.status_code = status_code
                return response

            def _target_id():
                # The id of the user the request is about: query string on
                # GET, JSON body on POST.
                if request.method == 'GET':
                    return int(request.args.get('user_id'))
                return int(request.json['data']['id'])

            if user_type == CURRENT_USER_ONLY:
                if current_user.id == _target_id():
                    return fn()
                return _fail()
            elif user_type == ADMINISTRATOR_ONLY:
                roles = get_user_roles(current_user.id)
                if ADMINISTRATOR in roles:
                    return fn()
                return _fail()
            elif user_type == ADMINISTRATOR_OR_CURRENT_USER:
                roles = get_user_roles(current_user.id)
                # Short-circuit: _target_id() is only read for non-admins,
                # as in the original.
                if ADMINISTRATOR in roles or current_user.id == _target_id():
                    return fn()
                return _fail()
            elif user_type == ALL_REGISTERED_USERS:
                roles = get_user_roles(current_user.id)
                if USER in roles:
                    return fn()
                return _fail()
            else:
                # fallback in case no valid type is provided
                return _fail(400)
        return func_wrapper
    return router_acl_decorator
| {
"repo_name": "Deviad/adhesive",
"path": "theroot/users_bundle/helpers/router_acl.py",
"copies": "1",
"size": "5629",
"license": "mit",
"hash": -6972473347590904000,
"line_mean": 42.6356589147,
"line_max": 116,
"alpha_frac": 0.506128975,
"autogenerated": false,
"ratio": 5.089511754068716,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.000639765008772168,
"num_lines": 129
} |
from functools import wraps
from .process_kwargs import process_kwargs
def reducer_wrapper(process_data=None, defaults_process=None, defaults_data=None):
    """Decorator factory for reducers.

    The wrapped reducer may be called with raw data or with a Flask request
    (detected by the presence of a ``get_json`` method); for a request the
    payload is pulled from the body and the query string becomes the kwargs.
    ``defaults_process`` / ``defaults_data`` describe which kwargs are
    forwarded to ``process_data`` and to the reducer itself.
    """
    def decorator(func):
        @wraps(func)
        def wrapper(argument, **kwargs):
            process_kw = {}
            data_kw = {}
            # Flask requests expose get_json(); anything else is raw data.
            if hasattr(argument, 'get_json'):
                kwargs = argument.args
                data_in = [entry['data'] for entry in argument.get_json()]
            else:
                data_in = argument
            if defaults_process is not None:
                process_kw = process_kwargs(kwargs, defaults_process)
            if defaults_data is not None:
                data_kw = process_kwargs(kwargs, defaults_data)
            data = data_in if process_data is None else process_data(data_in, **process_kw)
            return func(data, **data_kw)
        # Expose the undecorated reducer for tests and other reducers.
        wrapper._original = func
        return wrapper
    return decorator
| {
"repo_name": "CKrawczyk/python-reducers-for-caesar",
"path": "panoptes_aggregation/reducers/reducer_wrapper.py",
"copies": "1",
"size": "1161",
"license": "apache-2.0",
"hash": -7510491724142544000,
"line_mean": 37.7,
"line_max": 82,
"alpha_frac": 0.5693367786,
"autogenerated": false,
"ratio": 4.58893280632411,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.00040160642570281126,
"num_lines": 30
} |
from functools import wraps
from proso.func import function_name
import inspect
import logging
import time
import uuid
LOGGER = logging.getLogger('django.request')

# name -> timestamp of the most recent timer() call for that name
TIMERS = {}


def timer(name):
    """Return seconds since the previous timer(name) call, or None the
    first time *name* is seen."""
    now = time.time()
    previous = TIMERS.get(name)
    TIMERS[name] = now
    return None if previous is None else now - previous
class timeit:
    """Measure wall-clock time either as a decorator or a context manager.

    Decorator use logs the wrapped function's duration; ``with timeit('x'):``
    logs the duration of the block. Output goes to the django.request logger
    at DEBUG level, tagged with the call site's file and line number.
    """

    def __init__(self, name=None):
        # name only matters for context-manager use; decorator use logs
        # the wrapped function's own name instead.
        if name is None:
            name = 'unknown'
        self._name = name
        # unique key into the module-level TIMERS dict used by timer()
        self._id = str(uuid.uuid1())

    def __call__(self, function):
        @wraps(function)
        def timed(*args, **kw):
            ts = time.time()
            result = function(*args, **kw)
            te = time.time()
            # Inspect the caller's frame so the log points at the call
            # site rather than at this wrapper.
            current_frame = inspect.currentframe()
            call_frame = inspect.getouterframes(current_frame, 3)
            LOGGER.debug('[TIMEIT] .../%s:%s -> %r took %2.2f seconds' % ('/'.join(call_frame[1][1].split('/')[-2:]), call_frame[1][2], function_name(function), te - ts))
            return result
        return timed

    def __enter__(self):
        # Remember the caller's frame so __exit__ logs the right location.
        current_frame = inspect.currentframe()
        self._call_frame = inspect.getouterframes(current_frame, 2)
        timer(self._id)

    def __exit__(self, exc_type, exc_value, traceback):
        # Second timer() call yields the elapsed time since __enter__.
        LOGGER.debug('[TIMEIT] .../%s:%s -> %r took %2.2f seconds' % ('/'.join(self._call_frame[1][1].split('/')[-2:]), self._call_frame[1][2], self._name, timer(self._id)))
| {
"repo_name": "adaptive-learning/proso-apps",
"path": "proso/time.py",
"copies": "1",
"size": "1426",
"license": "mit",
"hash": 6036060935783767000,
"line_mean": 27.52,
"line_max": 173,
"alpha_frac": 0.5694249649,
"autogenerated": false,
"ratio": 3.5472636815920398,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9615529138526724,
"avg_score": 0.00023190159306311756,
"num_lines": 50
} |
from functools import wraps
from pymongo.cursor import Cursor as OrigCursor
class Cursor(object):
    """Wrapper around a pymongo Cursor that boxes returned documents.

    Every dict coming back from the underlying cursor is passed through
    ``collection._boxing``, and methods returning a raw pymongo Cursor are
    re-wrapped so chained calls stay boxed.
    """

    @classmethod
    def mk_list(cls, collection, o_cursor_list):
        # Wrap a whole list of raw cursors at once.
        return [cls(collection, o_cursor)
                for o_cursor in o_cursor_list]

    def __init__(self, collection, o_cursor):
        self.collection = collection  # owning collection, supplies _boxing
        self.o_cursor = o_cursor      # underlying pymongo cursor

    def decorate_method(self, f):
        # Wrap a cursor method so a returned raw Cursor gets re-wrapped.
        @wraps(f)
        def decorated_function(*args, **kwargs):
            rtn = f(*args, **kwargs)
            if isinstance(rtn, OrigCursor):
                return Cursor(self.collection, rtn)
            return rtn
        return decorated_function

    def __getattr__(self, name):
        # Fallback for attributes not defined here: delegate to the raw
        # cursor, boxing dicts and wrapping callables on the way out.
        rtn = getattr(self.o_cursor, name)
        if isinstance(rtn, dict):
            return self.collection._boxing(rtn)
        elif callable(rtn):
            return self.decorate_method(rtn)
        return rtn

    def __getitem__(self, index):
        # Indexing mirrors pymongo: dicts are boxed, cursors re-wrapped.
        rtn = self.o_cursor.__getitem__(index)
        if isinstance(rtn, dict):
            return self.collection._boxing(rtn)
        elif isinstance(rtn, OrigCursor):
            return Cursor(self.collection, rtn)
        elif callable(rtn):
            return self.decorate_method(rtn)
        return rtn

    def __iter__(self):
        return self

    def next(self):
        # Python 2 style name; aliased to __next__ below for Python 3.
        rtn = self.o_cursor.next()
        if isinstance(rtn, dict):
            return self.collection._boxing(rtn)
        return rtn

    __next__ = next
| {
"repo_name": "CrowsT/MongoNorm",
"path": "mongonorm/cursor.py",
"copies": "1",
"size": "1484",
"license": "bsd-2-clause",
"hash": 3844238688854816300,
"line_mean": 28.0980392157,
"line_max": 51,
"alpha_frac": 0.5795148248,
"autogenerated": false,
"ratio": 3.89501312335958,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9974527948159579,
"avg_score": 0,
"num_lines": 51
} |
from functools import wraps
from pyramid.httpexceptions import HTTPBadRequest
def assert_condition_returned(condition, error_message='', exception=HTTPBadRequest):
    '''
    Decorator factory: run *condition* on the wrapped function's return
    value and raise *exception* (carrying the message and the offending
    value as its explanation) when the condition evaluates to True.
    '''
    def decorator(func):
        @wraps(func)
        def wrapper(*args, **kwargs):
            result = func(*args, **kwargs)
            if condition(result):
                raise exception(
                    explanation='{} {}'.format(error_message, result)
                )
            return result
        return wrapper
    return decorator
def assert_none_returned(error_message):
    """Fail when the wrapped function returns anything truthy."""
    # bool is exactly `lambda result: bool(result)`.
    return assert_condition_returned(
        condition=bool,
        error_message=error_message
    )
def assert_one_or_none_returned(error_message):
    """Fail when the wrapped function returns more than one item."""
    return assert_condition_returned(
        error_message=error_message,
        condition=lambda results: len(results) > 1,
    )
def assert_one_returned(error_message):
    """Fail unless the wrapped function returns exactly one item."""
    return assert_condition_returned(
        error_message=error_message,
        condition=lambda results: len(results) != 1,
    )
def assert_something_returned(error_message):
    """Fail when the wrapped function returns an empty collection."""
    return assert_condition_returned(
        error_message=error_message,
        condition=lambda results: len(results) == 0,
    )
def deduplicate(func):
    """Drop duplicate entries from the wrapped function's return value.

    Goes through set(), so the result's order is not preserved and the
    items must be hashable.
    """
    @wraps(func)
    def wrapper(*args, **kwargs):
        return list(set(func(*args, **kwargs)))
    return wrapper
def remove_from_return(keys=(), values=()):
    '''
    Removes dict items whose key is in *keys* or whose value is in *values*.

    Non-dict return values pass through untouched. The defaults were
    changed from mutable lists to tuples (mutable default arguments are a
    Python footgun); membership testing is unaffected.
    '''
    def remove_from_return_decorator(func):
        @wraps(func)
        def wrapper(*args, **kwargs):
            result = func(*args, **kwargs)
            if isinstance(result, dict):
                # Collect first to avoid mutating while iterating over.
                doomed = [k for k, v in result.items()
                          if k in keys or v in values]
                for k in doomed:
                    result.pop(k, None)
            return result
        return wrapper
    return remove_from_return_decorator
def catch_and_swap(catch=Exception, swap=None, details=None):
    """
    Catch ``catch`` and raise ``swap(details)`` instead; re-raise the
    original exception unchanged when no swap class is given.

    Fixes: use a bare ``raise`` to preserve the original traceback, and
    chain the swapped exception with ``from e`` so the root cause is
    kept on ``__cause__``.
    """
    def decorator(func):
        @wraps(func)
        def wrapper(*args, **kwargs):
            try:
                result = func(*args, **kwargs)
            except catch as e:
                if not swap:
                    raise  # preserve original traceback
                raise swap(details) from e
            else:
                return result
        return wrapper
    return decorator
| {
"repo_name": "ENCODE-DCC/snovault",
"path": "src/snovault/elasticsearch/searches/decorators.py",
"copies": "1",
"size": "2730",
"license": "mit",
"hash": -6082921946280025000,
"line_mean": 26.8571428571,
"line_max": 85,
"alpha_frac": 0.5732600733,
"autogenerated": false,
"ratio": 4.490131578947368,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.010322733744660655,
"num_lines": 98
} |
from functools import wraps
from pyramid.httpexceptions import (
HTTPMethodNotAllowed,
HTTPForbidden,
HTTPUnauthorized
)
def not_allowed(self):
    """Default HTTP verb handler: reject with 405 Method Not Allowed."""
    raise HTTPMethodNotAllowed
class View:
    """Base pyramid view that dispatches HTTP verbs to same-named methods.

    Subclasses override ``get``/``post``/``put``/``delete``; any verb left
    at its default answers 405.
    """

    def __init__(self, context, request):
        self.context = context
        self.request = request

    # Every verb defaults to 405 until a subclass overrides it.
    get = post = put = delete = not_allowed

    def options(self):
        # allow the js client to be on a different origin
        self.request.response.headers['Access-Control-Allow-Origin'] = '*'
        return {
            'permissions': self.get_permissions()
        }

    def get_permissions(self):
        # XXX: test this
        try:
            acls = self.context.__acl__
        except AttributeError:
            return []
        if callable(acls):
            acls = acls()
        granted = []
        for perm in set(entry[2] for entry in acls):
            if self.request.has_permission(perm):
                granted.append(perm)
        return granted

    def __call__(self):
        # Dispatch table instead of an if/elif chain; unknown verbs -> 405.
        handlers = {
            'GET': self.get,
            'POST': self.post,
            'PUT': self.put,
            'DELETE': self.delete,
            'OPTIONS': self.options,
        }
        try:
            handler = handlers[self.request.method]
        except KeyError:
            raise HTTPMethodNotAllowed
        return handler()

    @property
    def db(self):
        return self.request.db

    @property
    def user(self):
        return self.request.authenticated_userid

    def check_permission(self, perm):
        if not self.request.has_permission(perm):
            raise HTTPForbidden('no permission for {}'.format(perm))

    def add_view_count(self, dest=None):
        if dest is None:
            dest = self.context
        assert dest is not None
        if self.user is None:
            self.request.ts.article_viewed_by_ip(
                self.context,
                self.request.client_addr)
        else:
            self.request.ts.article_viewed_by_user(
                self.context,
                self.user)
class require_permission:
    """Method decorator for View subclasses: check ``permission`` on the
    request before invoking the wrapped view method.

    Raises 401 for anonymous users and 403 for authenticated users that
    lack the permission.  The wrapper exposes ``requires_permission``.
    """

    def __init__(self, permission):
        self.permission = permission

    def __call__(self, function):
        permission = self.permission

        @wraps(function)
        def wrapper(view, *args, **kwargs):
            if not view.request.has_permission(permission):
                if view.user is None:
                    raise HTTPUnauthorized('authentication required')
                raise HTTPForbidden(
                    'no permission for {}'.format(permission))
            return function(view, *args, **kwargs)

        wrapper.requires_permission = permission
        return wrapper
| {
"repo_name": "nthuion/nthuion-backend",
"path": "nthuion/views/base.py",
"copies": "1",
"size": "2829",
"license": "bsd-2-clause",
"hash": -3369127834899454500,
"line_mean": 26.2019230769,
"line_max": 74,
"alpha_frac": 0.5464828561,
"autogenerated": false,
"ratio": 4.6150081566068515,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5661491012706852,
"avg_score": null,
"num_lines": null
} |
from functools import wraps
from pyramid.response import Response
from .exceptions import (ValidationError,
MissingFieldError, InvalidFieldValueError)
import logging
LOG = logging.getLogger(__name__)
def validator(verify_func):
    """Decorate a view with per-view input validation.

    On POST requests the JSON body is passed through ``verify_func`` and
    the cleaned result is attached to the request as ``cleaned_data``;
    the view is then invoked as usual.  Non-POST requests pass straight
    through.
    """
    def decorator(view_func):
        @wraps(view_func)
        def wrapper(cls):
            request = cls.request
            if request.method == "POST":
                request.cleaned_data = verify_func(request.json_body)
            return view_func(cls)
        return wrapper
    return decorator
def has_required_fields(required_fields, actual_fields):
    """Validate that every required field is present in the input.

    Args:
        required_fields: iterable of field names the input must have.
        actual_fields: iterable of the field names actually present
            (usually the keys of the input data).

    Raises:
        MissingFieldError: for the first required field that is absent.

    Returns:
        None.  (The previous docstring claimed a ``True``/``False``
        return, but the function has always signalled failure by
        raising — the docstring is fixed to match the behaviour.)
    """
    # Materialize once so membership tests are O(1) even for views/iterators.
    actual = set(actual_fields)
    for field in required_fields:
        if field not in actual:
            raise MissingFieldError(field,
                "Missing required field: %s" % field)
def check_create_poll(data):
    """Validate input for poll creation and normalize the options list.

    Fix: ``str.split(',')`` never returns an empty list ("" splits to
    [""]), so the original ``if splited:`` check was always true and the
    "non-empty options" validation was dead code.  Empty fragments are
    now filtered out so blank input actually raises.

    Raises:
        MissingFieldError: when 'name' or 'options' is absent.
        InvalidFieldValueError: when 'options' contains no non-empty option.
    """
    expected_fields = ("name", "options")
    has_required_fields(expected_fields, data.keys())
    # Keep only non-empty options; "a,,b" yields ["a", "b"].
    options = [opt for opt in data["options"].split(",") if opt]
    if options:
        return {"name": data["name"], "options": options}
    raise InvalidFieldValueError("options",
        "Must be a non-empty string. Multiple options must be separated by comma.",
        value=data["options"])
def check_vote(data):
    """Validate vote input: 'option' must be int-coercible, 'ip' present.

    Raises:
        MissingFieldError: when 'option' or 'ip' is absent.
        InvalidFieldValueError: when 'option' is not an integer.
    """
    has_required_fields(("option", "ip"), data.keys())
    try:
        index = int(data["option"])
    except ValueError:
        raise InvalidFieldValueError("option",
            "Option must be an integer", value=data["option"])
    return {"option": index, "ip": data["ip"]}
| {
"repo_name": "yeukhon/poll-app",
"path": "pollapp/validation.py",
"copies": "1",
"size": "2460",
"license": "mpl-2.0",
"hash": 6925061680785272000,
"line_mean": 31.3684210526,
"line_max": 66,
"alpha_frac": 0.6304878049,
"autogenerated": false,
"ratio": 4.416517055655296,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5547004860555296,
"avg_score": null,
"num_lines": null
} |
from functools import wraps
from pyrsistent._pmap import PMap, pmap
from pyrsistent._pset import PSet, pset
from pyrsistent._pvector import PVector, pvector
def freeze(o, strict=True):
    """
    Recursively convert plain Python containers into pyrsistent ones.

    - dict  -> pmap, recursively on values (but not keys)
    - list  -> pvector, recursively
    - tuple -> tuple, recursively
    - set   -> pset, not recursively

    With strict=True (the default), existing PMaps/PVectors are also
    re-frozen element-wise.  Set elements and dict keys are left alone:
    by convention they hold immutable (or hash-by-id) objects that this
    function could not convert anyway.

    >>> freeze(set([1, 2]))
    pset([1, 2])
    >>> freeze([1, {'a': 3}])
    pvector([1, pmap({'a': 3})])
    >>> freeze((1, []))
    (1, pvector([]))
    """
    kind = type(o)
    # Keep `strict and isinstance(...)` ordering: short-circuit matters.
    if kind is dict or (strict and isinstance(o, PMap)):
        return pmap({key: freeze(value, strict) for key, value in o.items()})
    if kind is list or (strict and isinstance(o, PVector)):
        return pvector(freeze(item, strict) for item in o)
    if kind is tuple:
        return tuple(freeze(item, strict) for item in o)
    if kind is set:
        # impossible to have anything that needs freezing inside a set
        return pset(o)
    return o
def thaw(o, strict=True):
    """
    Recursively convert pyrsistent containers into plain Python ones.

    - pvector -> list, recursively
    - pmap    -> dict, recursively on values (but not keys)
    - pset    -> set, not recursively
    - tuple   -> tuple, recursively

    With strict=True (the default), plain lists/dicts are thawed
    element-wise as well.

    >>> from pyrsistent import s, m, v
    >>> thaw(s(1, 2))
    {1, 2}
    >>> thaw(v(1, m(a=3)))
    [1, {'a': 3}]
    >>> thaw((1, v()))
    (1, [])
    """
    kind = type(o)
    if isinstance(o, PVector) or (strict and kind is list):
        return [thaw(item, strict) for item in o]
    if isinstance(o, PMap) or (strict and kind is dict):
        return {key: thaw(value, strict) for key, value in o.items()}
    if kind is tuple:
        return tuple(thaw(item, strict) for item in o)
    if isinstance(o, PSet):
        # impossible to thaw inside psets or sets
        return set(o)
    return o
def mutant(fn):
    """
    Isolate mutation to within the decorated function (with respect to
    its input arguments): every positional and keyword argument is
    frozen before the call, and the return value is frozen as well.
    """
    @wraps(fn)
    def inner_f(*args, **kwargs):
        frozen_args = [freeze(a) for a in args]
        frozen_kwargs = dict(freeze(item) for item in kwargs.items())
        return freeze(fn(*frozen_args, **frozen_kwargs))
    return inner_f
| {
"repo_name": "tobgu/pyrsistent",
"path": "pyrsistent/_helpers.py",
"copies": "1",
"size": "3232",
"license": "mit",
"hash": -633271564277723000,
"line_mean": 32.3195876289,
"line_max": 102,
"alpha_frac": 0.6417079208,
"autogenerated": false,
"ratio": 3.710677382319173,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9844935209876583,
"avg_score": 0.00149001864851804,
"num_lines": 97
} |
from functools import wraps
from random import *
from flask import render_template, flash, redirect, request, url_for, make_response
from sqlalchemy import desc
from sqlalchemy.exc import *
from vuln_corp import app
from vuln_corp import utils
from vuln_corp.choices import ISSUE_ASSIGNEES
from vuln_corp.forms import LoginForm, SignupForm, EditUserForm, EditIssueForm, IssueForm
from vuln_corp.models import db, User, Session, Groups, Issues
def get_user(f):
    """View decorator: resolve the current session and user from the
    'session_id' cookie and pass both to the view as keyword arguments.

    A missing sessions table (NoSuchTableError) yields user=None,
    session=None instead of an error.
    """
    @wraps(f)
    def decorated_function(*args, **kwargs):
        user = None
        session = None
        try:
            cookie = request.cookies.get('session_id')
            session = Session.query.filter(
                Session.session_id == cookie).first()
            if session is not None:
                user = session.get_user()
        except NoSuchTableError:
            pass
        return f(user=user, session=session, *args, **kwargs)
    return decorated_function
@app.route('/')
@app.route('/index')
def index():
    """Landing page.

    NOTE(review): the template's ``user`` variable is actually the raw
    session-id cookie value — legacy behaviour kept as-is.
    """
    session_cookie = request.cookies.get('session_id')
    return render_template('index.html', title='Home', user=session_cookie,
                           group=request.cookies.get('group'))
@app.route('/login', methods=['GET', 'POST'])
def login():
    """Log a user in, creating a new session row and cookies on success.

    Fixes applied:
    * ``form.validate_on_submit`` was referenced without calling it, so
      that branch was always truthy; since the invalid case already
      returned, a plain ``else`` is used.
    * the user lookup is checked against ``None`` before use — the old
      ``user.exists`` raised AttributeError for unknown usernames.
    * the failure fall-through rendered the bogus template path
      ``'/login'`` without the form; it now re-renders ``login.html``.

    NOTE(review): passwords are stored/compared in plaintext — this app
    is intentionally vulnerable, so that is left unchanged.
    """
    # Log the user out if they are already logged in.
    if request.cookies.get('session_id') is not None:
        response = make_response(redirect('/login'))
        response.set_cookie('session_id', '', expires=0)
        response.set_cookie('group', '', expires=0)
        return response
    form = LoginForm()
    if request.method == 'POST':
        if not form.validate_on_submit():
            flash(
                'login request Failed for username= "%s", password=%s' % (form.username.data, str(form.password.data)),
                'danger')
            return render_template('login.html', form=form, group=request.cookies.get('group'))
        else:
            user = User.query.filter(User.username == request.form.get('username')).first()
            password = request.form.get('password')
            if user is not None:
                if user.password == password:
                    session_id = request.cookies.get('session_id', password + str(randint(1, 999)))
                    new_session = Session(user.username, session_id, True)
                    db.session.add(new_session)
                    db.session.commit()
                    response = make_response(redirect('/profile'))
                    response.set_cookie('session_id', value=session_id)
                    group = Groups.query.filter(Groups.id == user.group).first().groupname
                    response.set_cookie('group', value=group)
                    return response
                flash('Password "%s" is incorrect' % form.password.data, 'danger')
            else:
                flash('User "%s" does not exist' % form.username.data, 'danger')
            return render_template('login.html', title='Sign in', form=form,
                                   group=request.cookies.get('group'))
    elif request.method == 'GET':
        return render_template('login.html', title='Sign in', form=form, group=request.cookies.get('group'))
@app.route("/logout", methods=['GET'])
@get_user
def logout(*args, **kwargs):
    """Log the current user out and clear the auth cookies.

    Fixes: guard against a missing session (``session.active`` used to
    raise AttributeError when the cookie was stale), and clear the
    'group' cookie too, matching what login() does on re-login.
    """
    session = kwargs.get('session')
    if session is not None:
        session.active = False
        db.session.commit()
    response = make_response(redirect('/index'))
    response.set_cookie('session_id', '', expires=0)
    response.set_cookie('group', '', expires=0)
    return response
@app.route('/signup', methods=['GET', 'POST'])
def signup():
    """Register a new account; redirect to the login page on success."""
    form = SignupForm()
    form.group.choices = [(g.id, g.groupname) for g in Groups.query.all()]
    if request.method == 'GET':
        return render_template('signup.html', form=form, group=request.cookies.get('group'))
    if request.method == 'POST':
        if not form.validate_on_submit():
            flash('signup FAILED for requested username="{}", email="{}"'.format(form.username.data,
                                                                                 str(form.email.data)), 'danger')
            return render_template('signup.html', title='Signup', form=form, group=request.cookies.get('group'))
        newuser = User(request.form.get('username'), request.form.get('firstname'), request.form.get('lastname'),
                       request.form.get('email'), request.form.get('password'), request.form.get('group'),
                       request.form.get('bio'))
        db.session.add(newuser)
        db.session.commit()
        flash('Signup successful for requested username="{}", email="{}"'.format(form.username.data,
                                                                                 str(form.email.data)), 'success')
        return redirect(url_for('login'))
@app.route('/profile')
@get_user
def profile(*args, **kwargs):
    """Show the logged-in user's own profile page."""
    return render_template('profile.html',
                           user=kwargs.get('user'),
                           session=kwargs.get('session'),
                           group=request.cookies.get('group'))
@app.route('/testdb')
def testdb():
    """Smoke-test the database connection with a trivial SELECT 1."""
    ok = db.session.query("1").from_statement("SELECT 1").all()
    return 'It works.' if ok else 'Something is broken.'
@app.route('/users')
@get_user
def users(*args, **kwargs):
    """List every registered user."""
    all_users = User.query.all()
    return render_template('show_users.html', users=all_users,
                           user=kwargs.get('user'),
                           group=request.cookies.get('group'))
@app.route('/sessions')
@get_user
def sessions(*args, **kwargs):
    """List every session record (intentionally public in this app)."""
    all_sessions = Session.query.all()
    return render_template('show_sessions.html', sessions=all_sessions,
                           user=kwargs.get('user'),
                           group=request.cookies.get('group'))
@app.route('/about')
@get_user
def about(*args, **kwargs):
    """Static about page."""
    return render_template('about.html', user=kwargs.get('user'),
                           group=request.cookies.get('group'))
@app.route('/users/<username>')
@get_user
def viewuser(username, *args, **kwargs):
    """Show another user's public profile page."""
    target = User.query.filter(User.username == username).first()
    return render_template('user.html',
                           session=kwargs.get('session'),
                           user=kwargs.get('user'),
                           viewuser=target,
                           group=request.cookies.get('group'))
@app.route('/settings', methods=['GET', 'POST'])
@get_user
def settings(*args, **kwargs):
    """Let the logged-in user edit their own account details."""
    user = kwargs.get('user')
    session = kwargs.get('session')
    form = EditUserForm(request.form)
    form.group.choices = [(g.id, g.groupname) for g in Groups.query.all()]
    # Pre-populate the form with the user's current values.
    form.firstname.default = user.firstname.title()
    form.lastname.default = user.lastname.title()
    form.email.default = user.email
    form.password.default = user.password
    form.group.default = int(user.group)
    form.bio.default = user.bio
    form.process()
    if request.method == 'GET':
        return render_template('/settings.html', user=user, session=session, form=form,
                               group=request.cookies.get('group'))
    if request.method == 'POST':
        if not form.validate_on_submit():
            flash('Error Validating form', 'danger')
            return render_template('/settings.html', form=form, user=user, session=session,
                                   group=request.cookies.get('group'))
        user.firstname = request.form.get('firstname')
        user.lastname = request.form.get('lastname')
        user.password = request.form.get('password')
        user.email = request.form.get('email')
        user.group = request.form.get('group')
        user.bio = request.form.get('bio')
        db.session.commit()
        return redirect(url_for('profile'))
@app.errorhandler(404)
def page_not_found(error):
    """Render the custom 404 page with the proper status code."""
    return render_template('404.html'), 404
@app.route('/issues')
@get_user
def issues(*args, **kwargs):
    """Kanban-style issue overview, one column per status."""
    def by_status(status):
        # Newest issues first within each column.
        return (Issues.query.filter(Issues.status == status)
                .order_by(desc(Issues.issued_date)).all())
    return render_template('issues.html',
                           user=kwargs.get('user'),
                           session=kwargs.get('session'),
                           issues_new=by_status('New'),
                           issues_in_progress=by_status('In Progress'),
                           issues_done=by_status('Closed'),
                           group=request.cookies.get('group'))
@app.route('/issues/<id>', methods=['GET', 'POST'])
def issue(id, *args, **kwargs):
    """Display and edit a single issue.

    Fixes applied:
    * the issue was previously fetched by concatenating the raw URL
      parameter into a SQL string and executing it — a textbook SQL
      injection, which also left ``sql_issue`` unbound for unknown ids;
      the lookup now goes through the ORM helper only.
    * removed a stray debug ``print``.
    """
    user = kwargs.get('user')
    # Coerce the URL parameter; non-numeric ids cannot reach the query.
    issue = utils.get_issue_from_id(int(id))
    form = EditIssueForm()
    # Pre-populate the form with the issue's current values.
    form.summary.default = issue.summary
    form.title.default = issue.title
    form.assignee.default = issue.assignee
    form.status.default = issue.status
    form.assignee.choices = ISSUE_ASSIGNEES
    form.process()
    if request.method == 'POST':
        if not form.validate_on_submit():
            flash('The issue was unable to be updated', 'danger')
            return render_template('issue.html', issue=issue, form=form, user=user, group=request.cookies.get('group'))
        issue.title = request.form.get('title')
        issue.summary = request.form.get('summary')
        issue.assignee = request.form.get('assignee')
        issue.status = request.form.get('status')
        db.session.commit()
        return redirect('issues/' + str(issue.id))
    elif request.method == 'GET':
        return render_template('issue.html', issue=issue, form=form, user=user, group=request.cookies.get('group'))
@app.route('/issues/create', methods=['GET', 'POST'])
@get_user
def create_issue(*args, **kwargs):
    """Create a new issue authored by the logged-in user.

    Cleanups: removed an unused ``username`` local, the unreachable
    trailing ``return``, and the redundant second ``validate_on_submit()``
    call (the invalid case already returned).
    """
    user = kwargs.get('user')
    form = IssueForm()
    if request.method == 'POST':
        if not form.validate_on_submit():
            return render_template('newissue.html', form=form, group=request.cookies.get('group'))
        newissue = Issues(form.summary.data, form.title.data, user.username)
        db.session.add(newissue)
        db.session.commit()
        return redirect(url_for('issues'))
    elif request.method == 'GET':
        return render_template('newissue.html', form=form, user=user, group=request.cookies.get('group'))
| {
"repo_name": "jeff350/vuln-corp",
"path": "vuln_corp/views.py",
"copies": "1",
"size": "10866",
"license": "mit",
"hash": 4056743714003733000,
"line_mean": 39.6966292135,
"line_max": 119,
"alpha_frac": 0.6156819437,
"autogenerated": false,
"ratio": 3.826056338028169,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9934590966484278,
"avg_score": 0.0014294630487779614,
"num_lines": 267
} |
from functools import wraps
from redis import Redis
class Redisns(object):
    """Thin Redis proxy that transparently prefixes keys with a namespace.

    Commands in ``key_commands`` get the namespace prepended to their
    first argument; those in ``mkeys_commands`` get it prepended to every
    positional argument.  All other attributes are proxied untouched.

    Fix: a missing namespace used to be stored as ``None`` and formatted
    into keys as the literal string "None"; keys are now left unprefixed
    when no namespace is configured.
    """

    # Commands whose first positional argument is a key.
    key_commands = [
        'set',
        '__setitem__',
        'get',
        '__getitem__',
        'getset',
        'incr',
        'decr',
        'exists',
        'delete',
        'get_type',
        'keys',
        'ttl',
        'expire',
        'push',
        'llen',
        'lrange',
        'ltrim',
        'lindex',
        'pop',
        'lset',
        'lrem',
        'sort',
        'sadd',
        'srem',
        'spop',
        'scard',
        'sismember',
        'smembers',
        'srandmember',
        'zadd',
        'zrem',
        'zrange',
        'zrangebyscore',
        'zcard',
        'zscore',
        'move'
    ]
    # Commands where every positional argument is a key.
    mkeys_commands = [
        'mget',
        'rename',
        'poppush',
        'smove',
        'sinter',
        'sinterstore',
        'sunion',
        'sunionstore',
        'sdiff',
        'sdiffstore',
    ]

    def __init__(self, namespace=None, *args, **kwargs):
        if namespace and not namespace.endswith(":"):
            namespace += ":"
        # Store '' (not None) so the key formatting below is a no-op
        # when no namespace is configured.
        self.namespace = namespace or ""
        self._db = Redis(*args, **kwargs)

    def __getattr__(self, attr):
        # Only called for names not found on the instance: proxy to Redis.
        if not hasattr(self._db, attr):
            raise AttributeError("'Redis' class has no attribute '%s'" % attr)

        @wraps(self._db.__getattribute__(attr))
        def missing_method(*args, **kwargs):
            args = list(args)
            # Tolerate instances whose namespace was set to None directly.
            prefix = self.namespace or ""
            if attr in Redisns.key_commands:
                args[0] = "{0}{1}".format(prefix, args[0])
            elif attr in Redisns.mkeys_commands:
                args = ["{0}{1}".format(prefix, a) for a in args]
            return self._db.__getattribute__(attr)(*args, **kwargs)
        return missing_method

    def __getitem__(self, attr):
        return self.get(attr)

    def __setitem__(self, attr, value):
        return self.set(attr, value)

    def __delitem__(self, attr):
        return self.delete(attr)
| {
"repo_name": "rafacv/redisns",
"path": "__init__.py",
"copies": "1",
"size": "2657",
"license": "mit",
"hash": 4534189385886975500,
"line_mean": 33.0641025641,
"line_max": 78,
"alpha_frac": 0.3537824614,
"autogenerated": false,
"ratio": 5.080305927342256,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.0025354428815527216,
"num_lines": 78
} |
from functools import wraps
from re import sub
from flask import Blueprint, render_template, url_for, abort, g
from flask.ext.admin import babel
from flask.ext.admin._compat import with_metaclass
from flask.ext.admin import helpers as h
def expose(url='/', methods=('GET',)):
    """
    Use this decorator to expose views in your view classes.

    :param url:
        Relative URL for the view
    :param methods:
        Allowed HTTP methods. By default only GET is allowed.
    """
    def wrap(f):
        urls = getattr(f, '_urls', None)
        if urls is None:
            urls = f._urls = []
        urls.append((url, methods))
        return f
    return wrap
def expose_plugview(url='/'):
    """
    Decorator to expose Flask's pluggable view classes
    (``flask.views.View`` or ``flask.views.MethodView``).

    :param url:
        Relative URL for the view

    .. versionadded:: 1.0.4
    """
    def wrap(v):
        handler = expose(url, v.methods)
        target = v.as_view(v.__name__) if hasattr(v, 'as_view') else v
        return handler(target)
    return wrap
# Base views
def _wrap_view(f):
    """Wrap an exposed view method: record the current view for template
    helpers, then run the access check before dispatching.

    Fix: the local holding ``self._handle_view``'s result was named
    ``abort``, shadowing the ``flask.abort`` imported at module level;
    renamed to avoid the shadowing.
    """
    @wraps(f)
    def inner(self, *args, **kwargs):
        # Store current admin view
        h.set_current_view(self)
        # Check if administrative piece is accessible
        abort_response = self._handle_view(f.__name__, **kwargs)
        if abort_response is not None:
            return abort_response
        return f(self, *args, **kwargs)
    return inner
class AdminViewMeta(type):
    """
    View metaclass.

    Does some precalculations (like getting list of view methods from the class) to avoid
    calculating them for each view class instance.
    """
    def __init__(cls, classname, bases, fields):
        type.__init__(cls, classname, bases, fields)
        # Gather exposed views: every attribute carrying `_urls` (set by
        # the `expose` decorator) becomes a routable view method.
        cls._urls = []
        cls._default_view = None
        for p in dir(cls):
            attr = getattr(cls, p)
            if hasattr(attr, '_urls'):
                # Collect methods
                for url, methods in attr._urls:
                    cls._urls.append((url, p, methods))
                    # The view mapped to '/' becomes the default view.
                    if url == '/':
                        cls._default_view = p
                # Wrap views so access checks run before dispatch.
                setattr(cls, p, _wrap_view(attr))
class BaseViewClass(object):
    # Plain base class so AdminViewMeta can be applied via with_metaclass.
    pass
class BaseView(with_metaclass(AdminViewMeta, BaseViewClass)):
    """
    Base administrative view.

    Derive from this class to implement your administrative interface piece. For example::

        class MyView(BaseView):
            @expose('/')
            def index(self):
                return 'Hello World!'
    """
    @property
    def _template_args(self):
        """
        Extra template arguments.

        If you need to pass some extra parameters to the template,
        you can override particular view function, contribute
        arguments you want to pass to the template and call parent view.

        These arguments are local for this request and will be discarded
        in the next request.

        Any value passed through ``_template_args`` will override whatever
        parent view function passed to the template.

        For example::

            class MyAdmin(ModelView):
                @expose('/')
                def index(self):
                    self._template_args['name'] = 'foobar'
                    self._template_args['code'] = '12345'
                    super(MyAdmin, self).index()
        """
        # Lazily create per-request storage on flask.g.
        args = getattr(g, '_admin_template_args', None)
        if args is None:
            args = g._admin_template_args = dict()
        return args

    def __init__(self, name=None, category=None, endpoint=None, url=None,
                 static_folder=None, static_url_path=None):
        """
        Constructor.

        :param name:
            Name of this view. If not provided, will default to the class name.
        :param category:
            View category. If not provided, this view will be shown as a top-level menu item. Otherwise, it will
            be in a submenu.
        :param endpoint:
            Base endpoint name for the view. For example, if there's a view method called "index" and
            endpoint is set to "myadmin", you can use `url_for('myadmin.index')` to get the URL to the
            view method. Defaults to the class name in lower case.
        :param url:
            Base URL. If provided, affects how URLs are generated. For example, if the url parameter
            is "test", the resulting URL will look like "/admin/test/". If not provided, will
            use endpoint as a base url. However, if URL starts with '/', absolute path is assumed
            and '/admin/' prefix won't be applied.
        :param static_folder:
            Path to the static folder served by this view's blueprint.
        :param static_url_path:
            Static URL Path. If provided, this specifies the path to the static url directory.
        """
        self.name = name
        self.category = category
        self.endpoint = endpoint
        self.url = url
        self.static_folder = static_folder
        self.static_url_path = static_url_path
        # Initialized from create_blueprint
        self.admin = None
        self.blueprint = None
        # Default view: the method exposed at '/' (set by the metaclass).
        if self._default_view is None:
            raise Exception(u'Attempted to instantiate admin view %s without default view' % self.__class__.__name__)

    def create_blueprint(self, admin):
        """
        Create Flask blueprint.
        """
        # Store admin instance
        self.admin = admin
        # If endpoint name is not provided, get it from the class name
        if self.endpoint is None:
            self.endpoint = self.__class__.__name__.lower()
        # If the static_url_path is not provided, use the admin's
        if not self.static_url_path:
            self.static_url_path = admin.static_url_path
        # If url is not provided, generate it from endpoint name
        if self.url is None:
            if self.admin.url != '/':
                self.url = '%s/%s' % (self.admin.url, self.endpoint)
            else:
                if self == admin.index_view:
                    self.url = '/'
                else:
                    self.url = '/%s' % self.endpoint
        else:
            if not self.url.startswith('/'):
                self.url = '%s/%s' % (self.admin.url, self.url)
        # If we're working from the root of the site, set prefix to None
        if self.url == '/':
            self.url = None
        # If name is not provided, use capitalized endpoint name
        if self.name is None:
            self.name = self._prettify_name(self.__class__.__name__)
        # Create blueprint and register rules
        self.blueprint = Blueprint(self.endpoint, __name__,
                                   url_prefix=self.url,
                                   subdomain=self.admin.subdomain,
                                   template_folder='templates',
                                   static_folder=self.static_folder,
                                   static_url_path=self.static_url_path)
        # Register every URL the metaclass collected from @expose.
        for url, name, methods in self._urls:
            self.blueprint.add_url_rule(url,
                                        name,
                                        getattr(self, name),
                                        methods=methods)
        return self.blueprint

    def render(self, template, **kwargs):
        """
        Render template

        :param template:
            Template path to render
        :param kwargs:
            Template arguments
        """
        # Store self as admin_view
        kwargs['admin_view'] = self
        kwargs['admin_base_template'] = self.admin.base_template
        # Provide i18n support even if flask-babel is not installed
        # or enabled.
        kwargs['_gettext'] = babel.gettext
        kwargs['_ngettext'] = babel.ngettext
        kwargs['h'] = h
        # Contribute extra arguments (request-local overrides win).
        kwargs.update(self._template_args)
        return render_template(template, **kwargs)

    def _prettify_name(self, name):
        """
        Prettify a class name by splitting the name on capitalized characters. So, 'MySuperClass' becomes 'My Super Class'

        :param name:
            String to prettify
        """
        return sub(r'(?<=.)([A-Z])', r' \1', name)

    def is_visible(self):
        """
        Override this method if you want dynamically hide or show administrative views
        from Flask-Admin menu structure

        By default, item is visible in menu.

        Please note that item should be both visible and accessible to be displayed in menu.
        """
        return True

    def is_accessible(self):
        """
        Override this method to add permission checks.

        Flask-Admin does not make any assumptions about the authentication system used in your application, so it is
        up to you to implement it.

        By default, it will allow access for everyone.
        """
        return True

    def _handle_view(self, name, **kwargs):
        """
        This method will be executed before calling any view method.

        By default, it will check if the admin class is accessible and if it is not it will
        throw HTTP 404 error.

        :param name:
            View function name
        :param kwargs:
            View function arguments
        """
        if not self.is_accessible():
            return abort(403)

    @property
    def _debug(self):
        # Debug flag mirrors the owning Flask app's debug setting.
        if not self.admin or not self.admin.app:
            return False
        return self.admin.app.debug
class AdminIndexView(BaseView):
    """
    Default administrative interface index page when visiting the ``/admin/`` URL.

    It can be overridden by passing your own view class to the ``Admin`` constructor::

        class MyHomeView(AdminIndexView):
            @expose('/')
            def index(self):
                arg1 = 'Hello'
                return render_template('adminhome.html', arg1=arg1)

        admin = Admin(index_view=MyHomeView())

    Default values for the index page are:

    * If a name is not provided, 'Home' will be used.
    * If an endpoint is not provided, will default to ``admin``
    * Default URL route is ``/admin``.
    * Automatically associates with static folder.
    * Default template is ``admin/index.html``
    """
    def __init__(self, name=None, category=None,
                 endpoint=None, url=None,
                 template='admin/index.html'):
        # Fall back to translated 'Home', endpoint 'admin', url '/admin'.
        super(AdminIndexView, self).__init__(name or babel.lazy_gettext('Home'),
                                             category,
                                             endpoint or 'admin',
                                             url or '/admin',
                                             'static')
        self._template = template

    @expose()
    def index(self):
        # Render the configured index template.
        return self.render(self._template)
class MenuItem(object):
    """
    Node in the admin menu tree: either a category (no backing view) or a
    leaf backed by an admin view.
    """
    def __init__(self, name, view=None):
        self.name = name
        self._view = view
        self._children = []
        self._children_urls = set()
        self._cached_url = None
        # Categories have no URL of their own.
        self.url = view.url if view is not None else None

    def add_child(self, view):
        # `view` is itself a MenuItem.
        self._children.append(view)
        self._children_urls.add(view.url)

    def get_url(self):
        if self._view is None:
            return None
        if not self._cached_url:
            # Resolve and memoize the default view's URL.
            self._cached_url = url_for('%s.%s' % (self._view.endpoint,
                                                  self._view._default_view))
        return self._cached_url

    def is_active(self, view):
        # Active when it IS this item's view, or one of its children.
        return view == self._view or view.url in self._children_urls

    def is_visible(self):
        return False if self._view is None else self._view.is_visible()

    def is_accessible(self):
        return False if self._view is None else self._view.is_accessible()

    def is_category(self):
        return self._view is None

    def get_children(self):
        return [c for c in self._children if c.is_accessible() and c.is_visible()]
class MenuLink(object):
    """
    Free-form additional menu entry: either a fixed URL or a named
    endpoint resolved lazily via ``url_for``.
    """
    def __init__(self, name, url=None, endpoint=None):
        self.name = name
        self.url = url
        self.endpoint = endpoint

    def get_url(self):
        # An explicit URL wins; otherwise resolve the endpoint.
        return self.url or url_for(self.endpoint)

    def is_visible(self):
        return True

    def is_accessible(self):
        return True
class Admin(object):
    """
    Collection of the admin views. Also manages menu structure.
    """
    def __init__(self, app=None, name=None,
                 url=None, subdomain=None,
                 index_view=None,
                 translations_path=None,
                 endpoint=None,
                 static_url_path=None,
                 base_template=None):
        """
        Constructor.

        :param app:
            Flask application object
        :param name:
            Application name. Will be displayed in the main menu and as a page title. Defaults to "Admin"
        :param url:
            Base URL
        :param subdomain:
            Subdomain to use
        :param index_view:
            Home page view to use. Defaults to `AdminIndexView`.
        :param translations_path:
            Location of the translation message catalogs. By default will use the translations
            shipped with Flask-Admin.
        :param endpoint:
            Base endpoint name for index view. If you use multiple instances of the `Admin` class with
            a single Flask application, you have to set a unique endpoint name for each instance.
        :param static_url_path:
            Static URL Path. If provided, this specifies the default path to the static url directory for
            all its views. Can be overridden in view configuration.
        :param base_template:
            Override base HTML template for all static views. Defaults to `admin/base.html`.
        """
        self.app = app
        self.translations_path = translations_path
        self._views = []
        self._menu = []
        self._menu_categories = dict()
        self._menu_links = []
        if name is None:
            name = 'Admin'
        self.name = name
        # Index view defines the admin's default endpoint and URL.
        self.index_view = index_view or AdminIndexView(endpoint=endpoint, url=url)
        self.endpoint = endpoint or self.index_view.endpoint
        self.url = url or self.index_view.url
        self.static_url_path = static_url_path
        self.subdomain = subdomain
        self.base_template = base_template or 'admin/base.html'
        # Add predefined index view
        self.add_view(self.index_view)
        # Register with application
        if app is not None:
            self._init_extension()

    def add_view(self, view):
        """
        Add a view to the collection.

        :param view:
            View to add.
        """
        # Add to views
        self._views.append(view)
        # If app was provided in constructor, register view with Flask app
        if self.app is not None:
            self.app.register_blueprint(view.create_blueprint(self))
            self._add_view_to_menu(view)

    def add_link(self, link):
        """
        Add link to menu links collection.

        :param link:
            Link to add.
        """
        self._menu_links.append(link)

    def _add_view_to_menu(self, view):
        """
        Add a view to the menu tree

        :param view:
            View to add
        """
        if view.category:
            # Categorized views share a MenuItem container, created lazily.
            category = self._menu_categories.get(view.category)
            if category is None:
                category = MenuItem(view.category)
                self._menu_categories[view.category] = category
                self._menu.append(category)
            category.add_child(MenuItem(view.name, view))
        else:
            self._menu.append(MenuItem(view.name, view))

    def init_app(self, app):
        """
        Register all views with the Flask application.

        :param app:
            Flask application instance
        """
        self.app = app
        self._init_extension()
        # Register views
        for view in self._views:
            app.register_blueprint(view.create_blueprint(self))
            self._add_view_to_menu(view)

    def _init_extension(self):
        # Record this instance on app.extensions['admin'], rejecting
        # duplicate endpoints or URL+subdomain combinations.
        if not hasattr(self.app, 'extensions'):
            self.app.extensions = dict()
        admins = self.app.extensions.get('admin', [])
        for p in admins:
            if p.endpoint == self.endpoint:
                raise Exception(u'Cannot have two Admin() instances with same'
                                u' endpoint name.')
            if p.url == self.url and p.subdomain == self.subdomain:
                raise Exception(u'Cannot assign two Admin() instances with same'
                                u' URL and subdomain to the same application.')
        admins.append(self)
        self.app.extensions['admin'] = admins

    def menu(self):
        """
        Return the menu hierarchy.
        """
        return self._menu

    def menu_links(self):
        """
        Return menu links.
        """
        return self._menu_links
| {
"repo_name": "wilsaj/flask-admin",
"path": "flask_admin/base.py",
"copies": "1",
"size": "17990",
"license": "bsd-3-clause",
"hash": -8630699038251922000,
"line_mean": 30.5614035088,
"line_max": 126,
"alpha_frac": 0.5423012785,
"autogenerated": false,
"ratio": 4.648578811369509,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.569088008986951,
"avg_score": null,
"num_lines": null
} |
from functools import wraps
from re import sub
from flask import Blueprint, render_template, url_for, abort, g
from flask.ext.admin import babel
from flask.ext.admin import helpers as h
def expose(url='/', methods=('GET',)):
    """
    Use this decorator to expose views in your view classes.

    :param url:
        Relative URL for the view
    :param methods:
        Allowed HTTP methods. By default only GET is allowed.
    """
    def decorator(view_fn):
        # Accumulate (url, methods) pairs on the function object itself; the
        # view metaclass later collects them to build the routing table.
        view_fn._urls = getattr(view_fn, '_urls', [])
        view_fn._urls.append((url, methods))
        return view_fn
    return decorator
def expose_plugview(url='/'):
    """
    Decorator to expose Flask's pluggable view classes
    (``flask.views.View`` or ``flask.views.MethodView``).

    :param url:
        Relative URL for the view

    .. versionadded:: 1.0.4
    """
    def decorator(view_cls):
        # Convert the pluggable view class into a plain view function and tag
        # it with the (url, methods) pair, exactly as ``expose`` would.
        methods = view_cls.methods
        view_fn = view_cls.as_view(view_cls.__name__)
        if not hasattr(view_fn, '_urls'):
            view_fn._urls = []
        view_fn._urls.append((url, methods))
        return view_fn
    return decorator
# Base views
def _wrap_view(f):
    """
    Wrap a view method so that on every request the current admin view is
    recorded and the ``_handle_view`` access check runs before the view body.

    :param f:
        Unbound view method to wrap.
    """
    @wraps(f)
    def inner(self, **kwargs):
        # Store current admin view
        h.set_current_view(self)
        # Check if administrative piece is accessible; a non-None result
        # (e.g. an abort() response) short-circuits the actual view.
        # NOTE: previously this local was named ``abort``, shadowing the
        # ``flask.abort`` import at the top of the file; renamed for clarity.
        abort_response = self._handle_view(f.__name__, **kwargs)
        if abort_response is not None:
            return abort_response
        return f(self, **kwargs)
    return inner
class AdminViewMeta(type):
    """
    View metaclass.

    Precomputes the list of exposed view methods for the class so the work is
    not repeated for every view-class instance.
    """
    def __init__(cls, classname, bases, fields):
        type.__init__(cls, classname, bases, fields)
        cls._urls = []
        cls._default_view = None
        # Walk every attribute and collect the routing info that the
        # @expose decorator stashed on view methods.
        for attr_name in dir(cls):
            member = getattr(cls, attr_name)
            if not hasattr(member, '_urls'):
                continue
            for url, methods in member._urls:
                cls._urls.append((url, attr_name, methods))
                # The method mapped to '/' becomes the class default view.
                if url == '/':
                    cls._default_view = attr_name
            # Replace the raw method with the access-checking wrapper.
            setattr(cls, attr_name, _wrap_view(member))
class BaseView(object):
    """
    Base administrative view.
    Derive from this class to implement your administrative interface piece. For example::
        class MyView(BaseView):
            @expose('/')
            def index(self):
                return 'Hello World!'
    """
    # Python 2-style metaclass hook: AdminViewMeta collects @expose routes.
    __metaclass__ = AdminViewMeta
    @property
    def _template_args(self):
        """
        Extra template arguments.
        If you need to pass some extra parameters to the template,
        you can override particular view function, contribute
        arguments you want to pass to the template and call parent view.
        These arguments are local for this request and will be discarded
        in next request.
        Any value passed through ``_template_args`` will override whatever
        parent view function passed to the template.
        For example::
            class MyAdmin(ModelView):
                @expose('/')
                def index(self):
                    self._template_args['name'] = 'foobar'
                    self._template_args['code'] = '12345'
                    super(MyAdmin, self).index()
        """
        # Stored on flask's per-request ``g`` so the values never leak
        # between requests.
        args = getattr(g, '_admin_template_args', None)
        if args is None:
            args = g._admin_template_args = dict()
        return args
    def __init__(self, name=None, category=None, endpoint=None, url=None, static_folder=None):
        """
        Constructor.
        :param name:
            Name of this view. If not provided, will be defaulted to the class name.
        :param category:
            View category. If not provided, will be shown as a top-level menu item. Otherwise, will
            be in a submenu.
        :param endpoint:
            Base endpoint name for the view. For example, if there's view method called "index" and
            endpoint was set to "myadmin", you can use `url_for('myadmin.index')` to get URL to the
            view method. By default, equals to the class name in lower case.
        :param url:
            Base URL. If provided, affects how URLs are generated. For example, if url parameter
            equals to "test", resulting URL will look like "/admin/test/". If not provided, will
            use endpoint as a base url. However, if URL starts with '/', absolute path is assumed
            and '/admin/' prefix won't be applied.
        """
        self.name = name
        self.category = category
        self.endpoint = endpoint
        self.url = url
        self.static_folder = static_folder
        # Initialized from create_blueprint
        self.admin = None
        self.blueprint = None
        # Default view (filled in by AdminViewMeta from the @expose('/') method)
        if self._default_view is None:
            raise Exception('Attempted to instantiate admin view %s without default view' % self.__class__.__name__)
    def create_blueprint(self, admin):
        """
        Create Flask blueprint.
        """
        # Store admin instance
        self.admin = admin
        # If endpoint name is not provided, get it from the class name
        if self.endpoint is None:
            self.endpoint = self.__class__.__name__.lower()
        # If url is not provided, generate it from endpoint name
        if self.url is None:
            if self.admin.url != '/':
                self.url = '%s/%s' % (self.admin.url, self.endpoint)
            else:
                self.url = '/'
        else:
            # Relative URLs are nested under the admin base URL; URLs that
            # start with '/' are taken as absolute paths.
            if not self.url.startswith('/'):
                self.url = '%s/%s' % (self.admin.url, self.url)
        # If we're working from the root of the site, set prefix to None
        if self.url == '/':
            self.url = None
        # If name is not provided, use capitalized endpoint name
        if self.name is None:
            self.name = self._prettify_name(self.__class__.__name__)
        # Create blueprint and register rules
        self.blueprint = Blueprint(self.endpoint, __name__,
                                   url_prefix=self.url,
                                   subdomain=self.admin.subdomain,
                                   template_folder='templates',
                                   static_folder=self.static_folder)
        for url, name, methods in self._urls:
            self.blueprint.add_url_rule(url,
                                        name,
                                        getattr(self, name),
                                        methods=methods)
        return self.blueprint
    def render(self, template, **kwargs):
        """
        Render template
        :param template:
            Template path to render
        :param kwargs:
            Template arguments
        """
        # Store self as admin_view
        kwargs['admin_view'] = self
        # Provide i18n support even if flask-babel is not installed
        # or enabled.
        kwargs['_gettext'] = babel.gettext
        kwargs['_ngettext'] = babel.ngettext
        kwargs['h'] = h
        # Contribute extra arguments (per-request overrides win)
        kwargs.update(self._template_args)
        return render_template(template, **kwargs)
    def _prettify_name(self, name):
        """
        Prettify class name by splitting name by capital characters. So, 'MySuperClass' will look like 'My Super Class'
        :param name:
            String to prettify
        """
        return sub(r'(?<=.)([A-Z])', r' \1', name)
    def is_accessible(self):
        """
        Override this method to add permission checks.
        Flask-Admin does not make any assumptions about authentication system used in your application, so it is
        up for you to implement it.
        By default, it will allow access for the everyone.
        """
        return True
    def _handle_view(self, name, **kwargs):
        """
        This method will be executed before calling any view method.
        By default, it will check if admin class is accessible and if it is not - will
        throw HTTP 404 error.
        :param name:
            View function name
        :param kwargs:
            View function arguments
        """
        if not self.is_accessible():
            return abort(404)
class AdminIndexView(BaseView):
    """
    Default administrative interface index page when visiting the ``/admin/`` URL.
    It can be overridden by passing your own view class to the ``Admin`` constructor::
        class MyHomeView(AdminIndexView):
            @expose('/')
            def index(self):
                arg1 = 'Hello'
                return render_template('adminhome.html', arg1=arg1)
        admin = Admin(index_view=MyHomeView())
    Default values for the index page are following:
    * If name is not provided, 'Home' will be used.
    * If endpoint is not provided, will use ``admin``
    * Default URL route is ``/admin``.
    * Automatically associates with static folder.
    * Default template is ``admin/index.html``
    """
    def __init__(self, name=None, category=None,
                 endpoint=None, url=None,
                 template='admin/index.html'):
        # Fill the BaseView defaults documented above.
        resolved = dict(
            name=name or babel.lazy_gettext('Home'),
            category=category,
            endpoint=endpoint or 'admin',
            url=url or '/admin',
            static_folder='static',
        )
        super(AdminIndexView, self).__init__(**resolved)
        self._template = template
    @expose()
    def index(self):
        """Render the configured index template."""
        return self.render(self._template)
class MenuItem(object):
    """A node in the admin menu tree: either a category or a view entry."""
    def __init__(self, name, view=None):
        self.name = name
        self._view = view
        self._children = []
        self._children_urls = set()
        self._cached_url = None
        # Mirror the view's URL when one is attached; categories have none.
        self.url = view.url if view is not None else None
    def add_child(self, view):
        """Append a child item and remember its URL for is_active checks."""
        self._children.append(view)
        self._children_urls.add(view.url)
    def get_url(self):
        """Resolve (and cache) the URL of the default view; None for categories."""
        if self._view is None:
            return None
        if not self._cached_url:
            endpoint = '%s.%s' % (self._view.endpoint, self._view._default_view)
            self._cached_url = url_for(endpoint)
        return self._cached_url
    def is_active(self, view):
        """Return True when *view* is this item's view or one of its children."""
        if view == self._view:
            return True
        return view.url in self._children_urls
    def is_accessible(self):
        """Categories are never accessible themselves; defer to the view otherwise."""
        return False if self._view is None else self._view.is_accessible()
    def is_category(self):
        """True when this item groups children instead of wrapping a view."""
        return self._view is None
    def get_children(self):
        """Return only the children the current user may access."""
        return [child for child in self._children if child.is_accessible()]
class Admin(object):
    """
    Collection of the views. Also manages menu structure.
    """
    def __init__(self, app=None, name=None,
                 url=None, subdomain=None,
                 index_view=None,
                 translations_path=None,
                 endpoint=None):
        """
        Constructor.
        :param app:
            Flask application object
        :param name:
            Application name. Will be displayed in main menu and as a page title. If not provided, defaulted to "Admin"
        :param url:
            Base URL
        :param subdomain:
            Subdomain to use
        :param index_view:
            Home page view to use. If not provided, will use `AdminIndexView`.
        :param translations_path:
            Location of the translation message catalogs. By default will use translations
            shipped with the Flask-Admin.
        :param endpoint:
            Base endpoint name for index view. If you use multiple instances of `Admin` class with
            one Flask application, you have to set unique endpoint name for each instance.
        """
        self.app = app
        self.translations_path = translations_path
        # Registered views, flat; the menu tree is maintained separately.
        self._views = []
        # Top-level menu items; _menu_categories maps category name -> MenuItem.
        self._menu = []
        self._menu_categories = dict()
        if name is None:
            name = 'Admin'
        self.name = name
        # Endpoint/URL fall back to whatever the index view resolved them to.
        self.index_view = index_view or AdminIndexView(endpoint=endpoint, url=url)
        self.endpoint = endpoint or self.index_view.endpoint
        self.url = url or self.index_view.url
        self.subdomain = subdomain
        # Add predefined index view
        self.add_view(self.index_view)
        # Localizations
        self.locale_selector_func = None
        # Register with application
        if app:
            self._init_extension()
    def add_view(self, view):
        """
        Add view to the collection.
        :param view:
            View to add.
        """
        # Add to views
        self._views.append(view)
        # If app was provided in constructor, register view with Flask app
        if self.app is not None:
            self.app.register_blueprint(view.create_blueprint(self))
            self._add_view_to_menu(view)
    def locale_selector(self, f):
        """
        Installs locale selector for current ``Admin`` instance.
        Example::
            def admin_locale_selector():
                return request.args.get('lang', 'en')
            admin = Admin(app)
            admin.locale_selector(admin_locale_selector)
        It is also possible to use the ``@admin`` decorator::
            admin = Admin(app)
            @admin.locale_selector
            def admin_locale_selector():
                return request.args.get('lang', 'en')
        Or by subclassing the ``Admin``::
            class MyAdmin(Admin):
                def locale_selector(self):
                    return request.args.get('lang', 'en')
        """
        # Only one selector may ever be installed per instance.
        if self.locale_selector_func is not None:
            raise Exception('Can not add locale_selector second time.')
        self.locale_selector_func = f
    def _add_view_to_menu(self, view):
        """
        Add view to the menu tree
        :param view:
            View to add
        """
        if view.category:
            # Lazily create the category container the first time it is seen.
            category = self._menu_categories.get(view.category)
            if category is None:
                category = MenuItem(view.category)
                self._menu_categories[view.category] = category
                self._menu.append(category)
            category.add_child(MenuItem(view.name, view))
        else:
            self._menu.append(MenuItem(view.name, view))
    def init_app(self, app):
        """
        Register all views with Flask application.
        :param app:
            Flask application instance
        """
        self.app = app
        self._init_extension()
        # Register views
        for view in self._views:
            app.register_blueprint(view.create_blueprint(self))
            self._add_view_to_menu(view)
    def _init_extension(self):
        # Track every Admin instance on app.extensions['admin'] and reject
        # instances that would collide on endpoint or on URL+subdomain.
        if not hasattr(self.app, 'extensions'):
            self.app.extensions = dict()
        admins = self.app.extensions.get('admin', [])
        for p in admins:
            if p.endpoint == self.endpoint:
                raise Exception('Cannot have two Admin() instances with same'
                                ' endpoint name.')
            if p.url == self.url and p.subdomain == self.subdomain:
                raise Exception('Cannot assign two Admin() instances with same'
                                ' URL and subdomain to the same application.')
        admins.append(self)
        self.app.extensions['admin'] = admins
    def menu(self):
        """
        Return menu hierarchy.
        """
        return self._menu
| {
"repo_name": "0atman/flask-admin",
"path": "flask_admin/base.py",
"copies": "1",
"size": "16041",
"license": "bsd-3-clause",
"hash": -8884900048795488000,
"line_mean": 30.3913894325,
"line_max": 123,
"alpha_frac": 0.5356274546,
"autogenerated": false,
"ratio": 4.717941176470588,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.0005054044524582406,
"num_lines": 511
} |
from functools import wraps
from robust.exception import (ContinuousFailureException,
TimeoutException,
ConnectionCutException)
def _fail(ex, on_fail=None):
if on_fail:
on_fail()
else:
raise ex
def retry(limit, on_fail=None):
    """
    Retries same function N times, goes to fail callback if unable to succeed
    """
    def injector(fn):
        @wraps(fn)
        def wrapper(*args, **kwargs):
            attempts_left = limit
            while attempts_left > 0:
                attempts_left -= 1
                try:
                    return fn(*args, **kwargs)
                except Exception:
                    pass
            # Exhausted every attempt without a single success.
            _fail(ContinuousFailureException, on_fail)
        return wrapper
    return injector
def timeout(limit, on_fail=None):
    """
    Waits for function to respond N seconds
    """
    def injector(fn):
        # Imported lazily so merely importing this module needs no alarm support.
        from robust.alarm import alarm_context
        def on_timeout():
            # Raise (or delegate to the callback) when the alarm fires.
            return _fail(TimeoutException, on_fail)
        @wraps(fn)
        def wrapper(*args, **kwargs):
            with alarm_context(limit, on_timeout):
                return fn(*args, **kwargs)
        return wrapper
    return injector
def breaker(limit, revive, on_fail=None):
    """
    Allows :limit: failures, after which it cuts connection.
    After :revive: seconds it allows one connection to pass.
    If it succeeds - counter is reset, if doesn't - we wait another :revive: seconds
    """
    def injector(fn):
        from robust.alarm import alarm_create
        # Failures seen so far; shared by every call to the wrapped fn.
        counter = 0
        # Handle returned by alarm_create for the pending "revive" alarm
        # (presumably a cancel callable — called on success below; confirm
        # against robust.alarm).
        reset_fn = None
        def revive_handler():
            # Alarm fired: drop the count just below the limit so exactly one
            # probe call can pass the `counter >= limit` gate.
            nonlocal counter
            counter -= 1
        @wraps(fn)
        def wrapper(*args, **kwargs):
            nonlocal counter
            nonlocal reset_fn
            # Circuit open: fail fast without calling fn.
            if counter >= limit:
                return _fail(ConnectionCutException, on_fail)
            result = None
            try:
                result = fn(*args, **kwargs)
            except Exception:
                counter += 1
                # Just tripped the limit: schedule the revive probe.
                if counter >= limit:
                    reset_fn = alarm_create(revive, revive_handler)
                raise
            else:
                # Success: cancel any pending revive and close the circuit.
                if reset_fn:
                    reset_fn()
                counter = 0
            return result
        return wrapper
    return injector
| {
"repo_name": "zaibacu/pyRobustness",
"path": "robust/tools.py",
"copies": "1",
"size": "2386",
"license": "mit",
"hash": 2859437107756931600,
"line_mean": 24.9347826087,
"line_max": 84,
"alpha_frac": 0.526823135,
"autogenerated": false,
"ratio": 4.642023346303502,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.0001278772378516624,
"num_lines": 92
} |
from functools import wraps
from sample_data_utils.exception import MaxAttemptException
def infinite():
    """Yield consecutive integers starting at 0, forever."""
    n = 0
    while True:
        yield n
        n += 1
# Module-level registry mapping prefix -> auto-increment generator, shared by
# every sequence() generator that does not supply its own cache.
_sequence_counters = {}
def sequence(prefix, cache=None):
    """
    Generator yielding unique strings of the form ``<prefix>-<n>``.
    :param prefix: prefix of string
    :param cache: cache used to store the last used number
    >>> next(sequence('abc'))
    'abc-0'
    >>> next(sequence('abc'))
    'abc-1'
    """
    def _auto_inc():
        # Inline equivalent of the module-level ``infinite`` generator.
        n = 0
        while True:
            yield n
            n += 1
    if cache is None:
        cache = _sequence_counters
    if cache == -1:
        # Sentinel: use a private, throw-away counter store.
        cache = {}
    if prefix not in cache:
        cache[prefix] = _auto_inc()
    while cache[prefix]:
        yield "{0}-{1}".format(prefix, next(cache[prefix]))
def _get_memoized_value(func, args, kwargs):
"""Used internally by memoize decorator to get/store function results"""
key = (repr(args), repr(kwargs))
if not key in func._cache_dict:
ret = func(*args, **kwargs)
func._cache_dict[key] = ret
return func._cache_dict[key]
def memoize(func):
    """Decorator that stores function results in a dictionary to be used on the
    next time that the same arguments were informed."""
    func._cache_dict = {}
    @wraps(func)
    def _inner(*args, **kwargs):
        # Inlined cache lookup: key on the repr of the call arguments.
        key = (repr(args), repr(kwargs))
        cache = func._cache_dict
        if key not in cache:
            cache[key] = func(*args, **kwargs)
        return cache[key]
    return _inner
# Default module-level cache for unique(): maps "<func>_<args-prefix>" keys to
# the list of values already drawn.
_cache_unique = {}
def unique(func, num_args=0, max_attempts=100, cache=None):
    """
    wraps a function so that produce unique results
    :param func: callable to wrap
    :param num_args: number of leading positional args that are part of the cache key
    :param max_attempts: draws allowed before giving up with MaxAttemptException
    :param cache: optional result store; defaults to the module-level cache
    >>> import random
    >>> choices = [1,2]
    >>> a = unique(random.choice, 1)
    >>> a,b = a(choices), a(choices)
    >>> a == b
    False
    """
    if cache is None:
        cache = _cache_unique
    @wraps(func)
    def wrapper(*args):
        key = "%s_%s" % (str(func.__name__), str(args[:num_args]))
        # Idiomatic bounded loop (was a manual while/counter pair).
        for _ in range(max_attempts):
            drawn = cache.get(key, [])
            result = func(*args)
            if result not in drawn:
                drawn.append(result)
                cache[key] = drawn
                return result
        raise MaxAttemptException()
    return wrapper
| {
"repo_name": "saxix/sample-data-utils",
"path": "sample_data_utils/utils.py",
"copies": "1",
"size": "2211",
"license": "bsd-3-clause",
"hash": 7710956275038409000,
"line_mean": 21.11,
"line_max": 79,
"alpha_frac": 0.5694255993,
"autogenerated": false,
"ratio": 3.9063604240282683,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.49757860233282686,
"avg_score": null,
"num_lines": null
} |
from functools import wraps
from sanic import response
from sanic.request import Request
from ssr_panel.models import User
def login_required(f):
    """Require a logged-in user; otherwise redirect to the login page.

    On success the Request object gains ``uid`` and ``user`` entries.
    """
    @wraps(f)
    async def decorated_function(*args, **kwargs):
        request = next((a for a in args if isinstance(a, Request)), None)
        if request is not None:
            uid = request.get('session', {}).get('uid')
            if uid is None:
                return response.redirect(request.app.url_for('auth.LoginView', next=request.url))
            try:
                user = await User.objects.get(User.id == uid)
            except User.DoesNotExist:
                # Stale session: the referenced user no longer exists.
                return response.redirect(request.app.url_for('auth.LoginView', next=request.url))
            request['uid'] = uid
            request['user'] = user
        return await f(*args, **kwargs)
    return decorated_function
def admin_required(f):
    """Require a logged-in administrator; otherwise redirect to the login page.

    On success the Request object gains ``uid`` and ``user`` entries.
    """
    @wraps(f)
    async def decorated_function(*args, **kwargs):
        request = next((a for a in args if isinstance(a, Request)), None)
        if request is not None:
            uid = request.get('session', {}).get('uid')
            if uid is None:
                return response.redirect(request.app.url_for('auth.LoginView', next=request.url))
            try:
                # Only users flagged as admins qualify.
                user = await User.objects.get(User.id == uid, User.is_admin == 1)
            except User.DoesNotExist:
                return response.redirect(request.app.url_for('auth.LoginView', next=request.url))
            request['uid'] = uid
            request['user'] = user
        return await f(*args, **kwargs)
    return decorated_function
def login_optional(f):
    """Attach user info when a session exists, but never block the request.

    ``request['user']`` is None when there is no session or the user is gone.
    """
    @wraps(f)
    async def decorated_function(*args, **kwargs):
        request = next((a for a in args if isinstance(a, Request)), None)
        if request is not None:
            uid = request.get('session', {}).get('uid')
            request['uid'] = uid
            user = None
            if uid is not None:
                try:
                    user = await User.objects.get(User.id == uid)
                except User.DoesNotExist:
                    user = None
            request['user'] = user
        return await f(*args, **kwargs)
    return decorated_function
| {
"repo_name": "gaolycn/ssr-panel-sanic",
"path": "utils/decorators.py",
"copies": "1",
"size": "2262",
"license": "mit",
"hash": 8877623083269326000,
"line_mean": 35.4838709677,
"line_max": 93,
"alpha_frac": 0.4991158267,
"autogenerated": false,
"ratio": 4.470355731225297,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5469471557925296,
"avg_score": null,
"num_lines": null
} |
from functools import wraps
from sanic.response import json
from sanic_redis import Namespace
from App.model import User
def captcha_check(_type):
    """Decorator factory verifying a captcha for every non-GET request.

    :param _type: captcha namespace suffix used to key the redis store.
    The request body must carry ``captcha_id`` and ``captcha_code``;
    the submitted code is compared against the redis-stored one.
    """
    def decorator(func):
        @wraps(func)
        async def handler(request, *args, **kwargs):
            json_data = request.json
            # GET requests are never captcha-protected.
            if request.method == "GET":
                return await func(request, *args, **kwargs)
            captcha_id = json_data.get("captcha_id")
            captcha_code = json_data.get("captcha_code")
            if not all([captcha_id, captcha_code]):
                return json({"message": "传入的json中需要有字段captcha_id,captcha_code"}, 400)
            # Redis stores bytes, so compare against the encoded submission.
            captcha_code = captcha_code.encode("utf-8")
            namespace = Namespace(request.app.name + "-captcha" + "-" + _type)
            code = await request.app.redis["captcha"].get(namespace(captcha_id))
            if code is None:
                return json({"message": "找不到验证码信息,可能已过期"}, 401)
            if code != captcha_code:
                return json({"message": "验证码错误"}, 401)
            return await func(request, *args, **kwargs)
        return handler
    return decorator
| {
"repo_name": "Basic-Components/auth-center",
"path": "auth-center/App/decorator/captcha_check_dec.py",
"copies": "1",
"size": "1372",
"license": "mit",
"hash": -6694876689593031000,
"line_mean": 42.9333333333,
"line_max": 88,
"alpha_frac": 0.5159332322,
"autogenerated": false,
"ratio": 4.197452229299363,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5213385461499362,
"avg_score": null,
"num_lines": null
} |
from functools import wraps
from sanic.response import json
from sanic_redis import Namespace
from App.model import User,Role
def authorized():
    """Decorator factory validating the ``Authorization`` token header.

    The token must deserialize to a payload whose ``_id`` matches an existing
    user and whose redis-stored copy is byte-identical (i.e. not expired or
    rotated). On success ``auth_id`` and ``auth_roles`` are added to
    ``request.args`` before the view runs.
    """
    def decorator(func):
        @wraps(func)
        async def handler(request, *args, **kwargs):
            namespace = Namespace(request.app.name + "-auth_token")
            try:
                token = request.headers["Authorization"]
            except KeyError:
                return json({"message": "没有验证token"}, 401)
            token_info = request.app.serializer.loads(token)
            try:
                # Existence check only; the user object itself is unused.
                await User.get(User._id == token_info["_id"])
            except Exception:
                return json({"message": "token指向的用户不存在"}, 401)
            try:
                value = await request.app.redis["auth_token"].get(namespace(token_info["_id"]))
            except Exception:
                # BUGFIX: the except branch previously printed ``e.message``,
                # a Python-2-only attribute that raises AttributeError on
                # Python 3; those stray debug prints are removed.
                return json({"message": "token已过期"}, 401)
            if value != token.encode("utf-8"):
                return json({"message": "token过期已更改"}, 401)
            request.args['auth_id'] = token_info["_id"]
            request.args['auth_roles'] = token_info["roles"]
            return await func(request, *args, **kwargs)
        return handler
    return decorator
| {
"repo_name": "Basic-Components/auth-center",
"path": "auth-center/App/decorator/auth_check_dec.py",
"copies": "1",
"size": "1606",
"license": "mit",
"hash": -7064108630368697000,
"line_mean": 39.1538461538,
"line_max": 99,
"alpha_frac": 0.4955300128,
"autogenerated": false,
"ratio": 4.646884272997033,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.0062205702056866855,
"num_lines": 39
} |
from functools import wraps
from ..selection import Selection, Interval
from . import SelectModes
def partial(func, *args, docs='', **keywords):
    """Pragmatic solution for being able to set a metadata for a partial function"""
    @wraps(func)
    def wrapper(*fargs, **fkeywords):
        # Call-time kwargs override the pre-bound ones.
        merged = {**keywords, **fkeywords}
        return func(*(args + fargs), **merged)
    # Expose the binding metadata, mirroring functools.partial, plus docs.
    wrapper.func = func
    wrapper.args = args
    wrapper.keywords = keywords
    wrapper.docs = docs
    return wrapper
def selector(function):
    """Turn given selector in a command that takes a document.

    Fills in the document's current selection/selectmode when not supplied,
    then either returns the computed result (``preview=True``) or applies it
    to the document.
    """
    @wraps(function)
    def wrapper(doc, *args, selection=None, selectmode=None, preview=False, **kwargs):
        selection = selection or doc.selection
        selectmode = selectmode or doc.selectmode
        result = function(
            doc, *args, selection=selection, selectmode=selectmode, **kwargs)
        if preview:
            return result
        # PEP 8 identity comparison (was ``result != None``).
        if result is not None:
            result(doc)
    return wrapper
def intervalselector(function):
    """Turn given intervalselector in a command that takes a document.

    Applies *function* to every interval in the selection; if any call
    yields None the whole operation is cancelled.
    """
    @wraps(function)
    @selector
    def wrapper(doc, selection, *args, selectmode=None, **kwargs):
        new_intervals = []
        for interval in selection:
            new_interval = function(doc, interval, *args, selectmode=selectmode, **kwargs)
            # PEP 8 identity comparison (was ``== None``); None aborts.
            if new_interval is None:
                return
            new_intervals.append(new_interval)
        return Selection(new_intervals)
    return wrapper
def intervalselector_withmode(function):
    """
    Turn given intervalselector in a command that takes a document and process
    according to selectmode.

    In head mode the interval's begin is pinned and only the end moves; in
    tail mode the end is pinned and only the begin moves.
    """
    @wraps(function)
    @intervalselector
    def wrapper(doc, interval, *args, selectmode=None, **kwargs):
        # Give different interval based on selectmode
        beg, end = interval
        if selectmode == SelectModes.head:
            proxy_interval = Interval(end, end)
        elif selectmode == SelectModes.tail:
            proxy_interval = Interval(beg, beg)
        else:
            proxy_interval = interval
        new_interval = function(doc, proxy_interval, *args, **kwargs)
        # PEP 8 identity comparison (was ``== None``); None aborts.
        if new_interval is None:
            return
        # Process interval differently based on selectmode
        nbeg, nend = new_interval
        if selectmode == SelectModes.head:
            # beg is fixed, but end is determined by new interval
            if nend <= end:
                return Interval(beg, max(beg, nbeg))
            else:
                return Interval(beg, max(beg, nend))
        elif selectmode == SelectModes.tail:
            # end is fixed, but beg is determined by new interval
            if nbeg >= beg:
                return Interval(min(end, nend), end)
            else:
                return Interval(min(end, nbeg), end)
        else:
            return new_interval
    return wrapper
| {
"repo_name": "Chiel92/fate",
"path": "fate/selecting/decorators.py",
"copies": "1",
"size": "2988",
"license": "mit",
"hash": 3399254944417939500,
"line_mean": 32.2,
"line_max": 90,
"alpha_frac": 0.6161311914,
"autogenerated": false,
"ratio": 4.413589364844904,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5529720556244904,
"avg_score": null,
"num_lines": null
} |
from functools import wraps
from six.moves.urllib import parse
from django.contrib.auth import REDIRECT_FIELD_NAME
from django.contrib.auth.decorators import user_passes_test
from django.shortcuts import render
from django.contrib import messages
from django.contrib.auth.views import redirect_to_login
from django.core.exceptions import PermissionDenied
from django.core.urlresolvers import reverse_lazy
from django.utils.translation import ugettext_lazy as _
def staff_member_required(view_func, login_url=None):
    """
    Ensure that the user is a logged-in staff member.

    * If not authenticated, redirect to a specified login URL.
    * If not staff, show a 403 page

    This decorator is based on the decorator with the same name from
    django.contrib.admin.views.decorators. This one is superior as it allows a
    redirect URL to be specified.
    """
    if login_url is None:
        login_url = reverse_lazy('customer:login')
    @wraps(view_func)
    def _checklogin(request, *args, **kwargs):
        user = request.user
        if user.is_active and user.is_staff:
            return view_func(request, *args, **kwargs)
        if user.is_authenticated():
            # Logged in but not staff: forbid outright.
            raise PermissionDenied
        # Anonymous: send to the login page, preserving the destination.
        # Use only the path as the "next" URL when the login URL lives on the
        # same scheme and host as the current request.
        path = request.build_absolute_uri()
        login_scheme, login_netloc = parse.urlparse(login_url)[:2]
        current_scheme, current_netloc = parse.urlparse(path)[:2]
        same_scheme = not login_scheme or login_scheme == current_scheme
        same_host = not login_netloc or login_netloc == current_netloc
        if same_scheme and same_host:
            path = request.get_full_path()
        messages.warning(request, _("You must log in to access this page"))
        return redirect_to_login(path, login_url, REDIRECT_FIELD_NAME)
    return _checklogin
def check_permissions(user, permissions):
    """
    Permissions can be a list or a tuple of lists. If it is a tuple, every
    permission list is evaluated and the results are OR-ed together.

    Each item of the list(s) must be either a valid Django permission name
    (model.codename) or a property or method on the User model
    (e.g. 'is_active', 'is_superuser').

    Example usage:
    - permissions_required(['is_staff', ])
      would replace staff_member_required
    - permissions_required(['is_anonymous', ])
      would replace login_forbidden
    - permissions_required((['is_staff',], ['partner.dashboard_access']))
      allows both staff users and users with the above permission
    """
    def _passes(perms):
        # Split Django permission names ("app.codename") from user-model
        # attributes/methods such as 'is_active' or 'is_superuser'.
        regular_permissions = [p for p in perms if '.' in p]
        conditions = [p for p in perms if '.' not in p]
        # Always check is_active unless the caller is explicitly checking
        # is_anonymous (or is_active itself).
        if conditions and not ({'is_anonymous', 'is_active'} & set(conditions)):
            conditions.append('is_active')
        checks = []
        for attr_name in conditions:
            attr = getattr(user, attr_name)
            # Evaluate methods; explicitly cast properties to booleans.
            checks.append(attr() if callable(attr) else bool(attr))
        return all(checks) and user.has_perms(regular_permissions)
    if not permissions:
        return True
    if isinstance(permissions, list):
        return _passes(permissions)
    return any(_passes(perm) for perm in permissions)
def permissions_required(permissions, login_url=None):
    """
    Decorator that checks if a user has the given permissions.
    Accepts a list or tuple of lists of permissions (see check_permissions
    documentation).

    If the user is not logged in and the test fails, she is redirected to a
    login page. If the user is logged in, she gets a HTTP 403 Permission Denied
    message, analogous to Django's permission_required decorator.
    """
    if login_url is None:
        login_url = reverse_lazy('customer:login')
    def _check_permissions(user):
        outcome = check_permissions(user, permissions)
        # Authenticated users that fail the check get 403 rather than a
        # pointless round-trip to the login page.
        if not outcome and user.is_authenticated():
            raise PermissionDenied
        return outcome
    return user_passes_test(_check_permissions, login_url=login_url)
def login_forbidden(view_func, template_name='login_forbidden.html',
                    status=403):
    """
    Only allow anonymous users to access this view.
    """
    @wraps(view_func)
    def _checklogin(request, *args, **kwargs):
        if request.user.is_authenticated():
            # Authenticated users get the "forbidden" page instead.
            return render(request, template_name, status=status)
        return view_func(request, *args, **kwargs)
    return _checklogin
| {
"repo_name": "marcoantoniooliveira/labweb",
"path": "oscar/views/decorators.py",
"copies": "1",
"size": "4985",
"license": "bsd-3-clause",
"hash": 6807963652035719000,
"line_mean": 38.88,
"line_max": 79,
"alpha_frac": 0.6702106319,
"autogenerated": false,
"ratio": 4.3235039028620985,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0,
"num_lines": 125
} |
from functools import wraps
from six.moves.urllib.parse import urlparse
import time
import uuid
from twisted.internet import defer
"""
Endpoints decorated with @trace will add an extra parameter, ctx. ctx needs to be down-propagated to all
outgoing requests for request tracing.
The @profile decorator provides additional profiling capabilities to endpoints decorated with @trace.
Ex:
@app.route('/some/path', methods=['GET'])
@tracing.trace
@tracing.profile
def my_handler(ctx, request, ...):
"""
# Module-level logger injected via set_logger(); required before using @profile.
_logger = None
# When True, @profile logs every request's context via _logger.info.
_debug = False
# Failure types that @profile should not record as errors.
_exceptions_to_ignore = []
def set_logger(logger):
    """Install the module-level logger used by the @profile decorator."""
    global _logger
    _logger = logger
def set_debug(debug):
    """Toggle per-request debug logging performed by @profile."""
    global _debug
    _debug = debug
def set_exceptions_to_ignore(*exceptions):
    """Configure failure types that @profile should not record as errors.

    :param exceptions: exception classes; anything that is not an
        ``Exception`` subclass raises ``TypeError``.
    """
    global _exceptions_to_ignore
    for e in exceptions:
        # Guard non-class arguments explicitly: a bare ``issubclass`` would
        # itself raise a less helpful TypeError for non-type inputs.
        if not (isinstance(e, type) and issubclass(e, Exception)):
            raise TypeError('%s is not an Exception' % e)
    _exceptions_to_ignore = exceptions
def trace(f):
    """
    Insert context into service and add request-id header for tracing.
    The request-id is extracted from the X-Request-ID header or, if not present, it is generated.
    """
    # Normalise f so it always returns a Deferred, even when synchronous.
    f = ensure_is_deferred(f)
    @wraps(f)
    @defer.inlineCallbacks
    def wrapper(request, *args, **kwargs):
        # Reuse a caller-supplied request id when present; otherwise mint one.
        ctx = {'request-id': request.getHeader('X-Request-ID') or str(uuid.uuid4())}
        # Echo the id back onto the request headers so downstream handlers
        # (and @profile logging) observe a consistent value.
        request.requestHeaders.addRawHeader('X-Request-ID', ctx['request-id'])
        # The handler is called with ctx prepended -- see module docstring.
        res = yield f(ctx, request, *args, **kwargs)
        defer.returnValue(res)
    return wrapper
def profile(f):
    """
    Decorator for enabling basic instrumentation.
    The wrapped function now receives an additional parameter of type dictionary, ctx.
    Any entries added to ctx will be logged along with default logging fields:
    user_agent, endpoint path, and request duration.
    NOTE: This decorator requires the @trace decorator
    """
    # set_logger() must have been called before any decorated endpoint is defined.
    assert(_logger is not None)
    f = ensure_is_deferred(f)
    @wraps(f)
    def wrapper(ctx, request, *args, **kwargs):
        ctx['user_agent'] = request.getHeader('User-Agent')
        ctx['endpoint'] = urlparse(request.uri).path
        start = time.time()
        def do_finally(param):
            # Runs on success AND failure (added via addBoth below), so the
            # duration is always recorded before optional debug logging.
            ctx['took'] = time.time() - start
            if _debug:
                _logger.info('txhttptrace debug request print', **ctx)
            return param
        def handle_error(failure):
            # Record error details unless the failure type was registered
            # via set_exceptions_to_ignore(); re-return to keep it propagating.
            if not failure.check(*_exceptions_to_ignore):
                ctx['error'] = failure.getErrorMessage()
                ctx['traceback'] = str(failure.getTraceback(elideFrameworkCode=True, detail='brief'))
            return failure
        d = f(ctx, request, *args, **kwargs)
        # Order matters: errback first so do_finally sees the annotated ctx.
        d.addErrback(handle_error)
        d.addBoth(do_finally)
        return d
    return wrapper
def ensure_is_deferred(f):
    """Wrap ``f`` so calling it always yields a twisted Deferred.

    maybeDeferred turns a plain return value into an already-fired Deferred
    and a raised exception into a failed one.
    """
    @wraps(f)
    def _ensure_is_deferred(*args, **kwargs):
        return defer.maybeDeferred(f, *args, **kwargs)
    return _ensure_is_deferred
| {
"repo_name": "wrapp/txhttptrace",
"path": "txhttptrace.py",
"copies": "1",
"size": "2940",
"license": "mit",
"hash": 3317605844120709000,
"line_mean": 26.476635514,
"line_max": 104,
"alpha_frac": 0.6523809524,
"autogenerated": false,
"ratio": 3.93048128342246,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.0016165581226573282,
"num_lines": 107
} |
from functools import wraps
from statsdecor import client
def increment(name, tags=None):
    """Decorator that bumps the ``name`` counter each time the wrapped
    function completes.

    >>> from statsdecor.decorators import increment
    >>> @increment('my.metric')
    >>> def my_func():
    >>>     pass
    """
    def wrap(f):
        @wraps(f)
        def decorator(*args, **kwargs):
            # Resolve the client first, run the function, then record --
            # same ordering as the other metric decorators.
            metrics = client()
            result = f(*args, **kwargs)
            metrics.incr(name, tags=tags)
            return result
        return decorator
    return wrap
def decrement(name, tags=None):
    """Decorator that decrements the ``name`` counter each time the wrapped
    function completes.

    >>> from statsdecor.decorators import decrement
    >>> @decrement('my.metric')
    >>> def my_func():
    >>>     pass
    """
    def wrap(f):
        @wraps(f)
        def decorator(*args, **kwargs):
            metrics = client()
            result = f(*args, **kwargs)
            metrics.decr(name, tags=tags)
            return result
        return decorator
    return wrap
def timed(name, tags=None):
    """Decorator that records how long each invocation of the wrapped
    function takes, under the ``name`` timer.

    >>> from statsdecor.decorators import timed
    >>> @timed('my.metric')
    >>> def my_func():
    >>>     pass
    """
    def wrap(f):
        @wraps(f)
        def decorator(*args, **kwargs):
            metrics = client()
            # The timer context manager records the elapsed time even if
            # f raises, matching the original behaviour.
            with metrics.timer(name, tags=tags):
                return f(*args, **kwargs)
        return decorator
    return wrap
| {
"repo_name": "freshbooks/statsdecor",
"path": "statsdecor/decorators.py",
"copies": "1",
"size": "1547",
"license": "mit",
"hash": -2993586048445057500,
"line_mean": 24.3606557377,
"line_max": 65,
"alpha_frac": 0.5617323853,
"autogenerated": false,
"ratio": 4.32122905027933,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.538296143557933,
"avg_score": null,
"num_lines": null
} |
from functools import wraps
from string import ascii_lowercase
from termcolor import cprint
from twisted.internet import defer, reactor
from rss.core import fetch
# Registry mapping subcommand name -> handler function.
commands = {}


def command(name=None):
    """Register a function in the ``commands`` table.

    The function is stored under ``name`` when given (and truthy),
    otherwise under its own ``__name__``. The function is returned
    unchanged so the decorator is transparent.
    """
    def add_command(fn):
        key = name or fn.__name__
        commands[key] = fn
        return fn
    return add_command
def main(arguments):
    # XXX: A hack until docopt properly supports subcommands / dispatching
    # Run the selected subcommand; stop the reactor once its Deferred fires.
    # reactor.run() blocks until reactor.stop() is called.
    run_command(arguments).addCallback(lambda succeeded : reactor.stop())
    reactor.run()
def run_command(arguments):
    """Dispatch to the selected subcommand and return its Deferred."""
    # docopt gives flags like {'get': True, '<url>': [...]}; the first truthy
    # key that starts with a lowercase letter names the chosen subcommand.
    # NOTE(review): .iteritems() is Python 2 only (consistent with the
    # print statements below).
    command = next(
        cmd for cmd, selected in arguments.iteritems()
        if cmd[0] in ascii_lowercase and selected
    )
    return commands[command](arguments)
@command()
def get(arguments):
    """Fetch every <url> argument and print its feed.

    Returns a Deferred that fires when all fetches have completed.
    """
    d = []
    for url in arguments["<url>"]:
        d.append(fetch(url).addCallback(print_feed))
    return defer.gatherResults(d, consumeErrors=True)
@command()
def add(arguments):
    # TODO: not implemented -- presumably adds a feed subscription.
    pass
@command()
def list(arguments):
    # TODO: not implemented. NOTE(review): name shadows the ``list`` builtin.
    pass
@command()
def rm(arguments):
    # TODO: not implemented -- presumably removes a feed subscription.
    pass
def print_feed(feed):
    """Print a parsed feed: title in blue, then one line per entry.

    Entries without a ``title`` key fall back to their ``description``.
    """
    cprint(feed["feed"]["title"], "blue")
    for entry in feed["entries"]:
        # Parenthesised single-argument print produces identical output
        # under Python 2 and is valid Python 3, unlike the old statements.
        if "title" in entry:
            print(entry["title"])
        else:
            print(entry["description"])
| {
"repo_name": "gwhitehawk/RSS",
"path": "rss/script.py",
"copies": "1",
"size": "1233",
"license": "mit",
"hash": 178001663963536540,
"line_mean": 18.8870967742,
"line_max": 74,
"alpha_frac": 0.6561232766,
"autogenerated": false,
"ratio": 3.926751592356688,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5082874868956688,
"avg_score": null,
"num_lines": null
} |
from functools import wraps
from string import digits
from pyvi.mode import Mode
from pyvi.modes import insert
def motion(fn):
    """Decorator turning ``fn`` (which computes target coordinates) into a
    normal-mode motion.

    The wrapped motion either moves the cursor to the target, or -- when an
    operator such as ``d`` is pending on ``editor._command`` -- hands the
    target coordinates to that operator instead.
    """
    @wraps(fn)
    def move(editor):
        cursor = editor.active_window.cursor
        # Motions receive the pending count prefix (default 1).
        moved_to = fn(editor, count=editor.count or 1)
        # XXX: do this on any executed command
        editor.count = None
        command = editor._command
        if command is not None:
            # Operator pending: apply it over the motion's span.
            command(editor, motion=moved_to)
            editor._command = None
        else:
            cursor.coords = moved_to
            # Clamp the cursor back inside the buffer after moving.
            cursor.trim()
    return move
def operator(fn):
    """Turn ``fn`` into a pending operator.

    Pressing the operator's key does not act immediately; it records ``fn``
    on the editor so the following motion can invoke it with the target
    coordinates.
    """
    @wraps(fn)
    def defer_to_motion(editor):
        editor._command = fn
    return defer_to_motion
def keypress(mode, editor, key):
    """Normal-mode key handler.

    ``esc`` is a no-op, digit keys accumulate the count prefix, and any
    other key is dispatched to the mode's command table.
    """
    if key == "esc":
        return
    if key in digits:
        editor.count = (editor.count or 0) * 10 + int(key)
    else:
        mode.dispatch(editor, [key])
normal = Mode(name="Normal", keypress=keypress)
@normal.handles("d")
@operator
def d(editor, motion):
buffer = editor.active_window.buffer
row, column = editor.active_window.cursor
end_row, end_column = motion
if row == end_row:
line = buffer.lines[row]
buffer.lines[row] = line[:column] + line[end_column:]
else:
buffer.lines[row] = buffer.lines[row][:column]
buffer.lines[row + 1:end_row] = []
buffer.lines[end_row] = buffer.lines[row][end_column:]
@normal.handles("h")
@motion
def h(editor, count):
row, column = editor.active_window.cursor
return row, column - count
@normal.handles("i")
def i(editor):
editor.mode = insert
@normal.handles("j")
@motion
def j(editor, count):
row, column = editor.active_window.cursor
return row + count, column
@normal.handles("k")
@motion
def k(editor, count):
row, column = editor.active_window.cursor
return row - count, column
@normal.handles("l")
@motion
def l(editor, count):
row, column = editor.active_window.cursor
return row, column + count
| {
"repo_name": "Julian/PyVi",
"path": "pyvi/modes/_normal.py",
"copies": "1",
"size": "1990",
"license": "mit",
"hash": -6219647325813957000,
"line_mean": 20.8681318681,
"line_max": 62,
"alpha_frac": 0.6261306533,
"autogenerated": false,
"ratio": 3.5535714285714284,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9674707076876423,
"avg_score": 0.000999000999000999,
"num_lines": 91
} |
from functools import wraps
from StringIO import StringIO # No need for cStringIO at this time
import sys
def mock_streams(*which):
    """
    Replaces a stream with a ``StringIO`` during the test, then restores after.
    Must specify which stream (stdout, stderr, etc) via string args, e.g.::

        @mock_streams('stdout')
        def func():
            pass

        @mock_streams('stderr')
        def func():
            pass

        @mock_streams('stdout', 'stderr')
        def func()
            pass
    """
    def mocked_streams_decorator(func):
        @wraps(func)
        def inner_wrapper(*args, **kwargs):
            if 'stdout' in which:
                my_stdout, sys.stdout = sys.stdout, StringIO()
            if 'stderr' in which:
                my_stderr, sys.stderr = sys.stderr, StringIO()
            # Restore the real streams even if func raises -- previously an
            # exception left the mock StringIO installed for later tests.
            try:
                return func(*args, **kwargs)
            finally:
                if 'stderr' in which:
                    sys.stderr = my_stderr
                if 'stdout' in which:
                    sys.stdout = my_stdout
        return inner_wrapper
    return mocked_streams_decorator
| {
"repo_name": "simon-engledew/fabric",
"path": "tests/utils.py",
"copies": "1",
"size": "1105",
"license": "bsd-2-clause",
"hash": -2439784212568234000,
"line_mean": 28.0789473684,
"line_max": 79,
"alpha_frac": 0.5511312217,
"autogenerated": false,
"ratio": 4.455645161290323,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5506776382990323,
"avg_score": null,
"num_lines": null
} |
from functools import wraps
from sys import getsizeof
from typing import (
TYPE_CHECKING,
Any,
Callable,
Hashable,
Iterable,
List,
Optional,
Sequence,
Tuple,
Union,
)
import warnings
import numpy as np
from pandas._config import get_option
from pandas._libs import algos as libalgos, index as libindex, lib
from pandas._libs.hashtable import duplicated_int64
from pandas._typing import AnyArrayLike, Label, Scalar
from pandas.compat.numpy import function as nv
from pandas.errors import InvalidIndexError, PerformanceWarning, UnsortedIndexError
from pandas.util._decorators import Appender, cache_readonly, doc
from pandas.core.dtypes.cast import coerce_indexer_dtype
from pandas.core.dtypes.common import (
ensure_int64,
ensure_platform_int,
is_categorical_dtype,
is_hashable,
is_integer,
is_iterator,
is_list_like,
is_object_dtype,
is_scalar,
pandas_dtype,
)
from pandas.core.dtypes.dtypes import ExtensionDtype
from pandas.core.dtypes.generic import ABCDataFrame, ABCDatetimeIndex, ABCTimedeltaIndex
from pandas.core.dtypes.missing import array_equivalent, isna
import pandas.core.algorithms as algos
from pandas.core.arrays import Categorical
from pandas.core.arrays.categorical import factorize_from_iterables
import pandas.core.common as com
import pandas.core.indexes.base as ibase
from pandas.core.indexes.base import Index, _index_shared_docs, ensure_index
from pandas.core.indexes.frozen import FrozenList
from pandas.core.indexes.numeric import Int64Index
import pandas.core.missing as missing
from pandas.core.ops.invalid import make_invalid_op
from pandas.core.sorting import (
get_group_index,
indexer_from_factorized,
lexsort_indexer,
)
from pandas.io.formats.printing import (
format_object_attrs,
format_object_summary,
pprint_thing,
)
if TYPE_CHECKING:
from pandas import Series # noqa:F401
_index_doc_kwargs = dict(ibase._index_doc_kwargs)
_index_doc_kwargs.update(
dict(klass="MultiIndex", target_klass="MultiIndex or list of tuples")
)
class MultiIndexUIntEngine(libindex.BaseMultiIndexCodesEngine, libindex.UInt64Engine):
    """
    This class manages a MultiIndex by mapping label combinations to positive
    integers.
    """
    _base = libindex.UInt64Engine
    def _codes_to_ints(self, codes):
        """
        Transform combination(s) of uint64 in one uint64 (each), in a strictly
        monotonic way (i.e. respecting the lexicographic order of integer
        combinations): see BaseMultiIndexCodesEngine documentation.
        Parameters
        ----------
        codes : 1- or 2-dimensional array of dtype uint64
            Combinations of integers (one per row)
        Returns
        -------
        scalar or 1-dimensional array, of dtype uint64
            Integer(s) representing one combination (each).
        """
        # Shift the representation of each level by the pre-calculated number
        # of bits:
        # (self.offsets is assumed to be set by BaseMultiIndexCodesEngine,
        # defined outside this file -- per-level bit offsets.)
        codes <<= self.offsets
        # Now sum and OR are in fact interchangeable. This is a simple
        # composition of the (disjunct) significant bits of each level (i.e.
        # each column in "codes") in a single positive integer:
        if codes.ndim == 1:
            # Single key
            return np.bitwise_or.reduce(codes)
        # Multiple keys
        return np.bitwise_or.reduce(codes, axis=1)
class MultiIndexPyIntEngine(libindex.BaseMultiIndexCodesEngine, libindex.ObjectEngine):
    """
    This class manages those (extreme) cases in which the number of possible
    label combinations overflows the 64 bits integers, and uses an ObjectEngine
    containing Python integers.
    """
    _base = libindex.ObjectEngine
    def _codes_to_ints(self, codes):
        """
        Transform combination(s) of uint64 in one Python integer (each), in a
        strictly monotonic way (i.e. respecting the lexicographic order of
        integer combinations): see BaseMultiIndexCodesEngine documentation.
        Parameters
        ----------
        codes : 1- or 2-dimensional array of dtype uint64
            Combinations of integers (one per row)
        Returns
        -------
        int, or 1-dimensional array of dtype object
            Integer(s) representing one combination (each).
        """
        # Shift the representation of each level by the pre-calculated number
        # of bits. Since this can overflow uint64, first make sure we are
        # working with Python integers:
        # (the object cast gives arbitrary-precision ints; self.offsets is
        # assumed to come from BaseMultiIndexCodesEngine, outside this file.)
        codes = codes.astype("object") << self.offsets
        # Now sum and OR are in fact interchangeable. This is a simple
        # composition of the (disjunct) significant bits of each level (i.e.
        # each column in "codes") in a single positive integer (per row):
        if codes.ndim == 1:
            # Single key
            return np.bitwise_or.reduce(codes)
        # Multiple keys
        return np.bitwise_or.reduce(codes, axis=1)
def names_compat(meth):
    """
    A decorator to allow either `name` or `names` keyword but not both.
    This makes it easier to share code with base class.
    """
    @wraps(meth)
    def new_meth(self_or_cls, *args, **kwargs):
        has_name = "name" in kwargs
        if has_name and "names" in kwargs:
            raise TypeError("Can only provide one of `names` and `name`")
        if has_name:
            # Normalise the singular spelling onto `names`.
            kwargs["names"] = kwargs.pop("name")
        return meth(self_or_cls, *args, **kwargs)
    return new_meth
class MultiIndex(Index):
"""
A multi-level, or hierarchical, index object for pandas objects.
Parameters
----------
levels : sequence of arrays
The unique labels for each level.
codes : sequence of arrays
Integers for each level designating which label at each location.
.. versionadded:: 0.24.0
sortorder : optional int
Level of sortedness (must be lexicographically sorted by that
level).
names : optional sequence of objects
Names for each of the index levels. (name is accepted for compat).
copy : bool, default False
Copy the meta-data.
verify_integrity : bool, default True
Check that the levels/codes are consistent and valid.
Attributes
----------
names
levels
codes
nlevels
levshape
Methods
-------
from_arrays
from_tuples
from_product
from_frame
set_levels
set_codes
to_frame
to_flat_index
is_lexsorted
sortlevel
droplevel
swaplevel
reorder_levels
remove_unused_levels
get_locs
See Also
--------
MultiIndex.from_arrays : Convert list of arrays to MultiIndex.
MultiIndex.from_product : Create a MultiIndex from the cartesian product
of iterables.
MultiIndex.from_tuples : Convert list of tuples to a MultiIndex.
MultiIndex.from_frame : Make a MultiIndex from a DataFrame.
Index : The base pandas Index type.
Notes
-----
See the `user guide
<https://pandas.pydata.org/pandas-docs/stable/user_guide/advanced.html>`_
for more.
Examples
--------
A new ``MultiIndex`` is typically constructed using one of the helper
methods :meth:`MultiIndex.from_arrays`, :meth:`MultiIndex.from_product`
and :meth:`MultiIndex.from_tuples`. For example (using ``.from_arrays``):
>>> arrays = [[1, 1, 2, 2], ['red', 'blue', 'red', 'blue']]
>>> pd.MultiIndex.from_arrays(arrays, names=('number', 'color'))
MultiIndex([(1, 'red'),
(1, 'blue'),
(2, 'red'),
(2, 'blue')],
names=['number', 'color'])
See further examples for how to construct a MultiIndex in the doc strings
of the mentioned helper methods.
"""
_deprecations = Index._deprecations | frozenset()
# initialize to zero-length tuples to make everything work
_typ = "multiindex"
_names = FrozenList()
_levels = FrozenList()
_codes = FrozenList()
_comparables = ["names"]
rename = Index.set_names
sortorder: Optional[int]
# --------------------------------------------------------------------
# Constructors
def __new__(
cls,
levels=None,
codes=None,
sortorder=None,
names=None,
dtype=None,
copy=False,
name=None,
verify_integrity: bool = True,
_set_identity: bool = True,
):
# compat with Index
if name is not None:
names = name
if levels is None or codes is None:
raise TypeError("Must pass both levels and codes")
if len(levels) != len(codes):
raise ValueError("Length of levels and codes must be the same.")
if len(levels) == 0:
raise ValueError("Must pass non-zero number of levels/codes")
result = object.__new__(MultiIndex)
result._cache = {}
# we've already validated levels and codes, so shortcut here
result._set_levels(levels, copy=copy, validate=False)
result._set_codes(codes, copy=copy, validate=False)
result._names = [None] * len(levels)
if names is not None:
# handles name validation
result._set_names(names)
if sortorder is not None:
result.sortorder = int(sortorder)
else:
result.sortorder = sortorder
if verify_integrity:
new_codes = result._verify_integrity()
result._codes = new_codes
if _set_identity:
result._reset_identity()
return result
def _validate_codes(self, level: List, code: List):
    """
    Reassign code values as -1 if their corresponding levels are NaN.

    Parameters
    ----------
    code : list
        Code to reassign.
    level : list
        Level to check for missing values (NaN, NaT, None).

    Returns
    -------
    new code where code value = -1 if it corresponds
    to a level with missing values (NaN, NaT, None).
    """
    missing_in_level = isna(level)
    if not np.any(missing_in_level):
        # No missing level values: codes are returned untouched.
        return code
    # Any code pointing at a missing level value becomes the NA code (-1).
    return np.where(missing_in_level[code], -1, code)
def _verify_integrity(
    self, codes: Optional[List] = None, levels: Optional[List] = None
):
    """
    Parameters
    ----------
    codes : optional list
        Codes to check for validity. Defaults to current codes.
    levels : optional list
        Levels to check for validity. Defaults to current levels.
    Raises
    ------
    ValueError
        If length of levels and codes don't match, if the codes for any
        level would exceed level bounds, or there are any duplicate levels.
    Returns
    -------
    new codes where code value = -1 if it corresponds to a
    NaN level.
    """
    # NOTE: Currently does not check, among other things, that cached
    # nlevels matches nor that sortorder matches actually sortorder.
    # NOTE(review): ``or`` (rather than an ``is None`` check) means an
    # explicitly passed empty list also falls back to self.codes/levels.
    codes = codes or self.codes
    levels = levels or self.levels
    if len(levels) != len(codes):
        raise ValueError(
            "Length of levels and codes must match. NOTE: "
            "this index is in an inconsistent state."
        )
    # Every level's codes array must have one entry per row.
    codes_length = len(codes[0])
    for i, (level, level_codes) in enumerate(zip(levels, codes)):
        if len(level_codes) != codes_length:
            raise ValueError(
                f"Unequal code lengths: {[len(code_) for code_ in codes]}"
            )
        # Codes must index into the level; -1 is the only allowed sentinel.
        if len(level_codes) and level_codes.max() >= len(level):
            raise ValueError(
                f"On level {i}, code max ({level_codes.max()}) >= length of "
                f"level ({len(level)}). NOTE: this index is in an "
                "inconsistent state"
            )
        if len(level_codes) and level_codes.min() < -1:
            raise ValueError(f"On level {i}, code value ({level_codes.min()}) < -1")
        if not level.is_unique:
            raise ValueError(
                f"Level values must be unique: {list(level)} on level {i}"
            )
    if self.sortorder is not None:
        # A claimed sortorder deeper than the actual lexsort depth is a lie.
        if self.sortorder > self._lexsort_depth():
            raise ValueError(
                "Value for sortorder must be inferior or equal to actual "
                f"lexsort_depth: sortorder {self.sortorder} "
                f"with lexsort_depth {self._lexsort_depth()}"
            )
    # Remap codes over missing level values to -1 before freezing.
    codes = [
        self._validate_codes(level, code) for level, code in zip(levels, codes)
    ]
    new_codes = FrozenList(codes)
    return new_codes
@classmethod
def from_arrays(cls, arrays, sortorder=None, names=lib.no_default) -> "MultiIndex":
"""
Convert arrays to MultiIndex.
Parameters
----------
arrays : list / sequence of array-likes
Each array-like gives one level's value for each data point.
len(arrays) is the number of levels.
sortorder : int or None
Level of sortedness (must be lexicographically sorted by that
level).
names : list / sequence of str, optional
Names for the levels in the index.
Returns
-------
MultiIndex
See Also
--------
MultiIndex.from_tuples : Convert list of tuples to MultiIndex.
MultiIndex.from_product : Make a MultiIndex from cartesian product
of iterables.
MultiIndex.from_frame : Make a MultiIndex from a DataFrame.
Examples
--------
>>> arrays = [[1, 1, 2, 2], ['red', 'blue', 'red', 'blue']]
>>> pd.MultiIndex.from_arrays(arrays, names=('number', 'color'))
MultiIndex([(1, 'red'),
(1, 'blue'),
(2, 'red'),
(2, 'blue')],
names=['number', 'color'])
"""
error_msg = "Input must be a list / sequence of array-likes."
if not is_list_like(arrays):
raise TypeError(error_msg)
elif is_iterator(arrays):
arrays = list(arrays)
# Check if elements of array are list-like
for array in arrays:
if not is_list_like(array):
raise TypeError(error_msg)
# Check if lengths of all arrays are equal or not,
# raise ValueError, if not
for i in range(1, len(arrays)):
if len(arrays[i]) != len(arrays[i - 1]):
raise ValueError("all arrays must be same length")
codes, levels = factorize_from_iterables(arrays)
if names is lib.no_default:
names = [getattr(arr, "name", None) for arr in arrays]
return MultiIndex(
levels=levels,
codes=codes,
sortorder=sortorder,
names=names,
verify_integrity=False,
)
@classmethod
@names_compat
def from_tuples(
cls,
tuples,
sortorder: Optional[int] = None,
names: Optional[Sequence[Label]] = None,
):
"""
Convert list of tuples to MultiIndex.
Parameters
----------
tuples : list / sequence of tuple-likes
Each tuple is the index of one row/column.
sortorder : int or None
Level of sortedness (must be lexicographically sorted by that
level).
names : list / sequence of str, optional
Names for the levels in the index.
Returns
-------
MultiIndex
See Also
--------
MultiIndex.from_arrays : Convert list of arrays to MultiIndex.
MultiIndex.from_product : Make a MultiIndex from cartesian product
of iterables.
MultiIndex.from_frame : Make a MultiIndex from a DataFrame.
Examples
--------
>>> tuples = [(1, 'red'), (1, 'blue'),
... (2, 'red'), (2, 'blue')]
>>> pd.MultiIndex.from_tuples(tuples, names=('number', 'color'))
MultiIndex([(1, 'red'),
(1, 'blue'),
(2, 'red'),
(2, 'blue')],
names=['number', 'color'])
"""
if not is_list_like(tuples):
raise TypeError("Input must be a list / sequence of tuple-likes.")
elif is_iterator(tuples):
tuples = list(tuples)
arrays: List[Sequence[Label]]
if len(tuples) == 0:
if names is None:
raise TypeError("Cannot infer number of levels from empty list")
arrays = [[]] * len(names)
elif isinstance(tuples, (np.ndarray, Index)):
if isinstance(tuples, Index):
tuples = tuples._values
arrays = list(lib.tuples_to_object_array(tuples).T)
elif isinstance(tuples, list):
arrays = list(lib.to_object_array_tuples(tuples).T)
else:
arrays = zip(*tuples)
return MultiIndex.from_arrays(arrays, sortorder=sortorder, names=names)
@classmethod
def from_product(cls, iterables, sortorder=None, names=lib.no_default):
"""
Make a MultiIndex from the cartesian product of multiple iterables.
Parameters
----------
iterables : list / sequence of iterables
Each iterable has unique labels for each level of the index.
sortorder : int or None
Level of sortedness (must be lexicographically sorted by that
level).
names : list / sequence of str, optional
Names for the levels in the index.
.. versionchanged:: 1.0.0
If not explicitly provided, names will be inferred from the
elements of iterables if an element has a name attribute
Returns
-------
MultiIndex
See Also
--------
MultiIndex.from_arrays : Convert list of arrays to MultiIndex.
MultiIndex.from_tuples : Convert list of tuples to MultiIndex.
MultiIndex.from_frame : Make a MultiIndex from a DataFrame.
Examples
--------
>>> numbers = [0, 1, 2]
>>> colors = ['green', 'purple']
>>> pd.MultiIndex.from_product([numbers, colors],
... names=['number', 'color'])
MultiIndex([(0, 'green'),
(0, 'purple'),
(1, 'green'),
(1, 'purple'),
(2, 'green'),
(2, 'purple')],
names=['number', 'color'])
"""
from pandas.core.reshape.util import cartesian_product
if not is_list_like(iterables):
raise TypeError("Input must be a list / sequence of iterables.")
elif is_iterator(iterables):
iterables = list(iterables)
codes, levels = factorize_from_iterables(iterables)
if names is lib.no_default:
names = [getattr(it, "name", None) for it in iterables]
# codes are all ndarrays, so cartesian_product is lossless
codes = cartesian_product(codes)
return MultiIndex(levels, codes, sortorder=sortorder, names=names)
@classmethod
def from_frame(cls, df, sortorder=None, names=None):
"""
Make a MultiIndex from a DataFrame.
.. versionadded:: 0.24.0
Parameters
----------
df : DataFrame
DataFrame to be converted to MultiIndex.
sortorder : int, optional
Level of sortedness (must be lexicographically sorted by that
level).
names : list-like, optional
If no names are provided, use the column names, or tuple of column
names if the columns is a MultiIndex. If a sequence, overwrite
names with the given sequence.
Returns
-------
MultiIndex
The MultiIndex representation of the given DataFrame.
See Also
--------
MultiIndex.from_arrays : Convert list of arrays to MultiIndex.
MultiIndex.from_tuples : Convert list of tuples to MultiIndex.
MultiIndex.from_product : Make a MultiIndex from cartesian product
of iterables.
Examples
--------
>>> df = pd.DataFrame([['HI', 'Temp'], ['HI', 'Precip'],
... ['NJ', 'Temp'], ['NJ', 'Precip']],
... columns=['a', 'b'])
>>> df
a b
0 HI Temp
1 HI Precip
2 NJ Temp
3 NJ Precip
>>> pd.MultiIndex.from_frame(df)
MultiIndex([('HI', 'Temp'),
('HI', 'Precip'),
('NJ', 'Temp'),
('NJ', 'Precip')],
names=['a', 'b'])
Using explicit names, instead of the column names
>>> pd.MultiIndex.from_frame(df, names=['state', 'observation'])
MultiIndex([('HI', 'Temp'),
('HI', 'Precip'),
('NJ', 'Temp'),
('NJ', 'Precip')],
names=['state', 'observation'])
"""
if not isinstance(df, ABCDataFrame):
raise TypeError("Input must be a DataFrame")
column_names, columns = zip(*df.items())
names = column_names if names is None else names
return cls.from_arrays(columns, sortorder=sortorder, names=names)
# --------------------------------------------------------------------
@cache_readonly
def _values(self):
# We override here, since our parent uses _data, which we don't use.
values = []
for i in range(self.nlevels):
vals = self._get_level_values(i)
if is_categorical_dtype(vals.dtype):
vals = vals._internal_get_values()
if isinstance(vals.dtype, ExtensionDtype) or isinstance(
vals, (ABCDatetimeIndex, ABCTimedeltaIndex)
):
vals = vals.astype(object)
vals = np.array(vals, copy=False)
values.append(vals)
arr = lib.fast_zip(values)
return arr
@property
def values(self):
return self._values
@property
def array(self):
"""
Raises a ValueError for `MultiIndex` because there's no single
array backing a MultiIndex.
Raises
------
ValueError
"""
raise ValueError(
"MultiIndex has no single backing array. Use "
"'MultiIndex.to_numpy()' to get a NumPy array of tuples."
)
@property
def shape(self):
"""
Return a tuple of the shape of the underlying data.
"""
# overriding the base Index.shape definition to avoid materializing
# the values (GH-27384, GH-27775)
return (len(self),)
def __len__(self) -> int:
    # The row count equals the length of any codes array; use the first.
    return len(self.codes[0])
# --------------------------------------------------------------------
# Levels Methods
@cache_readonly
def levels(self):
# Use cache_readonly to ensure that self.get_locs doesn't repeatedly
# create new IndexEngine
# https://github.com/pandas-dev/pandas/issues/31648
result = [
x._shallow_copy(name=name) for x, name in zip(self._levels, self._names)
]
for level in result:
# disallow midx.levels[0].name = "foo"
level._no_setting_name = True
return FrozenList(result)
def _set_levels(
self,
levels,
level=None,
copy: bool = False,
validate: bool = True,
verify_integrity: bool = False,
) -> None:
# This is NOT part of the levels property because it should be
# externally not allowed to set levels. User beware if you change
# _levels directly
if validate:
if len(levels) == 0:
raise ValueError("Must set non-zero number of levels.")
if level is None and len(levels) != self.nlevels:
raise ValueError("Length of levels must match number of levels.")
if level is not None and len(levels) != len(level):
raise ValueError("Length of levels must match length of level.")
if level is None:
new_levels = FrozenList(
ensure_index(lev, copy=copy)._shallow_copy() for lev in levels
)
else:
level_numbers = [self._get_level_number(lev) for lev in level]
new_levels_list = list(self._levels)
for lev_num, lev in zip(level_numbers, levels):
new_levels_list[lev_num] = ensure_index(lev, copy=copy)._shallow_copy()
new_levels = FrozenList(new_levels_list)
if verify_integrity:
new_codes = self._verify_integrity(levels=new_levels)
self._codes = new_codes
names = self.names
self._levels = new_levels
if any(names):
self._set_names(names)
self._reset_cache()
def set_levels(self, levels, level=None, inplace=None, verify_integrity=True):
"""
Set new levels on MultiIndex. Defaults to returning new index.
Parameters
----------
levels : sequence or list of sequence
New level(s) to apply.
level : int, level name, or sequence of int/level names (default None)
Level(s) to set (None for all levels).
inplace : bool
If True, mutates in place.
.. deprecated:: 1.2.0
verify_integrity : bool, default True
If True, checks that levels and codes are compatible.
Returns
-------
new index (of same type and class...etc)
Examples
--------
>>> idx = pd.MultiIndex.from_tuples(
... [
... (1, "one"),
... (1, "two"),
... (2, "one"),
... (2, "two"),
... (3, "one"),
... (3, "two")
... ],
... names=["foo", "bar"]
... )
>>> idx
MultiIndex([(1, 'one'),
(1, 'two'),
(2, 'one'),
(2, 'two'),
(3, 'one'),
(3, 'two')],
names=['foo', 'bar'])
>>> idx.set_levels([['a', 'b', 'c'], [1, 2]])
MultiIndex([('a', 1),
('a', 2),
('b', 1),
('b', 2),
('c', 1),
('c', 2)],
names=['foo', 'bar'])
>>> idx.set_levels(['a', 'b', 'c'], level=0)
MultiIndex([('a', 'one'),
('a', 'two'),
('b', 'one'),
('b', 'two'),
('c', 'one'),
('c', 'two')],
names=['foo', 'bar'])
>>> idx.set_levels(['a', 'b'], level='bar')
MultiIndex([(1, 'a'),
(1, 'b'),
(2, 'a'),
(2, 'b'),
(3, 'a'),
(3, 'b')],
names=['foo', 'bar'])
If any of the levels passed to ``set_levels()`` exceeds the
existing length, all of the values from that argument will
be stored in the MultiIndex levels, though the values will
be truncated in the MultiIndex output.
>>> idx.set_levels([['a', 'b', 'c'], [1, 2, 3, 4]], level=[0, 1])
MultiIndex([('a', 1),
('a', 2),
('b', 1),
('b', 2),
('c', 1),
('c', 2)],
names=['foo', 'bar'])
>>> idx.set_levels([['a', 'b', 'c'], [1, 2, 3, 4]], level=[0, 1]).levels
FrozenList([['a', 'b', 'c'], [1, 2, 3, 4]])
"""
if inplace is not None:
warnings.warn(
"inplace is deprecated and will be removed in a future version.",
FutureWarning,
stacklevel=2,
)
else:
inplace = False
if is_list_like(levels) and not isinstance(levels, Index):
levels = list(levels)
if level is not None and not is_list_like(level):
if not is_list_like(levels):
raise TypeError("Levels must be list-like")
if is_list_like(levels[0]):
raise TypeError("Levels must be list-like")
level = [level]
levels = [levels]
elif level is None or is_list_like(level):
if not is_list_like(levels) or not is_list_like(levels[0]):
raise TypeError("Levels must be list of lists-like")
if inplace:
idx = self
else:
idx = self._shallow_copy()
idx._reset_identity()
idx._set_levels(
levels, level=level, validate=True, verify_integrity=verify_integrity
)
if not inplace:
return idx
@property
def nlevels(self) -> int:
"""
Integer number of levels in this MultiIndex.
"""
return len(self._levels)
@property
def levshape(self):
"""
A tuple with the length of each level.
"""
return tuple(len(x) for x in self.levels)
# --------------------------------------------------------------------
# Codes Methods
@property
def codes(self):
return self._codes
def _set_codes(
self,
codes,
level=None,
copy: bool = False,
validate: bool = True,
verify_integrity: bool = False,
) -> None:
if validate:
if level is None and len(codes) != self.nlevels:
raise ValueError("Length of codes must match number of levels")
if level is not None and len(codes) != len(level):
raise ValueError("Length of codes must match length of levels.")
if level is None:
new_codes = FrozenList(
_coerce_indexer_frozen(level_codes, lev, copy=copy).view()
for lev, level_codes in zip(self._levels, codes)
)
else:
level_numbers = [self._get_level_number(lev) for lev in level]
new_codes_list = list(self._codes)
for lev_num, level_codes in zip(level_numbers, codes):
lev = self.levels[lev_num]
new_codes_list[lev_num] = _coerce_indexer_frozen(
level_codes, lev, copy=copy
)
new_codes = FrozenList(new_codes_list)
if verify_integrity:
new_codes = self._verify_integrity(codes=new_codes)
self._codes = new_codes
self._reset_cache()
    def set_codes(self, codes, level=None, inplace=None, verify_integrity=True):
        """
        Set new codes on MultiIndex. Defaults to returning new index.
        .. versionadded:: 0.24.0
        New name for deprecated method `set_labels`.
        Parameters
        ----------
        codes : sequence or list of sequence
            New codes to apply.
        level : int, level name, or sequence of int/level names (default None)
            Level(s) to set (None for all levels).
        inplace : bool
            If True, mutates in place.
            .. deprecated:: 1.2.0
        verify_integrity : bool (default True)
            If True, checks that levels and codes are compatible.
        Returns
        -------
        new index (of same type and class...etc)
        Examples
        --------
        >>> idx = pd.MultiIndex.from_tuples(
        ...     [(1, "one"), (1, "two"), (2, "one"), (2, "two")], names=["foo", "bar"]
        ... )
        >>> idx
        MultiIndex([(1, 'one'),
            (1, 'two'),
            (2, 'one'),
            (2, 'two')],
           names=['foo', 'bar'])
        >>> idx.set_codes([[1, 0, 1, 0], [0, 0, 1, 1]])
        MultiIndex([(2, 'one'),
            (1, 'one'),
            (2, 'two'),
            (1, 'two')],
           names=['foo', 'bar'])
        >>> idx.set_codes([1, 0, 1, 0], level=0)
        MultiIndex([(2, 'one'),
            (1, 'two'),
            (2, 'one'),
            (1, 'two')],
           names=['foo', 'bar'])
        >>> idx.set_codes([0, 0, 1, 1], level='bar')
        MultiIndex([(1, 'one'),
            (1, 'one'),
            (2, 'two'),
            (2, 'two')],
           names=['foo', 'bar'])
        >>> idx.set_codes([[1, 0, 1, 0], [0, 0, 1, 1]], level=[0, 1])
        MultiIndex([(2, 'one'),
            (1, 'one'),
            (2, 'two'),
            (1, 'two')],
           names=['foo', 'bar'])
        """
        # inplace=True is deprecated; any non-None value triggers the warning.
        if inplace is not None:
            warnings.warn(
                "inplace is deprecated and will be removed in a future version.",
                FutureWarning,
                stacklevel=2,
            )
        else:
            inplace = False
        if level is not None and not is_list_like(level):
            # Scalar level: expect one flat list of codes, not a list of lists.
            if not is_list_like(codes):
                raise TypeError("Codes must be list-like")
            if is_list_like(codes[0]):
                raise TypeError("Codes must be list-like")
            level = [level]
            codes = [codes]
        elif level is None or is_list_like(level):
            # Multiple (or all) levels: expect a list of code lists.
            if not is_list_like(codes) or not is_list_like(codes[0]):
                raise TypeError("Codes must be list of lists-like")
        if inplace:
            idx = self
        else:
            idx = self._shallow_copy()
        idx._reset_identity()
        idx._set_codes(codes, level=level, verify_integrity=verify_integrity)
        if not inplace:
            return idx
# --------------------------------------------------------------------
# Index Internals
    @cache_readonly
    def _engine(self):
        """Build the lookup engine: codes from all levels packed into one int key."""
        # Calculate the number of bits needed to represent labels in each
        # level, as log2 of their sizes (including -1 for NaN):
        sizes = np.ceil(np.log2([len(l) + 1 for l in self.levels]))
        # Sum bit counts, starting from the _right_....
        lev_bits = np.cumsum(sizes[::-1])[::-1]
        # ... in order to obtain offsets such that sorting the combination of
        # shifted codes (one for each level, resulting in a unique integer) is
        # equivalent to sorting lexicographically the codes themselves. Notice
        # that each level needs to be shifted by the number of bits needed to
        # represent the _previous_ ones:
        offsets = np.concatenate([lev_bits[1:], [0]]).astype("uint64")
        # Check the total number of bits needed for our representation:
        if lev_bits[0] > 64:
            # The levels would overflow a 64 bit uint - use Python integers:
            return MultiIndexPyIntEngine(self.levels, self.codes, offsets)
        return MultiIndexUIntEngine(self.levels, self.codes, offsets)
@property
def _constructor(self):
return MultiIndex.from_tuples
    @doc(Index._shallow_copy)
    def _shallow_copy(
        self,
        values=None,
        name=lib.no_default,
        levels=None,
        codes=None,
        sortorder=None,
        names=lib.no_default,
        _set_identity: bool = True,
    ):
        # ``names`` and ``name`` are aliases; accept at most one of them.
        if names is not lib.no_default and name is not lib.no_default:
            raise TypeError("Can only provide one of `names` and `name`")
        elif names is lib.no_default:
            names = name if name is not lib.no_default else self.names
        if values is not None:
            # Rebuilding from explicit tuple values is incompatible with
            # passing replacement levels/codes at the same time.
            assert levels is None and codes is None
            return MultiIndex.from_tuples(values, sortorder=sortorder, names=names)
        levels = levels if levels is not None else self.levels
        codes = codes if codes is not None else self.codes
        result = MultiIndex(
            levels=levels,
            codes=codes,
            sortorder=sortorder,
            names=names,
            verify_integrity=False,
        )
        # Share cached values with the copy, except the "levels" entry which
        # may embed stale names (GH32669).
        result._cache = self._cache.copy()
        result._cache.pop("levels", None)  # GH32669
        return result
def symmetric_difference(self, other, result_name=None, sort=None):
# On equal symmetric_difference MultiIndexes the difference is empty.
# Therefore, an empty MultiIndex is returned GH13490
tups = Index.symmetric_difference(self, other, result_name, sort)
if len(tups) == 0:
return MultiIndex(
levels=[[] for _ in range(self.nlevels)],
codes=[[] for _ in range(self.nlevels)],
names=tups.name,
)
return type(self).from_tuples(tups, names=tups.name)
# --------------------------------------------------------------------
    def copy(
        self,
        names=None,
        dtype=None,
        levels=None,
        codes=None,
        deep=False,
        name=None,
        _set_identity=False,
    ):
        """
        Make a copy of this object. Names, dtype, levels and codes can be
        passed and will be set on new copy.
        Parameters
        ----------
        names : sequence, optional
        dtype : numpy dtype or pandas type, optional
            .. deprecated:: 1.2.0
        levels : sequence, optional
        codes : sequence, optional
        deep : bool, default False
        name : Label
            Kept for compatibility with 1-dimensional Index. Should not be used.
        Returns
        -------
        MultiIndex
        Notes
        -----
        In most cases, there should be no functional difference from using
        ``deep``, but if ``deep`` is passed it will attempt to deepcopy.
        This could be potentially expensive on large MultiIndex objects.
        """
        names = self._validate_names(name=name, names=names, deep=deep)
        if deep:
            from copy import deepcopy
            # Only deep-copy the pieces the caller did not explicitly replace.
            if levels is None:
                levels = deepcopy(self.levels)
            if codes is None:
                codes = deepcopy(self.codes)
        new_index = self._shallow_copy(
            levels=levels,
            codes=codes,
            names=names,
            sortorder=self.sortorder,
            _set_identity=_set_identity,
        )
        if dtype:
            # Deprecated escape hatch; astype is the supported spelling.
            warnings.warn(
                "parameter dtype is deprecated and will be removed in a future "
                "version. Use the astype method instead.",
                FutureWarning,
                stacklevel=2,
            )
            new_index = new_index.astype(dtype)
        return new_index
def __array__(self, dtype=None) -> np.ndarray:
""" the array interface, return my values """
return self.values
def view(self, cls=None):
""" this is defined as a copy with the same identity """
result = self.copy()
result._id = self._id
return result
@doc(Index.__contains__)
def __contains__(self, key: Any) -> bool:
hash(key)
try:
self.get_loc(key)
return True
except (LookupError, TypeError, ValueError):
return False
@cache_readonly
def dtype(self) -> np.dtype:
return np.dtype("O")
def _is_memory_usage_qualified(self) -> bool:
""" return a boolean if we need a qualified .info display """
def f(l):
return "mixed" in l or "string" in l or "unicode" in l
return any(f(l) for l in self._inferred_type_levels)
@doc(Index.memory_usage)
def memory_usage(self, deep: bool = False) -> int:
# we are overwriting our base class to avoid
# computing .values here which could materialize
# a tuple representation unnecessarily
return self._nbytes(deep)
@cache_readonly
def nbytes(self) -> int:
""" return the number of bytes in the underlying data """
return self._nbytes(False)
def _nbytes(self, deep: bool = False) -> int:
"""
return the number of bytes in the underlying data
deeply introspect the level data if deep=True
include the engine hashtable
*this is in internal routine*
"""
# for implementations with no useful getsizeof (PyPy)
objsize = 24
level_nbytes = sum(i.memory_usage(deep=deep) for i in self.levels)
label_nbytes = sum(i.nbytes for i in self.codes)
names_nbytes = sum(getsizeof(i, objsize) for i in self.names)
result = level_nbytes + label_nbytes + names_nbytes
# include our engine hashtable
result += self._engine.sizeof(deep=deep)
return result
# --------------------------------------------------------------------
# Rendering Methods
def _formatter_func(self, tup):
"""
Formats each item in tup according to its level's formatter function.
"""
formatter_funcs = [level._formatter_func for level in self.levels]
return tuple(func(val) for func, val in zip(formatter_funcs, tup))
def _format_data(self, name=None):
"""
Return the formatted data as a unicode string
"""
return format_object_summary(
self, self._formatter_func, name=name, line_break_each_value=True
)
def _format_attrs(self):
"""
Return a list of tuples of the (attr,formatted_value).
"""
return format_object_attrs(self, include_dtype=False)
    def _format_native_types(self, na_rep="nan", **kwargs):
        """Format level values to native (string) types, mapping NaN codes to na_rep."""
        new_levels = []
        new_codes = []
        # go through the levels and format them
        for level, level_codes in zip(self.levels, self.codes):
            level = level._format_native_types(na_rep=na_rep, **kwargs)
            # add nan values, if there are any
            mask = level_codes == -1
            if mask.any():
                # Append na_rep as an extra level entry and repoint the -1
                # codes at that new slot.
                nan_index = len(level)
                level = np.append(level, na_rep)
                assert not level_codes.flags.writeable  # i.e. copy is needed
                level_codes = level_codes.copy()  # make writeable
                level_codes[mask] = nan_index
            new_levels.append(level)
            new_codes.append(level_codes)
        if len(new_levels) == 1:
            # a single-level multi-index
            return Index(new_levels[0].take(new_codes[0]))._format_native_types()
        else:
            # reconstruct the multi-index
            mi = MultiIndex(
                levels=new_levels,
                codes=new_codes,
                names=self.names,
                sortorder=self.sortorder,
                verify_integrity=False,
            )
            return mi._values
    def format(
        self,
        name: Optional[bool] = None,
        formatter: Optional[Callable] = None,
        na_rep: Optional[str] = None,
        names: bool = False,
        space: int = 2,
        sparsify=None,
        adjoin: bool = True,
    ) -> List:
        """
        Render the index as a list of formatted strings.

        ``names=True`` prepends each level's name; ``sparsify`` blanks out
        repeated values within a level; ``adjoin`` joins the per-level
        columns into aligned display lines.
        """
        # ``name`` is a legacy alias for ``names``.
        if name is not None:
            names = name
        if len(self) == 0:
            return []
        stringified_levels = []
        for lev, level_codes in zip(self.levels, self.codes):
            na = na_rep if na_rep is not None else _get_na_rep(lev.dtype.type)
            if len(lev) > 0:
                formatted = lev.take(level_codes).format(formatter=formatter)
                # we have some NA
                mask = level_codes == -1
                if mask.any():
                    formatted = np.array(formatted, dtype=object)
                    formatted[mask] = na
                    formatted = formatted.tolist()
            else:
                # weird all NA case
                formatted = [
                    pprint_thing(na if isna(x) else x, escape_chars=("\t", "\r", "\n"))
                    for x in algos.take_1d(lev._values, level_codes)
                ]
            stringified_levels.append(formatted)
        result_levels = []
        for lev, lev_name in zip(stringified_levels, self.names):
            level = []
            if names:
                # Level name (or empty string) becomes the column header.
                level.append(
                    pprint_thing(lev_name, escape_chars=("\t", "\r", "\n"))
                    if lev_name is not None
                    else ""
                )
            level.extend(np.array(lev, dtype=object))
            result_levels.append(level)
        if sparsify is None:
            sparsify = get_option("display.multi_sparse")
        if sparsify:
            sentinel = ""
            # GH3547 use value of sparsify as sentinel if it's "Falsey"
            assert isinstance(sparsify, bool) or sparsify is lib.no_default
            if sparsify in [False, lib.no_default]:
                sentinel = sparsify
            # little bit of a kludge job for #1217
            result_levels = sparsify_labels(
                result_levels, start=int(names), sentinel=sentinel
            )
        if adjoin:
            from pandas.io.formats.format import get_adjustment
            adj = get_adjustment()
            return adj.adjoin(space, *result_levels).split("\n")
        else:
            return result_levels
# --------------------------------------------------------------------
# Names Methods
def _get_names(self):
return FrozenList(self._names)
    def _set_names(self, names, level=None, validate=True):
        """
        Set new names on index. Each name has to be a hashable type.
        Parameters
        ----------
        values : str or sequence
            name(s) to set
        level : int, level name, or sequence of int/level names (default None)
            If the index is a MultiIndex (hierarchical), level(s) to set (None
            for all levels). Otherwise level must be None
        validate : boolean, default True
            validate that the names match level lengths
        Raises
        ------
        TypeError if each name is not hashable.
        Notes
        -----
        sets names on levels. WARNING: mutates!
        Note that you generally want to set this *after* changing levels, so
        that it only acts on copies
        """
        # GH 15110
        # Don't allow a single string for names in a MultiIndex
        if names is not None and not is_list_like(names):
            raise ValueError("Names should be list-like for a MultiIndex")
        names = list(names)
        if validate:
            if level is not None and len(names) != len(level):
                raise ValueError("Length of names must match length of level.")
            if level is None and len(names) != self.nlevels:
                raise ValueError(
                    "Length of names must match number of levels in MultiIndex."
                )
        if level is None:
            level = range(self.nlevels)
        else:
            level = [self._get_level_number(lev) for lev in level]
        # set the name
        for lev, name in zip(level, names):
            if name is not None:
                # GH 20527
                # All items in 'names' need to be hashable:
                if not is_hashable(name):
                    raise TypeError(
                        f"{type(self).__name__}.name must be a hashable type"
                    )
            self._names[lev] = name
        # If .levels has been accessed, the names in our cache will be stale.
        self._reset_cache()
names = property(
fset=_set_names, fget=_get_names, doc="""\nNames of levels in MultiIndex.\n"""
)
# --------------------------------------------------------------------
    @doc(Index._get_grouper_for_level)
    def _get_grouper_for_level(self, mapper, level):
        indexer = self.codes[level]
        level_index = self.levels[level]
        if mapper is not None:
            # Handle group mapping function and return
            level_values = self.levels[level].take(indexer)
            grouper = level_values.map(mapper)
            return grouper, None, None
        # Factorize the codes themselves so unobserved level values drop out.
        codes, uniques = algos.factorize(indexer, sort=True)
        if len(uniques) > 0 and uniques[0] == -1:
            # Handle NAs
            mask = indexer != -1
            ok_codes, uniques = algos.factorize(indexer[mask], sort=True)
            # Rebuild full-length codes with -1 kept in the NA positions.
            codes = np.empty(len(indexer), dtype=indexer.dtype)
            codes[mask] = ok_codes
            codes[~mask] = -1
        if len(uniques) < len(level_index):
            # Remove unobserved levels from level_index
            level_index = level_index.take(uniques)
        else:
            # break references back to us so that setting the name
            # on the output of a groupby doesn't reflect back here.
            level_index = level_index.copy()
        if level_index._can_hold_na:
            grouper = level_index.take(codes, fill_value=True)
        else:
            grouper = level_index.take(codes)
        return grouper, codes, level_index
@cache_readonly
def inferred_type(self) -> str:
return "mixed"
    def _get_level_number(self, level) -> int:
        """Return the zero-based position for ``level`` (a name or an int)."""
        count = self.names.count(level)
        if (count > 1) and not is_integer(level):
            raise ValueError(
                f"The name {level} occurs multiple times, use a level number"
            )
        try:
            level = self.names.index(level)
        except ValueError as err:
            # Not a name: treat as an integer position, normalizing negatives.
            if not is_integer(level):
                raise KeyError(f"Level {level} not found") from err
            elif level < 0:
                level += self.nlevels
                if level < 0:
                    orig_level = level - self.nlevels
                    raise IndexError(
                        f"Too many levels: Index has only {self.nlevels} levels, "
                        f"{orig_level} is not a valid level number"
                    ) from err
            # Note: levels are zero-based
            elif level >= self.nlevels:
                raise IndexError(
                    f"Too many levels: Index has only {self.nlevels} levels, "
                    f"not {level + 1}"
                ) from err
        return level
@property
def _has_complex_internals(self) -> bool:
# used to avoid libreduction code paths, which raise or require conversion
return True
    @cache_readonly
    def is_monotonic_increasing(self) -> bool:
        """
        return if the index is monotonic increasing (only equal or
        increasing) values.
        """
        if all(x.is_monotonic for x in self.levels):
            # If each level is sorted, we can operate on the codes directly. GH27495
            return libalgos.is_lexsorted(
                [x.astype("int64", copy=False) for x in self.codes]
            )
        # reversed() because lexsort() wants the most significant key last.
        values = [
            self._get_level_values(i)._values for i in reversed(range(len(self.levels)))
        ]
        try:
            # Monotonic iff the lexsort permutation is the identity ordering.
            sort_order = np.lexsort(values)
            return Index(sort_order).is_monotonic
        except TypeError:
            # we have mixed types and np.lexsort is not happy
            return Index(self._values).is_monotonic
@cache_readonly
def is_monotonic_decreasing(self) -> bool:
"""
return if the index is monotonic decreasing (only equal or
decreasing) values.
"""
# monotonic decreasing if and only if reverse is monotonic increasing
return self[::-1].is_monotonic_increasing
@cache_readonly
def _inferred_type_levels(self):
""" return a list of the inferred types, one for each level """
return [i.inferred_type for i in self.levels]
@doc(Index.duplicated)
def duplicated(self, keep="first"):
shape = map(len, self.levels)
ids = get_group_index(self.codes, shape, sort=False, xnull=False)
return duplicated_int64(ids, keep)
def fillna(self, value=None, downcast=None):
"""
fillna is not implemented for MultiIndex
"""
raise NotImplementedError("isna is not defined for MultiIndex")
@doc(Index.dropna)
def dropna(self, how="any"):
nans = [level_codes == -1 for level_codes in self.codes]
if how == "any":
indexer = np.any(nans, axis=0)
elif how == "all":
indexer = np.all(nans, axis=0)
else:
raise ValueError(f"invalid how option: {how}")
new_codes = [level_codes[~indexer] for level_codes in self.codes]
return self.copy(codes=new_codes, deep=True)
    def _get_level_values(self, level, unique=False):
        """
        Return vector of label values for requested level,
        equal to the length of the index
        **this is an internal method**
        Parameters
        ----------
        level : int level
        unique : bool, default False
            if True, drop duplicated values
        Returns
        -------
        values : ndarray
        """
        lev = self.levels[level]
        level_codes = self.codes[level]
        name = self._names[level]
        if unique:
            level_codes = algos.unique(level_codes)
        # -1 codes (missing) are filled with the level's NA value.
        filled = algos.take_1d(lev._values, level_codes, fill_value=lev._na_value)
        return lev._shallow_copy(filled, name=name)
def get_level_values(self, level):
"""
Return vector of label values for requested level.
Length of returned vector is equal to the length of the index.
Parameters
----------
level : int or str
``level`` is either the integer position of the level in the
MultiIndex, or the name of the level.
Returns
-------
values : Index
Values is a level of this MultiIndex converted to
a single :class:`Index` (or subclass thereof).
Examples
--------
Create a MultiIndex:
>>> mi = pd.MultiIndex.from_arrays((list('abc'), list('def')))
>>> mi.names = ['level_1', 'level_2']
Get level values by supplying level as either integer or name:
>>> mi.get_level_values(0)
Index(['a', 'b', 'c'], dtype='object', name='level_1')
>>> mi.get_level_values('level_2')
Index(['d', 'e', 'f'], dtype='object', name='level_2')
"""
level = self._get_level_number(level)
values = self._get_level_values(level)
return values
@doc(Index.unique)
def unique(self, level=None):
if level is None:
return super().unique()
else:
level = self._get_level_number(level)
return self._get_level_values(level=level, unique=True)
def _to_safe_for_reshape(self):
""" convert to object if we are a categorical """
return self.set_levels([i._to_safe_for_reshape() for i in self.levels])
    def to_frame(self, index=True, name=None):
        """
        Create a DataFrame with the levels of the MultiIndex as columns.
        Column ordering is determined by the DataFrame constructor with data as
        a dict.
        .. versionadded:: 0.24.0
        Parameters
        ----------
        index : bool, default True
            Set the index of the returned DataFrame as the original MultiIndex.
        name : list / sequence of str, optional
            The passed names should substitute index level names.
        Returns
        -------
        DataFrame : a DataFrame containing the original MultiIndex data.
        See Also
        --------
        DataFrame : Two-dimensional, size-mutable, potentially heterogeneous
            tabular data.
        """
        from pandas import DataFrame
        if name is not None:
            if not is_list_like(name):
                raise TypeError("'name' must be a list / sequence of column names.")
            if len(name) != len(self.levels):
                raise ValueError(
                    "'name' should have same length as number of levels on index."
                )
            idx_names = name
        else:
            idx_names = self.names
        # Guarantee resulting column order - PY36+ dict maintains insertion order
        result = DataFrame(
            {
                # Unnamed levels fall back to their integer position as the
                # column label.
                (level if lvlname is None else lvlname): self._get_level_values(level)
                for lvlname, level in zip(idx_names, range(len(self.levels)))
            },
            copy=False,
        )
        if index:
            result.index = self
        return result
def to_flat_index(self):
"""
Convert a MultiIndex to an Index of Tuples containing the level values.
.. versionadded:: 0.24.0
Returns
-------
pd.Index
Index with the MultiIndex data represented in Tuples.
Notes
-----
This method will simply return the caller if called by anything other
than a MultiIndex.
Examples
--------
>>> index = pd.MultiIndex.from_product(
... [['foo', 'bar'], ['baz', 'qux']],
... names=['a', 'b'])
>>> index.to_flat_index()
Index([('foo', 'baz'), ('foo', 'qux'),
('bar', 'baz'), ('bar', 'qux')],
dtype='object')
"""
return Index(self._values, tupleize_cols=False)
@property
def is_all_dates(self) -> bool:
return False
def is_lexsorted(self) -> bool:
"""
Return True if the codes are lexicographically sorted.
Returns
-------
bool
Examples
--------
In the below examples, the first level of the MultiIndex is sorted because
a<b<c, so there is no need to look at the next level.
>>> pd.MultiIndex.from_arrays([['a', 'b', 'c'], ['d', 'e', 'f']]).is_lexsorted()
True
>>> pd.MultiIndex.from_arrays([['a', 'b', 'c'], ['d', 'f', 'e']]).is_lexsorted()
True
In case there is a tie, the lexicographical sorting looks
at the next level of the MultiIndex.
>>> pd.MultiIndex.from_arrays([[0, 1, 1], ['a', 'b', 'c']]).is_lexsorted()
True
>>> pd.MultiIndex.from_arrays([[0, 1, 1], ['a', 'c', 'b']]).is_lexsorted()
False
>>> pd.MultiIndex.from_arrays([['a', 'a', 'b', 'b'],
... ['aa', 'bb', 'aa', 'bb']]).is_lexsorted()
True
>>> pd.MultiIndex.from_arrays([['a', 'a', 'b', 'b'],
... ['bb', 'aa', 'aa', 'bb']]).is_lexsorted()
False
"""
return self.lexsort_depth == self.nlevels
@cache_readonly
def lexsort_depth(self):
if self.sortorder is not None:
return self.sortorder
return self._lexsort_depth()
def _lexsort_depth(self) -> int:
"""
Compute and return the lexsort_depth, the number of levels of the
MultiIndex that are sorted lexically
Returns
-------
int
"""
int64_codes = [ensure_int64(level_codes) for level_codes in self.codes]
for k in range(self.nlevels, 0, -1):
if libalgos.is_lexsorted(int64_codes[:k]):
return k
return 0
    def _sort_levels_monotonic(self):
        """
        This is an *internal* function.
        Create a new MultiIndex from the current to monotonically sorted
        items IN the levels. This does not actually make the entire MultiIndex
        monotonic, JUST the levels.
        The resulting MultiIndex will have the same outward
        appearance, meaning the same .values and ordering. It will also
        be .equals() to the original.
        Returns
        -------
        MultiIndex
        Examples
        --------
        >>> mi = pd.MultiIndex(levels=[['a', 'b'], ['bb', 'aa']],
        ...                    codes=[[0, 0, 1, 1], [0, 1, 0, 1]])
        >>> mi
        MultiIndex([('a', 'bb'),
            ('a', 'aa'),
            ('b', 'bb'),
            ('b', 'aa')],
           )
        >>> mi.sort_values()
        MultiIndex([('a', 'aa'),
            ('a', 'bb'),
            ('b', 'aa'),
            ('b', 'bb')],
           )
        """
        if self.is_lexsorted() and self.is_monotonic:
            return self
        new_levels = []
        new_codes = []
        for lev, level_codes in zip(self.levels, self.codes):
            if not lev.is_monotonic:
                try:
                    # indexer to reorder the levels
                    indexer = lev.argsort()
                except TypeError:
                    # Unorderable level values: leave this level untouched.
                    pass
                else:
                    lev = lev.take(indexer)
                    # indexer to reorder the level codes
                    indexer = ensure_int64(indexer)
                    ri = lib.get_reverse_indexer(indexer, len(indexer))
                    level_codes = algos.take_1d(ri, level_codes)
            new_levels.append(lev)
            new_codes.append(level_codes)
        return MultiIndex(
            new_levels,
            new_codes,
            names=self.names,
            sortorder=self.sortorder,
            verify_integrity=False,
        )
    def remove_unused_levels(self):
        """
        Create new MultiIndex from current that removes unused levels.
        Unused level(s) means levels that are not expressed in the
        labels. The resulting MultiIndex will have the same outward
        appearance, meaning the same .values and ordering. It will
        also be .equals() to the original.
        Returns
        -------
        MultiIndex
        Examples
        --------
        >>> mi = pd.MultiIndex.from_product([range(2), list('ab')])
        >>> mi
        MultiIndex([(0, 'a'),
            (0, 'b'),
            (1, 'a'),
            (1, 'b')],
           )
        >>> mi[2:]
        MultiIndex([(1, 'a'),
            (1, 'b')],
           )
        The 0 from the first level is not represented
        and can be removed
        >>> mi2 = mi[2:].remove_unused_levels()
        >>> mi2.levels
        FrozenList([[1], ['a', 'b']])
        """
        new_levels = []
        new_codes = []
        changed = False
        for lev, level_codes in zip(self.levels, self.codes):
            # Since few levels are typically unused, bincount() is more
            # efficient than unique() - however it only accepts positive values
            # (and drops order):
            uniques = np.where(np.bincount(level_codes + 1) > 0)[0] - 1
            has_na = int(len(uniques) and (uniques[0] == -1))
            if len(uniques) != len(lev) + has_na:
                # We have unused levels
                changed = True
                # Recalculate uniques, now preserving order.
                # Can easily be cythonized by exploiting the already existing
                # "uniques" and stop parsing "level_codes" when all items
                # are found:
                uniques = algos.unique(level_codes)
                if has_na:
                    na_idx = np.where(uniques == -1)[0]
                    # Just ensure that -1 is in first position:
                    uniques[[0, na_idx[0]]] = uniques[[na_idx[0], 0]]
                # codes get mapped from uniques to 0:len(uniques)
                # -1 (if present) is mapped to last position
                code_mapping = np.zeros(len(lev) + has_na)
                # ... and reassigned value -1:
                code_mapping[uniques] = np.arange(len(uniques)) - has_na
                level_codes = code_mapping[level_codes]
                # new levels are simple
                lev = lev.take(uniques[has_na:])
            new_levels.append(lev)
            new_codes.append(level_codes)
        result = self.view()
        if changed:
            # Only re-identify and rewrite when something was actually pruned.
            result._reset_identity()
            result._set_levels(new_levels, validate=False)
            result._set_codes(new_codes, validate=False)
        return result
# --------------------------------------------------------------------
# Pickling Methods
def __reduce__(self):
"""Necessary for making this object picklable"""
d = dict(
levels=list(self.levels),
codes=list(self.codes),
sortorder=self.sortorder,
names=list(self.names),
)
return ibase._new_Index, (type(self), d), None
# --------------------------------------------------------------------
    def __getitem__(self, key):
        """Scalar keys return a tuple; array-like keys return a new MultiIndex."""
        if is_scalar(key):
            key = com.cast_scalar_indexer(key, warn_float=True)
            retval = []
            for lev, level_codes in zip(self.levels, self.codes):
                if level_codes[key] == -1:
                    # -1 code means the value is missing in this level.
                    retval.append(np.nan)
                else:
                    retval.append(lev[level_codes[key]])
            return tuple(retval)
        else:
            if com.is_bool_indexer(key):
                key = np.asarray(key, dtype=bool)
                # Boolean masks preserve relative order, so sortedness holds.
                sortorder = self.sortorder
            else:
                # cannot be sure whether the result will be sorted
                sortorder = None
                if isinstance(key, Index):
                    key = np.asarray(key)
            new_codes = [level_codes[key] for level_codes in self.codes]
            return MultiIndex(
                levels=self.levels,
                codes=new_codes,
                names=self.names,
                sortorder=sortorder,
                verify_integrity=False,
            )
@Appender(_index_shared_docs["take"] % _index_doc_kwargs)
def take(self, indices, axis=0, allow_fill=True, fill_value=None, **kwargs):
nv.validate_take(tuple(), kwargs)
indices = ensure_platform_int(indices)
taken = self._assert_take_fillable(
self.codes,
indices,
allow_fill=allow_fill,
fill_value=fill_value,
na_value=-1,
)
return MultiIndex(
levels=self.levels, codes=taken, names=self.names, verify_integrity=False
)
    def _assert_take_fillable(
        self, values, indices, allow_fill=True, fill_value=None, na_value=None
    ):
        """ Internal method to handle NA filling of take """
        # only fill if we are passing a non-None fill_value
        if allow_fill and fill_value is not None:
            if (indices < -1).any():
                msg = (
                    "When allow_fill=True and fill_value is not None, "
                    "all indices must be >= -1"
                )
                raise ValueError(msg)
            taken = [lab.take(indices) for lab in self.codes]
            mask = indices == -1
            if mask.any():
                # Overwrite the positions taken with index -1 with the NA
                # sentinel in every level's codes.
                masked = []
                for new_label in taken:
                    label_values = new_label
                    label_values[mask] = na_value
                    masked.append(np.asarray(label_values))
                taken = masked
        else:
            # No filling: -1 indices behave like ordinary negative indexing.
            taken = [lab.take(indices) for lab in self.codes]
        return taken
    def append(self, other):
        """
        Append a collection of Index options together
        Parameters
        ----------
        other : Index or list/tuple of indices
        Returns
        -------
        appended : Index
        """
        if not isinstance(other, (list, tuple)):
            other = [other]
        if all(
            (isinstance(o, MultiIndex) and o.nlevels >= self.nlevels) for o in other
        ):
            # All MultiIndexes of at least our depth: append level by level.
            # NOTE(review): self.names is kept even if the others' names
            # differ — confirm this is the intended behavior.
            arrays = []
            for i in range(self.nlevels):
                label = self._get_level_values(i)
                appended = [o._get_level_values(i) for o in other]
                arrays.append(label.append(appended))
            return MultiIndex.from_arrays(arrays, names=self.names)
        # Mixed input: fall back to concatenating the tuple representations.
        to_concat = (self._values,) + tuple(k._values for k in other)
        new_tuples = np.concatenate(to_concat)
        # if all(isinstance(x, MultiIndex) for x in other):
        try:
            return MultiIndex.from_tuples(new_tuples, names=self.names)
        except (TypeError, IndexError):
            # Values not expressible as uniform tuples: return a flat Index.
            return Index(new_tuples)
def argsort(self, *args, **kwargs) -> np.ndarray:
return self._values.argsort(*args, **kwargs)
@Appender(_index_shared_docs["repeat"] % _index_doc_kwargs)
def repeat(self, repeats, axis=None):
nv.validate_repeat(tuple(), dict(axis=axis))
repeats = ensure_platform_int(repeats)
return MultiIndex(
levels=self.levels,
codes=[
level_codes.view(np.ndarray).astype(np.intp).repeat(repeats)
for level_codes in self.codes
],
names=self.names,
sortorder=self.sortorder,
verify_integrity=False,
)
def where(self, cond, other=None):
raise NotImplementedError(".where is not supported for MultiIndex operations")
    def drop(self, codes, level=None, errors="raise"):
        """
        Make new MultiIndex with passed list of codes deleted
        Parameters
        ----------
        codes : array-like
            Must be a list of tuples
        level : int or level name, default None
        errors : str, default 'raise'
        Returns
        -------
        dropped : MultiIndex
        """
        if level is not None:
            return self._drop_from_level(codes, level, errors)
        if not isinstance(codes, (np.ndarray, Index)):
            try:
                codes = com.index_labels_to_array(codes, dtype=object)
            except ValueError:
                pass
        inds = []
        for level_codes in codes:
            try:
                loc = self.get_loc(level_codes)
                # get_loc returns either an integer, a slice, or a boolean
                # mask
                if isinstance(loc, int):
                    inds.append(loc)
                elif isinstance(loc, slice):
                    # NOTE(review): a slice step, if any, is ignored here —
                    # confirm get_loc never returns a stepped slice.
                    inds.extend(range(loc.start, loc.stop))
                elif com.is_bool_indexer(loc):
                    if self.lexsort_depth == 0:
                        warnings.warn(
                            "dropping on a non-lexsorted multi-index "
                            "without a level parameter may impact performance.",
                            PerformanceWarning,
                            stacklevel=3,
                        )
                    loc = loc.nonzero()[0]
                    inds.extend(loc)
                else:
                    msg = f"unsupported indexer of type {type(loc)}"
                    raise AssertionError(msg)
            except KeyError:
                # Missing labels are ignored unless errors="raise".
                if errors != "ignore":
                    raise
        return self.delete(inds)
    def _drop_from_level(self, codes, level, errors="raise"):
        """Drop all rows whose value at ``level`` is one of ``codes``."""
        codes = com.index_labels_to_array(codes)
        i = self._get_level_number(level)
        index = self.levels[i]
        values = index.get_indexer(codes)
        # mask is True for the rows that are kept.
        mask = ~algos.isin(self.codes[i], values)
        # NOTE(review): this only raises when *every* row is kept, i.e. when
        # none of the requested labels appear in the level; individually
        # missing labels are silently ignored — confirm this is intended.
        if mask.all() and errors != "ignore":
            raise KeyError(f"labels {codes} not found in level")
        return self[mask]
    def swaplevel(self, i=-2, j=-1):
        """
        Swap level i with level j.
        Calling this method does not change the ordering of the values.
        Parameters
        ----------
        i : int, str, default -2
            First level of index to be swapped. Can pass level name as string.
            Type of parameters can be mixed.
        j : int, str, default -1
            Second level of index to be swapped. Can pass level name as string.
            Type of parameters can be mixed.
        Returns
        -------
        MultiIndex
            A new MultiIndex.
        See Also
        --------
        Series.swaplevel : Swap levels i and j in a MultiIndex.
        Dataframe.swaplevel : Swap levels i and j in a MultiIndex on a
            particular axis.
        Examples
        --------
        >>> mi = pd.MultiIndex(levels=[['a', 'b'], ['bb', 'aa']],
        ...                    codes=[[0, 0, 1, 1], [0, 1, 0, 1]])
        >>> mi
        MultiIndex([('a', 'bb'),
            ('a', 'aa'),
            ('b', 'bb'),
            ('b', 'aa')],
           )
        >>> mi.swaplevel(0, 1)
        MultiIndex([('bb', 'a'),
            ('aa', 'a'),
            ('bb', 'b'),
            ('aa', 'b')],
           )
        """
        new_levels = list(self.levels)
        new_codes = list(self.codes)
        new_names = list(self.names)
        # Resolve names/negatives to positional level numbers first.
        i = self._get_level_number(i)
        j = self._get_level_number(j)
        new_levels[i], new_levels[j] = new_levels[j], new_levels[i]
        new_codes[i], new_codes[j] = new_codes[j], new_codes[i]
        new_names[i], new_names[j] = new_names[j], new_names[i]
        return MultiIndex(
            levels=new_levels, codes=new_codes, names=new_names, verify_integrity=False
        )
def reorder_levels(self, order):
"""
Rearrange levels using input order. May not drop or duplicate levels.
Parameters
----------
order : list of int or list of str
List representing new level order. Reference level by number
(position) or by key (label).
Returns
-------
MultiIndex
"""
order = [self._get_level_number(i) for i in order]
if len(order) != self.nlevels:
raise AssertionError(
f"Length of order must be same as number of levels ({self.nlevels}), "
f"got {len(order)}"
)
new_levels = [self.levels[i] for i in order]
new_codes = [self.codes[i] for i in order]
new_names = [self.names[i] for i in order]
return MultiIndex(
levels=new_levels, codes=new_codes, names=new_names, verify_integrity=False
)
    def _get_codes_for_sorting(self):
        """
        We categorize our codes by using the available categories
        (all, not just observed), excluding any missing ones (-1);
        this is in preparation for sorting, where we need to
        disambiguate that -1 is not a valid value.
        """
        def cats(level_codes):
            # Categories 0..max(code); dtype matches the codes so that
            # Categorical.from_codes does not need to cast.
            return np.arange(
                np.array(level_codes).max() + 1 if len(level_codes) else 0,
                dtype=level_codes.dtype,
            )
        return [
            Categorical.from_codes(level_codes, cats(level_codes), ordered=True)
            for level_codes in self.codes
        ]
def sortlevel(self, level=0, ascending=True, sort_remaining=True):
"""
Sort MultiIndex at the requested level.
The result will respect the original ordering of the associated
factor at that level.
Parameters
----------
level : list-like, int or str, default 0
If a string is given, must be a name of the level.
If list-like must be names or ints of levels.
ascending : bool, default True
False to sort in descending order.
Can also be a list to specify a directed ordering.
sort_remaining : sort by the remaining levels after level
Returns
-------
sorted_index : pd.MultiIndex
Resulting index.
indexer : np.ndarray
Indices of output values in original index.
"""
if isinstance(level, (str, int)):
level = [level]
level = [self._get_level_number(lev) for lev in level]
sortorder = None
# we have a directed ordering via ascending
if isinstance(ascending, list):
if not len(level) == len(ascending):
raise ValueError("level must have same length as ascending")
indexer = lexsort_indexer(
[self.codes[lev] for lev in level], orders=ascending
)
# level ordering
else:
codes = list(self.codes)
shape = list(self.levshape)
# partition codes and shape
primary = tuple(codes[lev] for lev in level)
primshp = tuple(shape[lev] for lev in level)
# Reverse sorted to retain the order of
# smaller indices that needs to be removed
for lev in sorted(level, reverse=True):
codes.pop(lev)
shape.pop(lev)
if sort_remaining:
primary += primary + tuple(codes)
primshp += primshp + tuple(shape)
else:
sortorder = level[0]
indexer = indexer_from_factorized(primary, primshp, compress=False)
if not ascending:
indexer = indexer[::-1]
indexer = ensure_platform_int(indexer)
new_codes = [level_codes.take(indexer) for level_codes in self.codes]
new_index = MultiIndex(
codes=new_codes,
levels=self.levels,
names=self.names,
sortorder=sortorder,
verify_integrity=False,
)
return new_index, indexer
    def reindex(self, target, method=None, level=None, limit=None, tolerance=None):
        """
        Create index with target's values (move/add/delete values as necessary).

        Parameters
        ----------
        target : iterable of labels, Index, or Series
        method : fill method, optional (not supported together with ``level``)
        level : int or level name, optional
            If given, reindex against a single level via a level join.
        limit, tolerance : passed through to ``get_indexer``.

        Returns
        -------
        new_index : pd.MultiIndex
            Resulting index
        indexer : np.ndarray or None
            Indices of output values in original index.
        """
        # GH6552: preserve names when reindexing to non-named target
        # (i.e. neither Index nor Series).
        preserve_names = not hasattr(target, "names")
        if level is not None:
            if method is not None:
                raise TypeError("Fill method not supported if level passed")
            # GH7774: preserve dtype/tz if target is empty and not an Index.
            # target may be an iterator
            target = ibase.ensure_has_len(target)
            if len(target) == 0 and not isinstance(target, Index):
                # Build an empty index of the level's own type so dtype/tz
                # survive the round trip.
                idx = self.levels[level]
                attrs = idx._get_attributes_dict()
                attrs.pop("freq", None)  # don't preserve freq
                target = type(idx)._simple_new(np.empty(0, dtype=idx.dtype), **attrs)
            else:
                target = ensure_index(target)
            # Align on the single level via a right join, keeping target's
            # order.
            target, indexer, _ = self._join_level(
                target, level, how="right", return_indexers=True, keep_order=False
            )
        else:
            target = ensure_index(target)
            if self.equals(target):
                indexer = None
            else:
                if self.is_unique:
                    indexer = self.get_indexer(
                        target, method=method, limit=limit, tolerance=tolerance
                    )
                else:
                    raise ValueError("cannot handle a non-unique multi-index!")
        # The join/fill above may have produced a flat Index; rebuild a
        # MultiIndex result where possible.
        if not isinstance(target, MultiIndex):
            if indexer is None:
                target = self
            elif (indexer >= 0).all():
                target = self.take(indexer)
            else:
                # hopefully?
                target = MultiIndex.from_tuples(target)
        if (
            preserve_names
            and target.nlevels == self.nlevels
            and target.names != self.names
        ):
            target = target.copy(deep=False)
            target.names = self.names
        return target, indexer
# --------------------------------------------------------------------
# Indexing Methods
def _check_indexing_error(self, key):
if not is_hashable(key) or is_iterator(key):
# We allow tuples if they are hashable, whereas other Index
# subclasses require scalar.
# We have to explicitly exclude generators, as these are hashable.
raise InvalidIndexError(key)
    def _should_fallback_to_positional(self) -> bool:
        """
        Should integer key(s) be treated as positional?
        """
        # GH#33355
        # Defer entirely to the first level's own policy.
        return self.levels[0]._should_fallback_to_positional()
    def _get_values_for_loc(self, series: "Series", loc, key):
        """
        Do a positional lookup on the given Series, returning either a scalar
        or a Series.

        Assumes that `series.index is self`
        """
        new_values = series._values[loc]
        if is_scalar(loc):
            # Single position -> scalar result, no index to build.
            return new_values
        new_index = self[loc]
        # Drop the levels fully specified by `key` so the result only carries
        # the remaining levels.
        new_index = maybe_droplevels(new_index, key)
        new_ser = series._constructor(new_values, index=new_index, name=series.name)
        # Propagate metadata from the source Series.
        return new_ser.__finalize__(series)
    def _convert_listlike_indexer(self, keyarr):
        """
        Parameters
        ----------
        keyarr : list-like
            Indexer to convert.

        Returns
        -------
        tuple (indexer, keyarr)
            indexer is an ndarray or None if cannot convert
            keyarr are tuple-safe keys
        """
        indexer, keyarr = super()._convert_listlike_indexer(keyarr)
        # are we indexing a specific level
        if indexer is None and len(keyarr) and not isinstance(keyarr[0], tuple):
            # Non-tuple keys are interpreted as labels on level 0.
            level = 0
            _, indexer = self.reindex(keyarr, level=level)
            # take all
            if indexer is None:
                indexer = np.arange(len(self))
            # Raise for any requested key missing from level 0.
            check = self.levels[0].get_indexer(keyarr)
            mask = check == -1
            if mask.any():
                raise KeyError(f"{keyarr[mask]} not in index")
        return indexer, keyarr
def _get_partial_string_timestamp_match_key(self, key):
"""
Translate any partial string timestamp matches in key, returning the
new key.
Only relevant for MultiIndex.
"""
# GH#10331
if isinstance(key, str) and self.levels[0]._supports_partial_string_indexing:
# Convert key '2016-01-01' to
# ('2016-01-01'[, slice(None, None, None)]+)
key = (key,) + (slice(None),) * (len(self.levels) - 1)
if isinstance(key, tuple):
# Convert (..., '2016-01-01', ...) in tuple to
# (..., slice('2016-01-01', '2016-01-01', None), ...)
new_key = []
for i, component in enumerate(key):
if (
isinstance(component, str)
and self.levels[i]._supports_partial_string_indexing
):
new_key.append(slice(component, component, None))
else:
new_key.append(component)
key = tuple(new_key)
return key
    @Appender(_index_shared_docs["get_indexer"] % _index_doc_kwargs)
    def get_indexer(self, target, method=None, limit=None, tolerance=None):
        method = missing.clean_reindex_fill_method(method)
        target = ensure_index(target)
        # empty indexer
        if is_list_like(target) and not len(target):
            return ensure_platform_int(np.array([]))
        if not isinstance(target, MultiIndex):
            try:
                target = MultiIndex.from_tuples(target)
            except (TypeError, ValueError):
                # let's instead try with a straight Index
                if method is None:
                    return Index(self._values).get_indexer(
                        target, method=method, limit=limit, tolerance=tolerance
                    )
        if not self.is_unique:
            raise ValueError("Reindexing only valid with uniquely valued Index objects")
        if method == "pad" or method == "backfill":
            if tolerance is not None:
                raise NotImplementedError(
                    "tolerance not implemented yet for MultiIndex"
                )
            # Forward/backward fill resolved by the engine over the values.
            indexer = self._engine.get_indexer(
                values=self._values, target=target, method=method, limit=limit
            )
        elif method == "nearest":
            raise NotImplementedError(
                "method='nearest' not implemented yet "
                "for MultiIndex; see GitHub issue 9365"
            )
        else:
            # Exact matching; semantics per the shared get_indexer docstring.
            indexer = self._engine.get_indexer(target)
        return ensure_platform_int(indexer)
    def get_slice_bound(
        self, label: Union[Hashable, Sequence[Hashable]], side: str, kind: str
    ) -> int:
        """
        For an ordered MultiIndex, compute slice bound
        that corresponds to given label.

        Returns leftmost (one-past-the-rightmost if `side=='right') position
        of given label.

        Parameters
        ----------
        label : object or tuple of objects
        side : {'left', 'right'}
        kind : {'loc', 'getitem'}
            Accepted for signature compatibility with the base class; not
            used here.

        Returns
        -------
        int
            Index of label.

        Notes
        -----
        This method only works if level 0 index of the MultiIndex is lexsorted.

        Examples
        --------
        >>> mi = pd.MultiIndex.from_arrays([list('abbc'), list('gefd')])

        Get the locations from the leftmost 'b' in the first level
        until the end of the multiindex:

        >>> mi.get_slice_bound('b', side="left", kind="loc")
        1

        Like above, but if you get the locations from the rightmost
        'b' in the first level and 'f' in the second level:

        >>> mi.get_slice_bound(('b','f'), side="right", kind="loc")
        3

        See Also
        --------
        MultiIndex.get_loc : Get location for a label or a tuple of labels.
        MultiIndex.get_locs : Get location for a label/slice/list/mask or a
            sequence of such.
        """
        if not isinstance(label, tuple):
            # Promote a scalar label to a 1-tuple addressing level 0.
            label = (label,)
        return self._partial_tup_index(label, side=side)
    def slice_locs(self, start=None, end=None, step=None, kind=None):
        """
        For an ordered MultiIndex, compute the slice locations for input
        labels.

        The input labels can be tuples representing partial levels, e.g. for a
        MultiIndex with 3 levels, you can pass a single value (corresponding to
        the first level), or a 1-, 2-, or 3-tuple.

        Parameters
        ----------
        start : label or tuple, default None
            If None, defaults to the beginning
        end : label or tuple
            If None, defaults to the end
        step : int or None
            Slice step
        kind : string, optional, defaults None

        Returns
        -------
        (start, end) : (int, int)

        Notes
        -----
        This method only works if the MultiIndex is properly lexsorted. So,
        if only the first 2 levels of a 3-level MultiIndex are lexsorted,
        you can only pass two levels to ``.slice_locs``.

        Examples
        --------
        >>> mi = pd.MultiIndex.from_arrays([list('abbd'), list('deff')],
        ...                                names=['A', 'B'])

        Get the slice locations from the beginning of 'b' in the first level
        until the end of the multiindex:

        >>> mi.slice_locs(start='b')
        (1, 4)

        Like above, but stop at the end of 'b' in the first level and 'f' in
        the second level:

        >>> mi.slice_locs(start='b', end=('b', 'f'))
        (1, 3)

        See Also
        --------
        MultiIndex.get_loc : Get location for a label or a tuple of labels.
        MultiIndex.get_locs : Get location for a label/slice/list/mask or a
            sequence of such.
        """
        # This function adds nothing to its parent implementation (the magic
        # happens in get_slice_bound method), but it adds meaningful doc.
        return super().slice_locs(start, end, step, kind=kind)
    def _partial_tup_index(self, tup, side="left"):
        # Locate the slice bound for a (possibly partial) tuple `tup` by
        # narrowing the window [start, end) one level at a time.
        if len(tup) > self.lexsort_depth:
            raise UnsortedIndexError(
                f"Key length ({len(tup)}) was greater than MultiIndex lexsort depth "
                f"({self.lexsort_depth})"
            )
        n = len(tup)
        start, end = 0, len(self)
        zipped = zip(tup, self.levels, self.codes)
        for k, (lab, lev, labs) in enumerate(zipped):
            # Only the codes inside the current window are relevant.
            section = labs[start:end]
            if lab not in lev and not isna(lab):
                # Label absent from this level: fall back to the position it
                # would occupy in the (sorted) level.
                if not lev.is_type_compatible(lib.infer_dtype([lab], skipna=False)):
                    raise TypeError(f"Level type mismatch: {lab}")
                # short circuit
                loc = lev.searchsorted(lab, side=side)
                if side == "right" and loc >= 0:
                    loc -= 1
                return start + section.searchsorted(loc, side=side)
            idx = self._get_loc_single_level_index(lev, lab)
            if k < n - 1:
                # Narrow the window to the rows matching this level's code.
                end = start + section.searchsorted(idx, side="right")
                start = start + section.searchsorted(idx, side="left")
            else:
                # Last provided level: return the requested bound.
                return start + section.searchsorted(idx, side=side)
def _get_loc_single_level_index(self, level_index: Index, key: Hashable) -> int:
"""
If key is NA value, location of index unify as -1.
Parameters
----------
level_index: Index
key : label
Returns
-------
loc : int
If key is NA value, loc is -1
Else, location of key in index.
See Also
--------
Index.get_loc : The get_loc method for (single-level) index.
"""
if is_scalar(key) and isna(key):
return -1
else:
return level_index.get_loc(key)
    def get_loc(self, key, method=None):
        """
        Get location for a label or a tuple of labels.

        The location is returned as an integer/slice or boolean
        mask.

        Parameters
        ----------
        key : label or tuple of labels (one for each level)
        method : None

        Returns
        -------
        loc : int, slice object or boolean mask
            If the key is past the lexsort depth, the return may be a
            boolean mask array, otherwise it is always a slice or int.

        See Also
        --------
        Index.get_loc : The get_loc method for (single-level) index.
        MultiIndex.slice_locs : Get slice location given start label(s) and
            end label(s).
        MultiIndex.get_locs : Get location for a label/slice/list/mask or a
            sequence of such.

        Notes
        -----
        The key cannot be a slice, list of same-level labels, a boolean mask,
        or a sequence of such. If you want to use those, use
        :meth:`MultiIndex.get_locs` instead.

        Examples
        --------
        >>> mi = pd.MultiIndex.from_arrays([list('abb'), list('def')])

        >>> mi.get_loc('b')
        slice(1, 3, None)

        >>> mi.get_loc(('b', 'e'))
        1
        """
        if method is not None:
            raise NotImplementedError(
                "only the default get_loc method is "
                "currently supported for MultiIndex"
            )
        hash(key)  # fail fast (TypeError) on unhashable keys

        def _maybe_to_slice(loc):
            """convert integer indexer to boolean mask or slice if possible"""
            if not isinstance(loc, np.ndarray) or loc.dtype != "int64":
                return loc
            loc = lib.maybe_indices_to_slice(loc, len(self))
            if isinstance(loc, slice):
                return loc
            # Positions are not contiguous: fall back to a boolean mask.
            mask = np.empty(len(self), dtype="bool")
            mask.fill(False)
            mask[loc] = True
            return mask

        if not isinstance(key, tuple):
            # A scalar key addresses level 0 only.
            loc = self._get_level_indexer(key, level=0)
            return _maybe_to_slice(loc)
        keylen = len(key)
        if self.nlevels < keylen:
            raise KeyError(
                f"Key length ({keylen}) exceeds index depth ({self.nlevels})"
            )
        if keylen == self.nlevels and self.is_unique:
            # Fully specified key in a unique index: direct engine lookup.
            return self._engine.get_loc(key)
        # -- partial selection or non-unique index
        # break the key into 2 parts based on the lexsort_depth of the index;
        # the first part returns a continuous slice of the index; the 2nd part
        # needs linear search within the slice
        i = self.lexsort_depth
        lead_key, follow_key = key[:i], key[i:]
        start, stop = (
            self.slice_locs(lead_key, lead_key) if lead_key else (0, len(self))
        )
        if start == stop:
            raise KeyError(key)
        if not follow_key:
            return slice(start, stop)
        warnings.warn(
            "indexing past lexsort depth may impact performance.",
            PerformanceWarning,
            stacklevel=10,
        )
        loc = np.arange(start, stop, dtype="int64")
        for i, k in enumerate(follow_key, len(lead_key)):
            # Keep only positions whose code at level i matches k.
            mask = self.codes[i][loc] == self._get_loc_single_level_index(
                self.levels[i], k
            )
            if not mask.all():
                loc = loc[mask]
            if not len(loc):
                raise KeyError(key)
        return _maybe_to_slice(loc) if len(loc) != stop - start else slice(start, stop)
    def get_loc_level(self, key, level=0, drop_level: bool = True):
        """
        Get location and sliced index for requested label(s)/level(s).

        Parameters
        ----------
        key : label or sequence of labels
        level : int/level name or list thereof, optional
        drop_level : bool, default True
            If ``False``, the resulting index will not drop any level.

        Returns
        -------
        loc : A 2-tuple where the elements are:
            Element 0: int, slice object or boolean array
            Element 1: The resulting sliced multiindex/index. If the key
            contains all levels, this will be ``None``.

        See Also
        --------
        MultiIndex.get_loc : Get location for a label or a tuple of labels.
        MultiIndex.get_locs : Get location for a label/slice/list/mask or a
            sequence of such.

        Examples
        --------
        >>> mi = pd.MultiIndex.from_arrays([list('abb'), list('def')],
        ...                                names=['A', 'B'])

        >>> mi.get_loc_level('b')
        (slice(1, 3, None), Index(['e', 'f'], dtype='object', name='B'))

        >>> mi.get_loc_level('e', level='B')
        (array([False, True, False]), Index(['b'], dtype='object', name='A'))

        >>> mi.get_loc_level(['b', 'e'])
        (1, None)
        """
        # different name to distinguish from maybe_droplevels
        def maybe_mi_droplevels(indexer, levels, drop_level: bool):
            # Slice self with `indexer`, then drop the requested levels from
            # the result (unless dropping would empty the index).
            if not drop_level:
                return self[indexer]
            # kludge around
            orig_index = new_index = self[indexer]
            levels = [self._get_level_number(i) for i in levels]
            for i in sorted(levels, reverse=True):
                try:
                    new_index = new_index.droplevel(i)
                except ValueError:
                    # no dropping here
                    return orig_index
            return new_index
        if isinstance(level, (tuple, list)):
            # Multiple (key, level) pairs: combine the per-pair locations
            # into one boolean mask.
            if len(key) != len(level):
                raise AssertionError(
                    "Key for location must have same length as number of levels"
                )
            result = None
            for lev, k in zip(level, key):
                loc, new_index = self.get_loc_level(k, level=lev)
                if isinstance(loc, slice):
                    # Convert slices to masks so they can be AND-ed together.
                    mask = np.zeros(len(self), dtype=bool)
                    mask[loc] = True
                    loc = mask
                result = loc if result is None else result & loc
            return result, maybe_mi_droplevels(result, level, drop_level)
        level = self._get_level_number(level)
        # kludge for #1796
        if isinstance(key, list):
            key = tuple(key)
        if isinstance(key, tuple) and level == 0:
            try:
                # The whole tuple may itself be a label of level 0.
                if key in self.levels[0]:
                    indexer = self._get_level_indexer(key, level=level)
                    new_index = maybe_mi_droplevels(indexer, [0], drop_level)
                    return indexer, new_index
            except (TypeError, InvalidIndexError):
                pass
            if not any(isinstance(k, slice) for k in key):
                # partial selection
                # optionally get indexer to avoid re-calculation
                def partial_selection(key, indexer=None):
                    if indexer is None:
                        indexer = self.get_loc(key)
                    ilevels = [
                        i for i in range(len(key)) if key[i] != slice(None, None)
                    ]
                    return indexer, maybe_mi_droplevels(indexer, ilevels, drop_level)
                if len(key) == self.nlevels and self.is_unique:
                    # Complete key in unique index -> standard get_loc
                    try:
                        return (self._engine.get_loc(key), None)
                    except KeyError as e:
                        raise KeyError(key) from e
                else:
                    return partial_selection(key)
            else:
                # Tuple containing slices: resolve level by level, combining
                # the per-level indexers.
                indexer = None
                for i, k in enumerate(key):
                    if not isinstance(k, slice):
                        k = self._get_level_indexer(k, level=i)
                        if isinstance(k, slice):
                            # everything
                            if k.start == 0 and k.stop == len(self):
                                k = slice(None, None)
                        else:
                            k_index = k
                    if isinstance(k, slice):
                        if k == slice(None, None):
                            continue
                        else:
                            raise TypeError(key)
                    if indexer is None:
                        indexer = k_index
                    else:  # pragma: no cover
                        indexer &= k_index
                if indexer is None:
                    indexer = slice(None, None)
                ilevels = [i for i in range(len(key)) if key[i] != slice(None, None)]
                return indexer, maybe_mi_droplevels(indexer, ilevels, drop_level)
        else:
            # Single label on a single level.
            indexer = self._get_level_indexer(key, level=level)
            return indexer, maybe_mi_droplevels(indexer, [level], drop_level)
    def _get_level_indexer(self, key, level=0, indexer=None):
        # return an indexer, boolean array or a slice showing where the key is
        # in the totality of values
        # if the indexer is provided, then use this
        level_index = self.levels[level]
        level_codes = self.codes[level]
        def convert_indexer(start, stop, step, indexer=indexer, codes=level_codes):
            # given the inputs and the codes/indexer, compute an indexer set
            # if we have a provided indexer, then this need not consider
            # the entire labels set
            r = np.arange(start, stop, step)
            if indexer is not None and len(indexer) != len(codes):
                # we have an indexer which maps the locations in the labels
                # that we have already selected (and is not an indexer for the
                # entire set) otherwise this is wasteful so we only need to
                # examine locations that are in this set the only magic here is
                # that the result are the mappings to the set that we have
                # selected
                from pandas import Series
                mapper = Series(indexer)
                indexer = codes.take(ensure_platform_int(indexer))
                result = Series(Index(indexer).isin(r).nonzero()[0])
                m = result.map(mapper)
                m = np.asarray(m)
            else:
                # Full-length boolean mask over all codes.
                m = np.zeros(len(codes), dtype=bool)
                m[np.in1d(codes, r, assume_unique=Index(codes).is_unique)] = True
            return m
        if isinstance(key, slice):
            # handle a slice, returning a slice if we can
            # otherwise a boolean indexer
            try:
                if key.start is not None:
                    start = level_index.get_loc(key.start)
                else:
                    start = 0
                if key.stop is not None:
                    stop = level_index.get_loc(key.stop)
                else:
                    stop = len(level_index) - 1
                step = key.step
            except KeyError:
                # we have a partial slice (like looking up a partial date
                # string)
                start = stop = level_index.slice_indexer(
                    key.start, key.stop, key.step, kind="loc"
                )
                step = start.step
            if isinstance(start, slice) or isinstance(stop, slice):
                # we have a slice for start and/or stop
                # a partial date slicer on a DatetimeIndex generates a slice
                # note that the stop ALREADY includes the stopped point (if
                # it was a string sliced)
                start = getattr(start, "start", start)
                stop = getattr(stop, "stop", stop)
                return convert_indexer(start, stop, step)
            elif level > 0 or self.lexsort_depth == 0 or step is not None:
                # need to have like semantics here to right
                # searching as when we are using a slice
                # so include the stop+1 (so we include stop)
                return convert_indexer(start, stop + 1, step)
            else:
                # sorted, so can return slice object -> view
                i = level_codes.searchsorted(start, side="left")
                j = level_codes.searchsorted(stop, side="right")
                return slice(i, j, step)
        else:
            # Single label: translate it to its level code first.
            code = self._get_loc_single_level_index(level_index, key)
            if level > 0 or self.lexsort_depth == 0:
                # Desired level is not sorted
                locs = np.array(level_codes == code, dtype=bool, copy=False)
                if not locs.any():
                    # The label is present in self.levels[level] but unused:
                    raise KeyError(key)
                return locs
            # Level is sorted: binary-search the contiguous run of this code.
            i = level_codes.searchsorted(code, side="left")
            j = level_codes.searchsorted(code, side="right")
            if i == j:
                # The label is present in self.levels[level] but unused:
                raise KeyError(key)
            return slice(i, j)
    def get_locs(self, seq):
        """
        Get location for a sequence of labels.

        Parameters
        ----------
        seq : label, slice, list, mask or a sequence of such
            You should use one of the above for each level.
            If a level should not be used, set it to ``slice(None)``.

        Returns
        -------
        numpy.ndarray
            NumPy array of integers suitable for passing to iloc.

        See Also
        --------
        MultiIndex.get_loc : Get location for a label or a tuple of labels.
        MultiIndex.slice_locs : Get slice location given start label(s) and
            end label(s).

        Examples
        --------
        >>> mi = pd.MultiIndex.from_arrays([list('abb'), list('def')])

        >>> mi.get_locs('b')  # doctest: +SKIP
        array([1, 2], dtype=int64)

        >>> mi.get_locs([slice(None), ['e', 'f']])  # doctest: +SKIP
        array([1, 2], dtype=int64)

        >>> mi.get_locs([[True, False, True], slice('e', 'f')])  # doctest: +SKIP
        array([2], dtype=int64)
        """
        from pandas.core.indexes.numeric import Int64Index
        # must be lexsorted to at least as many levels
        true_slices = [i for (i, s) in enumerate(com.is_true_slices(seq)) if s]
        if true_slices and true_slices[-1] >= self.lexsort_depth:
            raise UnsortedIndexError(
                "MultiIndex slicing requires the index to be lexsorted: slicing "
                f"on levels {true_slices}, lexsort depth {self.lexsort_depth}"
            )
        # indexer
        # this is the list of all values that we want to select
        n = len(self)
        indexer = None
        def _convert_to_indexer(r) -> Int64Index:
            # return an indexer
            if isinstance(r, slice):
                # Expand the slice to explicit integer positions.
                m = np.zeros(n, dtype=bool)
                m[r] = True
                r = m.nonzero()[0]
            elif com.is_bool_indexer(r):
                if len(r) != n:
                    raise ValueError(
                        "cannot index with a boolean indexer "
                        "that is not the same length as the "
                        "index"
                    )
                r = r.nonzero()[0]
            return Int64Index(r)
        def _update_indexer(idxr, indexer=indexer):
            # Intersect the running indexer with `idxr` (None means "all").
            if indexer is None:
                indexer = Index(np.arange(n))
            if idxr is None:
                return indexer
            return indexer & idxr
        for i, k in enumerate(seq):
            if com.is_bool_indexer(k):
                # a boolean indexer, must be the same length!
                k = np.asarray(k)
                indexer = _update_indexer(_convert_to_indexer(k), indexer=indexer)
            elif is_list_like(k):
                # a collection of labels to include from this level (these
                # are or'd)
                indexers: Optional[Int64Index] = None
                for x in k:
                    try:
                        idxrs = _convert_to_indexer(
                            self._get_level_indexer(x, level=i, indexer=indexer)
                        )
                        indexers = idxrs if indexers is None else indexers | idxrs
                    except KeyError:
                        # ignore not founds
                        continue
                if indexers is not None:
                    indexer = _update_indexer(indexers, indexer=indexer)
                else:
                    # no matches we are done
                    return np.array([], dtype=np.int64)
            elif com.is_null_slice(k):
                # empty slice
                indexer = _update_indexer(None, indexer=indexer)
            elif isinstance(k, slice):
                # a slice, include BOTH of the labels
                indexer = _update_indexer(
                    _convert_to_indexer(
                        self._get_level_indexer(k, level=i, indexer=indexer)
                    ),
                    indexer=indexer,
                )
            else:
                # a single label
                indexer = _update_indexer(
                    _convert_to_indexer(
                        self.get_loc_level(k, level=i, drop_level=False)[0]
                    ),
                    indexer=indexer,
                )
        # empty indexer
        if indexer is None:
            return np.array([], dtype=np.int64)
        assert isinstance(indexer, Int64Index), type(indexer)
        # Restore the ordering implied by `seq` (labels may be requested out
        # of index order).
        indexer = self._reorder_indexer(seq, indexer)
        return indexer._values
# --------------------------------------------------------------------
    def _reorder_indexer(
        self,
        seq: Tuple[Union[Scalar, Iterable, AnyArrayLike], ...],
        indexer: Int64Index,
    ) -> Int64Index:
        """
        Reorder an indexer of a MultiIndex (self) so that the labels are in
        the same order as given in seq.

        Parameters
        ----------
        seq : label/slice/list/mask or a sequence of such
        indexer : an Int64Index indexer of self

        Returns
        -------
        indexer : a sorted Int64Index indexer of self ordered as seq
        """
        # If the index is lexsorted and the list_like label in seq are sorted
        # then we do not need to sort
        if self.is_lexsorted():
            need_sort = False
            for i, k in enumerate(seq):
                if is_list_like(k):
                    if not need_sort:
                        k_codes = self.levels[i].get_indexer(k)
                        k_codes = k_codes[k_codes >= 0]  # Filter absent keys
                        # True if the given codes are not ordered
                        need_sort = (k_codes[:-1] > k_codes[1:]).any()
            # Bail out if both index and seq are sorted
            if not need_sort:
                return indexer
        n = len(self)
        keys: Tuple[np.ndarray, ...] = tuple()
        # For each level of the sequence in seq, map the level codes with the
        # order they appears in a list-like sequence
        # This mapping is then use to reorder the indexer
        for i, k in enumerate(seq):
            if com.is_bool_indexer(k):
                new_order = np.arange(n)[indexer]
            elif is_list_like(k):
                # Generate a map with all level codes as sorted initially
                key_order_map = np.ones(len(self.levels[i]), dtype=np.uint64) * len(
                    self.levels[i]
                )
                # Set order as given in the indexer list
                level_indexer = self.levels[i].get_indexer(k)
                level_indexer = level_indexer[level_indexer >= 0]  # Filter absent keys
                key_order_map[level_indexer] = np.arange(len(level_indexer))
                new_order = key_order_map[self.codes[i][indexer]]
            else:
                # For all other case, use the same order as the level
                new_order = np.arange(n)[indexer]
            # Prepend so that earlier levels take precedence in np.lexsort
            # (which sorts by the LAST key first).
            keys = (new_order,) + keys
        # Find the reordering using lexsort on the keys mapping
        ind = np.lexsort(keys)
        return indexer[ind]
def truncate(self, before=None, after=None):
"""
Slice index between two labels / tuples, return new MultiIndex
Parameters
----------
before : label or tuple, can be partial. Default None
None defaults to start
after : label or tuple, can be partial. Default None
None defaults to end
Returns
-------
truncated : MultiIndex
"""
if after and before and after < before:
raise ValueError("after < before")
i, j = self.levels[0].slice_locs(before, after)
left, right = self.slice_locs(before, after)
new_levels = list(self.levels)
new_levels[0] = new_levels[0][i:j]
new_codes = [level_codes[left:right] for level_codes in self.codes]
new_codes[0] = new_codes[0] - i
return MultiIndex(
levels=new_levels,
codes=new_codes,
names=self._names,
verify_integrity=False,
)
    def equals(self, other: object) -> bool:
        """
        Determines if two MultiIndex objects have the same labeling information
        (the levels themselves do not necessarily have to be the same)

        See Also
        --------
        equal_levels
        """
        if self.is_(other):
            return True
        if not isinstance(other, Index):
            return False
        if not isinstance(other, MultiIndex):
            # d-level MultiIndex can equal d-tuple Index
            if not is_object_dtype(other.dtype):
                # other cannot contain tuples, so cannot match self
                return False
            elif len(self) != len(other):
                return False
            return array_equivalent(self._values, other._values)
        if self.nlevels != other.nlevels:
            return False
        if len(self) != len(other):
            return False
        # Compare level by level on materialized values, with NA codes (-1)
        # removed from each side before the take.
        for i in range(self.nlevels):
            self_codes = self.codes[i]
            self_codes = self_codes[self_codes != -1]
            self_values = algos.take_nd(
                np.asarray(self.levels[i]._values), self_codes, allow_fill=False
            )
            other_codes = other.codes[i]
            other_codes = other_codes[other_codes != -1]
            other_values = algos.take_nd(
                np.asarray(other.levels[i]._values), other_codes, allow_fill=False
            )
            # since we use NaT both datetime64 and timedelta64 we can have a
            # situation where a level is typed say timedelta64 in self (IOW it
            # has other values than NaT) but types datetime64 in other (where
            # its all NaT) but these are equivalent
            if len(self_values) == 0 and len(other_values) == 0:
                continue
            if not array_equivalent(self_values, other_values):
                return False
        return True
def equal_levels(self, other) -> bool:
"""
Return True if the levels of both MultiIndex objects are the same
"""
if self.nlevels != other.nlevels:
return False
for i in range(self.nlevels):
if not self.levels[i].equals(other.levels[i]):
return False
return True
# --------------------------------------------------------------------
# Set Methods
    def union(self, other, sort=None):
        """
        Form the union of two MultiIndex objects

        Parameters
        ----------
        other : MultiIndex or array / Index of tuples
        sort : False or None, default None
            Whether to sort the resulting Index.

            * None : Sort the result, except when

              1. `self` and `other` are equal.
              2. `self` has length 0.
              3. Some values in `self` or `other` cannot be compared.
                 A RuntimeWarning is issued in this case.

            * False : do not sort the result.

            .. versionadded:: 0.24.0

            .. versionchanged:: 0.24.1

               Changed the default value from ``True`` to ``None``
               (without change in behaviour).

        Returns
        -------
        Index

        Examples
        --------
        >>> idx1 = pd.MultiIndex.from_arrays(
        ...     [[1, 1, 2, 2], ["Red", "Blue", "Red", "Blue"]]
        ... )
        >>> idx1
        MultiIndex([(1, 'Red'),
            (1, 'Blue'),
            (2, 'Red'),
            (2, 'Blue')],
           )
        >>> idx2 = pd.MultiIndex.from_arrays(
        ...     [[3, 3, 2, 2], ["Red", "Green", "Red", "Green"]]
        ... )
        >>> idx2
        MultiIndex([(3, 'Red'),
            (3, 'Green'),
            (2, 'Red'),
            (2, 'Green')],
           )
        >>> idx1.union(idx2)
        MultiIndex([(1, 'Blue'),
            (1, 'Red'),
            (2, 'Blue'),
            (2, 'Green'),
            (2, 'Red'),
            (3, 'Green'),
            (3, 'Red')],
           )
        >>> idx1.union(idx2, sort=False)
        MultiIndex([(1, 'Red'),
            (1, 'Blue'),
            (2, 'Red'),
            (2, 'Blue'),
            (3, 'Red'),
            (3, 'Green'),
            (2, 'Green')],
           )
        """
        self._validate_sort_keyword(sort)
        self._assert_can_do_setop(other)
        other, result_names = self._convert_can_do_setop(other)
        if len(other) == 0 or self.equals(other):
            return self
        # TODO: Index.union returns other when `len(self)` is 0.
        if not is_object_dtype(other.dtype):
            raise NotImplementedError(
                "Can only union MultiIndex with MultiIndex or Index of tuples, "
                "try mi.to_flat_index().union(other) instead."
            )
        # Deduplicate across both sides at tuple level, then rebuild.
        uniq_tuples = lib.fast_unique_multiple([self._values, other._values], sort=sort)
        return MultiIndex.from_arrays(
            zip(*uniq_tuples), sortorder=0, names=result_names
        )
    def intersection(self, other, sort=False):
        """
        Form the intersection of two MultiIndex objects.

        Parameters
        ----------
        other : MultiIndex or array / Index of tuples
        sort : False or None, default False
            Sort the resulting MultiIndex if possible

            .. versionadded:: 0.24.0

            .. versionchanged:: 0.24.1

               Changed the default from ``True`` to ``False``, to match
               behaviour from before 0.24.0

        Returns
        -------
        Index
        """
        self._validate_sort_keyword(sort)
        self._assert_can_do_setop(other)
        other, result_names = self._convert_can_do_setop(other)
        if self.equals(other):
            return self
        if not is_object_dtype(other.dtype):
            # The intersection is empty
            # TODO: we have no tests that get here
            return MultiIndex(
                levels=self.levels,
                codes=[[]] * self.nlevels,
                names=result_names,
                verify_integrity=False,
            )
        lvals = self._values
        rvals = other._values
        uniq_tuples = None  # flag whether _inner_indexer was successful
        if self.is_monotonic and other.is_monotonic:
            try:
                # Fast path: merge two sorted tuple arrays.
                uniq_tuples = self._inner_indexer(lvals, rvals)[0]
                sort = False  # uniq_tuples is already sorted
            except TypeError:
                pass
        if uniq_tuples is None:
            # Fallback: keep self's order, first occurrences only.
            other_uniq = set(rvals)
            seen = set()
            uniq_tuples = [
                x for x in lvals if x in other_uniq and not (x in seen or seen.add(x))
            ]
            if sort is None:
                uniq_tuples = sorted(uniq_tuples)
        if len(uniq_tuples) == 0:
            return MultiIndex(
                levels=self.levels,
                codes=[[]] * self.nlevels,
                names=result_names,
                verify_integrity=False,
            )
        else:
            return MultiIndex.from_arrays(
                zip(*uniq_tuples), sortorder=0, names=result_names
            )
    def difference(self, other, sort=None):
        """
        Compute set difference of two MultiIndex objects

        Parameters
        ----------
        other : MultiIndex
        sort : False or None, default None
            Sort the resulting MultiIndex if possible

            .. versionadded:: 0.24.0

            .. versionchanged:: 0.24.1

               Changed the default value from ``True`` to ``None``
               (without change in behaviour).

        Returns
        -------
        diff : MultiIndex
        """
        self._validate_sort_keyword(sort)
        self._assert_can_do_setop(other)
        other, result_names = self._convert_can_do_setop(other)
        if len(other) == 0:
            return self
        if self.equals(other):
            # Everything is removed: keep the levels but no rows.
            return MultiIndex(
                levels=self.levels,
                codes=[[]] * self.nlevels,
                names=result_names,
                verify_integrity=False,
            )
        this = self._get_unique_index()
        # Positions in `this` that also occur in `other` ...
        indexer = this.get_indexer(other)
        indexer = indexer.take((indexer != -1).nonzero()[0])
        # ... and their complement, which is the difference.
        label_diff = np.setdiff1d(np.arange(this.size), indexer, assume_unique=True)
        difference = this._values.take(label_diff)
        if sort is None:
            difference = sorted(difference)
        if len(difference) == 0:
            return MultiIndex(
                levels=[[]] * self.nlevels,
                codes=[[]] * self.nlevels,
                names=result_names,
                verify_integrity=False,
            )
        else:
            return MultiIndex.from_tuples(difference, sortorder=0, names=result_names)
def _convert_can_do_setop(self, other):
result_names = self.names
if not isinstance(other, Index):
if len(other) == 0:
other = MultiIndex(
levels=[[]] * self.nlevels,
codes=[[]] * self.nlevels,
verify_integrity=False,
)
else:
msg = "other must be a MultiIndex or a list of tuples"
try:
other = MultiIndex.from_tuples(other)
except TypeError as err:
raise TypeError(msg) from err
else:
result_names = self.names if self.names == other.names else None
return other, result_names
# --------------------------------------------------------------------
    @doc(Index.astype)
    def astype(self, dtype, copy=True):
        # Normalize the requested dtype (strings like "object" -> np.dtype).
        dtype = pandas_dtype(dtype)
        if is_categorical_dtype(dtype):
            # A MultiIndex is > 1-dimensional conceptually; Categorical
            # only supports 1 ndim.
            msg = "> 1 ndim Categorical are not supported at this time"
            raise NotImplementedError(msg)
        elif not is_object_dtype(dtype):
            # MultiIndex values are tuples; only object dtype can hold them.
            raise TypeError(
                f"Setting {type(self)} dtype to anything other "
                "than object is not supported"
            )
        elif copy is True:
            # Same dtype requested with copy: return a shallow copy.
            return self._shallow_copy()
        return self
def _validate_insert_value(self, item):
if not isinstance(item, tuple):
# Pad the key with empty strings if lower levels of the key
# aren't specified:
item = (item,) + ("",) * (self.nlevels - 1)
elif len(item) != self.nlevels:
raise ValueError("Item must have length equal to number of levels.")
return item
    def insert(self, loc: int, item):
        """
        Make new MultiIndex inserting new item at location
        Parameters
        ----------
        loc : int
        item : tuple
            Must be same length as number of levels in the MultiIndex
        Returns
        -------
        new_index : Index
        """
        # Pad scalars / validate tuple length against nlevels.
        item = self._validate_insert_value(item)
        new_levels = []
        new_codes = []
        # For each level, find (or add) the label and splice its code in.
        for k, level, level_codes in zip(item, self.levels, self.codes):
            if k not in level:
                # have to insert into level
                # must insert at end otherwise you have to recompute all the
                # other codes
                lev_loc = len(level)
                level = level.insert(lev_loc, k)
            else:
                lev_loc = level.get_loc(k)
            new_levels.append(level)
            # Insert the new label's code at position `loc` in this level.
            new_codes.append(np.insert(ensure_int64(level_codes), loc, lev_loc))
        return MultiIndex(
            levels=new_levels, codes=new_codes, names=self.names, verify_integrity=False
        )
def delete(self, loc):
"""
Make new index with passed location deleted
Returns
-------
new_index : MultiIndex
"""
new_codes = [np.delete(level_codes, loc) for level_codes in self.codes]
return MultiIndex(
levels=self.levels,
codes=new_codes,
names=self.names,
verify_integrity=False,
)
    @doc(Index.isin)
    def isin(self, values, level=None):
        if level is None:
            # Full-tuple membership: compare against a MultiIndex built
            # from `values`.
            values = MultiIndex.from_tuples(values, names=self.names)._values
            return algos.isin(self._values, values)
        else:
            # Per-level membership: test the labels of one level only.
            num = self._get_level_number(level)
            levs = self.get_level_values(num)
            if levs.size == 0:
                # Empty level: nothing can match.
                return np.zeros(len(levs), dtype=np.bool_)
            return levs.isin(values)
@classmethod
def _add_numeric_methods_add_sub_disabled(cls):
"""
Add in the numeric add/sub methods to disable.
"""
cls.__add__ = make_invalid_op("__add__")
cls.__radd__ = make_invalid_op("__radd__")
cls.__iadd__ = make_invalid_op("__iadd__")
cls.__sub__ = make_invalid_op("__sub__")
cls.__rsub__ = make_invalid_op("__rsub__")
cls.__isub__ = make_invalid_op("__isub__")
@classmethod
def _add_numeric_methods_disabled(cls):
"""
Add in numeric methods to disable other than add/sub.
"""
cls.__pow__ = make_invalid_op("__pow__")
cls.__rpow__ = make_invalid_op("__rpow__")
cls.__mul__ = make_invalid_op("__mul__")
cls.__rmul__ = make_invalid_op("__rmul__")
cls.__floordiv__ = make_invalid_op("__floordiv__")
cls.__rfloordiv__ = make_invalid_op("__rfloordiv__")
cls.__truediv__ = make_invalid_op("__truediv__")
cls.__rtruediv__ = make_invalid_op("__rtruediv__")
cls.__mod__ = make_invalid_op("__mod__")
cls.__rmod__ = make_invalid_op("__rmod__")
cls.__divmod__ = make_invalid_op("__divmod__")
cls.__rdivmod__ = make_invalid_op("__rdivmod__")
cls.__neg__ = make_invalid_op("__neg__")
cls.__pos__ = make_invalid_op("__pos__")
cls.__abs__ = make_invalid_op("__abs__")
cls.__inv__ = make_invalid_op("__inv__")
# Opt MultiIndex out of arithmetic and logical operators: these would be
# ambiguous on tuples, so they raise TypeError via make_invalid_op instead.
MultiIndex._add_numeric_methods_disabled()
MultiIndex._add_numeric_methods_add_sub_disabled()
MultiIndex._add_logical_methods_disabled()
def sparsify_labels(label_list, start: int = 0, sentinel=""):
    """Blank out repeated leading labels so grouped rows render only once.

    ``label_list`` holds one sequence per level. For each row after
    ``start``, leading labels equal to the previous row are replaced with
    ``sentinel``; from the first differing label onward the row is shown
    as-is, and the innermost level is always shown. Returns the
    sparsified sequences, one tuple per level.
    """
    rows = list(zip(*label_list))
    n_levels = len(label_list)
    sparsified = rows[: start + 1]
    previous = rows[start]
    for row in rows[start + 1 :]:
        display = []
        for level, (prev_label, label) in enumerate(zip(previous, row)):
            if level == n_levels - 1:
                # Innermost level is always displayed.
                display.append(label)
                sparsified.append(display)
                break
            if prev_label == label:
                display.append(sentinel)
            else:
                # First difference: show the remainder of the row verbatim.
                display.extend(row[level:])
                sparsified.append(display)
                break
        previous = row
    return list(zip(*sparsified))
def _get_na_rep(dtype) -> str:
return {np.datetime64: "NaT", np.timedelta64: "NaT"}.get(dtype, "NaN")
def maybe_droplevels(index, key):
    """
    Attempt to drop level or levels from the given index.

    A tuple key drops one level per element (rolling back entirely if the
    key is deeper than the index); a scalar key drops at most one level.

    Parameters
    ----------
    index: Index
    key : scalar or tuple

    Returns
    -------
    Index
    """
    if not isinstance(key, tuple):
        try:
            return index.droplevel(0)
        except ValueError:
            # Cannot drop the only level; keep the index untouched.
            return index
    result = index
    for _ in key:
        try:
            result = result.droplevel(0)
        except ValueError:
            # we have dropped too much, so back out
            return index
    return result
def _coerce_indexer_frozen(array_like, categories, copy: bool = False) -> np.ndarray:
"""
Coerce the array_like indexer to the smallest integer dtype that can encode all
of the given categories.
Parameters
----------
array_like : array-like
categories : array-like
copy : bool
Returns
-------
np.ndarray
Non-writeable.
"""
array_like = coerce_indexer_dtype(array_like, categories)
if copy:
array_like = array_like.copy()
array_like.flags.writeable = False
return array_like
| {
"repo_name": "rs2/pandas",
"path": "pandas/core/indexes/multi.py",
"copies": "1",
"size": "127449",
"license": "bsd-3-clause",
"hash": 3405361719005851600,
"line_mean": 32.5833992095,
"line_max": 88,
"alpha_frac": 0.5209770183,
"autogenerated": false,
"ratio": 4.355295082527424,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5376272100827424,
"avg_score": null,
"num_lines": null
} |
from functools import wraps
from threading import local
from .models import Revision
from .utils import obj_diff, obj_is_changed
class Transaction(object):
    """Collects objects saved inside per-thread scopes and writes a Revision
    row for each of them when the scope commits."""

    def __init__(self):
        """Create the per-thread state container."""
        self.ctx = local()

    @property
    def scopes(self):
        """Per-thread stack of open scopes (each a set of pending objects)."""
        if not hasattr(self.ctx, 'transaction_scopes'):
            self.ctx.transaction_scopes = []
        return self.ctx.transaction_scopes

    def begin(self):
        """Open a new scope; suppressed (counted) while locked."""
        if self.locked(+1):
            return None
        self.scopes.append(set())
        return self

    def commit(self):
        """Close the innermost scope, persisting every collected object."""
        if self.locked(-1):
            return None
        for obj in self.scopes.pop():
            self.post_save(obj)

    def rollback(self):
        """Close the innermost scope, discarding its collected objects."""
        if self.locked(-1):
            return None
        self.scopes.pop()

    def lock(self):
        """Start counting nested begin/commit pairs instead of executing them."""
        self.ctx.locked = 0
        return self

    def locked(self, val=None):
        """Adjust (by *val*) and query the per-thread lock counter.

        Returns True while the counter is held; False when it was never
        taken or has just drained back to zero.
        """
        if not hasattr(self.ctx, 'locked'):
            return False
        if val is not None:
            self.ctx.locked += val
            if self.ctx.locked == 0:
                delattr(self.ctx, 'locked')
                return False
        return True

    def add_obj(self, obj):
        """Register *obj* with the innermost scope after computing its diff."""
        self.pre_save(obj)
        self.scopes[-1].add(obj)

    def pre_save(self, obj):
        """Attach revision_info (field delta, editor, editor IP) to *obj*."""
        model = obj.__class__
        if not hasattr(obj, 'revision_info'):
            obj.revision_info = {}
        info = obj.revision_info
        try:
            previous = model._default_manager.get(pk=obj.pk)
        except model.DoesNotExist:
            # New object: diff against a pristine instance.
            previous = model()
        if not obj_is_changed(previous, obj):
            # Nothing changed; clear any stale info so post_save is a no-op.
            obj.revision_info = {}
            return
        info['delta'] = obj_diff(previous, obj)
        request = getattr(self.ctx, 'request', None)
        if request:
            if not info.get('editor'):
                info['editor'] = request.user
            if not info.get('editor_ip'):
                info['editor_ip'] = request.META.get("REMOTE_ADDR")
            if not getattr(info.get('editor'), 'pk', None):  # anonymous user
                info['editor'] = None

    def post_save(self, obj):
        """Persist a Revision row for *obj* if a delta was recorded."""
        info = getattr(obj, 'revision_info', {})
        if info:
            revision = Revision(**info)
            revision.content_object = obj
            revision.save()

    def __call__(self, f=None):
        """Act as a decorator; with no argument, return the transaction itself."""
        if f is None:
            return self

        @wraps(f)
        def _decorated(*args, **kw):
            with self:
                result = f(*args, **kw)
            return result
        return _decorated

    def __enter__(self):
        self.begin()
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        if exc_type:
            self.rollback()
            return
        try:
            self.commit()
        except BaseException:
            # Commit failed part-way: undo the scope, then re-raise.
            self.rollback()
            raise
# Module-level singleton; safe to share because Transaction keeps all of
# its mutable state in a threading.local container.
transaction = Transaction()
| {
"repo_name": "luzfcb/django-versioning",
"path": "versioning/transaction.py",
"copies": "1",
"size": "3079",
"license": "bsd-3-clause",
"hash": -1988440609296181200,
"line_mean": 24.6583333333,
"line_max": 69,
"alpha_frac": 0.5073075674,
"autogenerated": false,
"ratio": 4.235213204951857,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5242520772351857,
"avg_score": null,
"num_lines": null
} |
from functools import wraps
from threading import Lock, Event
from weakref import WeakValueDictionary
from os import walk
from os import curdir
from os import listdir
from os import makedirs
from os import unlink
from os.path import relpath
from os.path import join
from os.path import abspath
from os.path import exists
from errno import ENOENT, EEXIST
import os.path
import hashlib
import shutil
import json
import sys
from six import binary_type
# Variant of base64 compat layer inspired by BSD code from Bcfg2
# https://github.com/Bcfg2/bcfg2/blob/maint/src/lib/Bcfg2/Compat.py
if sys.version_info >= (3, 0):
    from base64 import b64encode as _b64encode, b64decode as _b64decode

    @wraps(_b64encode)
    def b64encode(val, **kwargs):
        # On Python 3 the stdlib function only accepts bytes; try it first,
        # then fall back to encoding text input as UTF-8 and returning text
        # so callers can stay str-only.
        try:
            return _b64encode(val, **kwargs)
        except TypeError:
            return _b64encode(val.encode('UTF-8'), **kwargs).decode('UTF-8')

    @wraps(_b64decode)
    def b64decode(val, **kwargs):
        # Text in, text out (UTF-8) on Python 3.
        return _b64decode(val.encode('UTF-8'), **kwargs).decode('UTF-8')
else:
    # Python 2: the stdlib functions already operate on str.
    from base64 import b64encode, b64decode
def unique_path_prefix(path):
    """Return a stable hex digest identifying *path* (MD5 of its UTF-8 bytes)."""
    digest = hashlib.md5()
    digest.update(path.encode('utf-8'))
    return digest.hexdigest()
def copy(source, destination):
    """Copy *source* to *destination*, skipping the copy when both
    resolve to the same absolute path."""
    src = os.path.abspath(source)
    dst = os.path.abspath(destination)
    if src == dst:
        # Copying a file onto itself would truncate it; do nothing.
        return
    shutil.copyfile(src, dst)
def ensure_directory(file_path):
    """Create the parent directory of *file_path* if it does not exist.

    Fixes a TOCTOU race in the original ``exists()``-then-``makedirs()``
    pattern: another process could create the directory between the two
    calls, making ``makedirs`` raise. Instead, attempt the creation and
    tolerate EEXIST — the same pattern MessageQueueUUIDStore uses.
    Also tolerates a bare filename (empty dirname).
    """
    directory = os.path.dirname(file_path)
    if not directory:
        # Bare filename: current directory always exists.
        return
    try:
        os.makedirs(directory)
    except (OSError, IOError) as exc:
        # Already existing (possibly created concurrently) is fine.
        if exc.errno != EEXIST:
            raise
def directory_files(directory):
    """Return the relative path of every file beneath *directory*.

    Files directly inside *directory* appear as bare names; files in
    subdirectories are prefixed with their relative directory path.
    """
    found = []
    for dirpath, _dirnames, filenames in walk(directory):
        rel = relpath(dirpath, directory)
        if rel == curdir:
            # Top level: don't include "." in the reported paths.
            found.extend(filenames)
        else:
            found.extend(join(rel, name) for name in filenames)
    return found
def filter_destination_params(destination_params, prefix):
    """Return params whose keys start with *prefix*, with the prefix stripped.

    ``None`` input is treated as an empty mapping.
    """
    params = destination_params or {}
    return {key[len(prefix):]: value
            for key, value in params.items()
            if key.startswith(prefix)}
def to_base64_json(data):
    """Serialize *data* to JSON and return the Base64 encoding of it.

    Round-trips with :func:`from_base64_json`.
    """
    return b64encode(json_dumps(data))
def from_base64_json(data):
    """Decode Base64 *data* and parse the result as JSON."""
    decoded = b64decode(data)
    return json.loads(decoded)
class PathHelper(object):
    """Translate file paths between the local path convention and a remote
    separator convention (e.g. posix client talking to a Windows server).

    ``separator`` is the remote separator; ``local_path_module`` supplies
    the local join/sep (defaults to ``os.path``).
    """

    def __init__(self, separator, local_path_module=os.path):
        self.separator = separator
        self.local_join = local_path_module.join
        self.local_sep = local_path_module.sep

    def remote_name(self, local_name):
        """Re-join a local path using the remote separator."""
        return self.remote_join(*local_name.split(self.local_sep))

    def local_name(self, remote_name):
        """Re-join a remote path using the local separator."""
        return self.local_join(*remote_name.split(self.separator))

    def remote_join(self, *args):
        """Join path components with the remote separator."""
        return self.separator.join(args)

    def from_posix_with_new_base(self, posix_path, old_base, new_base):
        """Rebase *posix_path* from *old_base* onto *new_base*.

        Raises when *posix_path* does not start with *old_base*.
        """
        # TODO: Test with new_base as a windows path against nt_path_helper.
        base = old_base[:-1] if old_base.endswith("/") else old_base
        if not posix_path.startswith(base):
            message_template = "Cannot compute new path for file %s, does not start with %s."
            raise Exception(message_template % (posix_path, base))
        remainder = posix_path[len(base):]
        while remainder.startswith("/"):
            remainder = remainder[1:]
        parts = remainder.split(self.separator)
        if new_base.endswith(self.separator):
            new_base = new_base[:-len(self.separator)]
        return self.remote_join(new_base, *parts)
class TransferEventManager(object):
    """Hand out per-path EventHolder objects shared between concurrent
    transfers; entries disappear once nobody holds a reference."""

    def __init__(self):
        # Weak values: a holder lives only as long as some transfer keeps it.
        self.events = WeakValueDictionary(dict())
        self.events_lock = Lock()

    def acquire_event(self, path, force_clear=False):
        """Return the (possibly shared) EventHolder for *path*."""
        with self.events_lock:
            holder = self.events.get(path)
            if holder is None:
                holder = EventHolder(Event(), path, self)
                self.events[path] = holder
            if force_clear:
                holder.event.clear()
        return holder
class EventHolder(object):
    """Pairs a threading.Event with the transfer path it guards."""

    def __init__(self, event, path, condition_manager):
        self.event = event
        self.path = path
        self.condition_manager = condition_manager
        self.failed = False

    def release(self):
        """Signal that the transfer for this path has finished."""
        self.event.set()

    def fail(self):
        """Record that the transfer for this path failed."""
        self.failed = True
def json_loads(obj):
    """Parse JSON from *obj*, decoding UTF-8 bytes input first."""
    text = obj.decode("utf-8") if isinstance(obj, binary_type) else obj
    return json.loads(text)
def json_dumps(obj):
    """Serialize *obj* to JSON, decoding UTF-8 bytes input and routing
    bytes values through ClientJsonEncoder."""
    text = obj.decode("utf-8") if isinstance(obj, binary_type) else obj
    return json.dumps(text, cls=ClientJsonEncoder)
class ClientJsonEncoder(json.JSONEncoder):
    """JSON encoder that serializes bytes values as UTF-8 text."""

    def default(self, obj):
        if isinstance(obj, binary_type):
            return obj.decode("utf-8")
        # Fall back to the stock behavior (raises TypeError).
        return super(ClientJsonEncoder, self).default(obj)
class MessageQueueUUIDStore(object):
    """Persistent dict-like object for persisting message queue UUIDs that are
    awaiting acknowledgement or that have been operated on.

    Each key maps to a JSON file named after the key inside the store
    directory; the file's mtime doubles as a timestamp (see ``get_time`` /
    ``set_time``).
    """

    def __init__(self, persistence_directory, subdirs=None):
        if subdirs is None:
            subdirs = ['acknowledge_uuids']
        self.__store = abspath(join(persistence_directory, *subdirs))
        try:
            makedirs(self.__store)
        except (OSError, IOError) as exc:
            # Already existing (including concurrent creation) is fine;
            # anything else is fatal.
            if exc.errno != EEXIST:
                raise

    def __path(self, item):
        # Keys map directly to file names inside the store directory.
        return join(self.__store, item)

    def __contains__(self, item):
        return exists(self.__path(item))

    def __setitem__(self, key, value):
        # FIX: the original `open(...).write(...)` leaked the file handle;
        # a with-block closes (and flushes) it deterministically.
        with open(self.__path(key), 'w') as fh:
            fh.write(json.dumps(value))

    def __getitem__(self, key):
        # FIX: close the handle deterministically (was `open(...).read()`).
        with open(self.__path(key)) as fh:
            return json.loads(fh.read())

    def __delitem__(self, key):
        try:
            unlink(self.__path(key))
        except (OSError, IOError) as exc:
            if exc.errno == ENOENT:
                # Match dict semantics: deleting a missing key -> KeyError.
                raise KeyError(key)
            raise

    def keys(self):
        """Iterate over all stored keys (one file per key)."""
        return iter(listdir(self.__store))

    def get_time(self, key):
        """Return the mtime of *key*'s file; raises KeyError when missing."""
        try:
            return os.stat(self.__path(key)).st_mtime
        except (OSError, IOError) as exc:
            if exc.errno == ENOENT:
                raise KeyError(key)
            raise

    def set_time(self, key):
        """Touch *key*'s file to refresh its timestamp; KeyError when missing."""
        try:
            os.utime(self.__path(key), None)
        except (OSError, IOError) as exc:
            if exc.errno == ENOENT:
                raise KeyError(key)
            raise
| {
"repo_name": "ssorgatem/pulsar",
"path": "pulsar/client/util.py",
"copies": "1",
"size": "8569",
"license": "apache-2.0",
"hash": -8727887246279321000,
"line_mean": 29.4946619217,
"line_max": 119,
"alpha_frac": 0.6140739876,
"autogenerated": false,
"ratio": 3.669807280513919,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9783028893597285,
"avg_score": 0.00017047490332656375,
"num_lines": 281
} |
from functools import wraps
from threading import Lock
import logging
# Public API of this module (`from speedometer import *`).
__all__ = [
    'Speedometer',
]
class Speedometer(object):
    """Instruments methods of arbitrary objects so that every call is
    timed/recorded by an injected metric object."""

    logger = logging.getLogger('speedometer')

    def __init__(self, metric, debug=__debug__):
        self.metric = metric
        self._lock = Lock()
        # In debug mode metric failures propagate; otherwise they are
        # swallowed (logged) so instrumentation never breaks the caller.
        self.debug = debug

    def patch_object(self, original, *method_names):
        """Replace each named method on *original* with a timed wrapper."""
        for method_name in method_names:
            wrapper = self.create_method_wrapper(original, method_name)
            setattr(original, method_name, wrapper)

    def create_method_wrapper(self, original, name):
        """Build a wrapper around ``original.<name>`` that reports to the metric."""
        target = getattr(original, name)

        @wraps(target)
        def timed(*args, **kwargs):
            started = self.metric.start()
            try:
                return target(*args, **kwargs)
            finally:
                # Record the sample even when the call raised.
                if self.debug:
                    self._update_metric(started)
                else:
                    self._safely_update_metric(started)

        return timed

    def _safely_update_metric(self, started):
        # Best-effort recording: never let metric errors escape.
        try:
            self._update_metric(started)
        except Exception:
            try:
                self.logger.exception('Cant update metric')
            except Exception:
                pass

    def _update_metric(self, started):
        with self._lock:
            self.metric.finish(started)

    def pop_stats(self):
        """Atomically return the collected results and reset the metric."""
        with self._lock:
            stats = self.metric.get_results()
            self.metric.cleanup()
        return stats
| {
"repo_name": "minmax/python-speedometer",
"path": "speedometer/speedometer.py",
"copies": "1",
"size": "1585",
"license": "unlicense",
"hash": 5217181700408366000,
"line_mean": 25.8644067797,
"line_max": 60,
"alpha_frac": 0.5438485804,
"autogenerated": false,
"ratio": 4.439775910364146,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0,
"num_lines": 59
} |
from functools import wraps
from threading import RLock
from collections import defaultdict
class AlreadyRunningError(Exception):
    """Raised when a ``try_lock``-guarded function is invoked while a call
    for the same key is still in progress."""
    pass
def try_lock(keyfn=None):
    '''Decorator preventing concurrent execution of a function per key.

    ``keyfn(args, kwargs)`` selects the lock key, allowing the lock to
    cover only a subset of the arguments; by default all calls share one
    key. Raises ``AlreadyRunningError`` when a call for the same key is
    already in flight (the lock is reentrant, so recursion is allowed).

    Example::

        @try_lock(lambda args, kw: args[0])
        def push(day, records):
            ...
    '''
    keyfn = keyfn or (lambda args, kw: 7)  # constant key: one lock for all
    by_key_locks = defaultdict(RLock)
    master_lock = RLock()

    def decorator(func):
        @wraps(func)
        def guarded(*args, **kw):
            key = keyfn(args, kw)
            # Creating/fetching the per-key lock must itself be atomic.
            with master_lock:
                key_lock = by_key_locks[key]
            if not key_lock.acquire(blocking=False):
                raise AlreadyRunningError(
                    "{} already running for {}".format(func.__name__, key))
            try:
                return func(*args, **kw)
            finally:
                key_lock.release()
        return guarded
    return decorator
| {
"repo_name": "tebeka/pythonwise",
"path": "trylock.py",
"copies": "1",
"size": "1255",
"license": "bsd-3-clause",
"hash": -5518853996895444000,
"line_mean": 25.7021276596,
"line_max": 78,
"alpha_frac": 0.5689243028,
"autogenerated": false,
"ratio": 4.327586206896552,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.0005599104143337066,
"num_lines": 47
} |
from functools import wraps
from threading import Thread
from app import render_template, redirect, url_for, request, session, flash, Markup
# decorators
def login_required(f):
    """Block anonymous visitors: redirect to the login page with a flash
    message, otherwise run the wrapped view."""
    @wraps(f)
    def wrap(*args, **kwargs):
        if 'logged_in' not in session:
            flash('You need to log in first.')
            return redirect(url_for('login'))
        return f(*args, **kwargs)
    return wrap
def login_reminder(f):
    """Flash a sign-in/register prompt for anonymous visitors, then run the
    wrapped view regardless of authentication state."""
    @wraps(f)
    def wrap(*args, **kwargs):
        if 'logged_in' not in session:
            message = Markup("<a href='/login'>Sign in</a> or <a href='/register'>register</a> to play.")
            flash(message)
        return f(*args, **kwargs)
    return wrap
# This decorator is to perform asynchronous tasks (such as sending emails)
def async(f):
def wrapper(*args, **kwargs):
thr = Thread(target=f, args=args, kwargs=kwargs)
thr.start()
return wrapper
# Started, but not finished this decorator. I need to think about if it makes sense to implement this. There might be use cases for a similar decorator to limit trading times/days, but again, that might not serve a purpose.
# def after_hours_mode(f):
# @wraps(f)
# def wrap(*args, **kwargs):
# now = datetime.datetime.utcnow()
# if now.weekday() >= 5:
# #don't allow queries UNLESS the stock is NOT in db
# pass
# else:
# return f(*args, **kwargs)
# return wrap
# If there's no connectivity to yahoo-finance api, bypass and query db instead, but also indicate this to user
# def db_if_yahoo_fail(f):
# @wraps(f)
# def wrap(*args, **kwargs):
# try:
# f(*args, **kwargs)
# return flash('hi')
# except:
# flash("Couldn't connect to yahoo-finance API, getting quotes from database.")
# # return search_company(*args)
# return redirect(url_for('news'))
# return wrap
# -------------------------------------------------------
| {
"repo_name": "abmorton/stockhawk",
"path": "decorators.py",
"copies": "1",
"size": "1826",
"license": "mit",
"hash": -4874174813102836000,
"line_mean": 28.4516129032,
"line_max": 223,
"alpha_frac": 0.6549835706,
"autogenerated": false,
"ratio": 3.0844594594594597,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9122971906666153,
"avg_score": 0.023294224678661282,
"num_lines": 62
} |
from functools import wraps
from threading import Thread
def before(f, chain=False):
    """Runs f before the decorated function.

    With ``chain=True``, f's return value becomes the sole argument of the
    decorated function; otherwise f is called for its side effects with the
    same arguments and the original call proceeds unchanged.
    """
    def decorator(g):
        @wraps(g)
        def h(*args, **kargs):
            if not chain:
                f(*args, **kargs)
                return g(*args, **kargs)
            return g(f(*args, **kargs))
        return h
    return decorator
def after(f, chain=False):
    """Runs f after the decorated function.

    With ``chain=True``, f receives the decorated function's result and
    f's return value is returned; otherwise f is called with the original
    arguments and the decorated function's result is returned.
    """
    def decorator(g):
        @wraps(g)
        def h(*args, **kargs):
            if chain:
                return f(g(*args, **kargs))
            result = g(*args, **kargs)
            f(*args, **kargs)
            return result
        return h
    return decorator
def during(f):
    """Runs f concurrently with the decorated function in a separate thread.

    Both callables receive the same arguments; the side thread is joined
    before the decorated function's result is returned.
    """
    def decorator(g):
        @wraps(g)
        def h(*args, **kargs):
            side_thread = Thread(target=f, args=args, kwargs=kargs)
            side_thread.start()
            result = g(*args, **kargs)
            side_thread.join()
            return result
        return h
    return decorator
| {
"repo_name": "astex/sequential",
"path": "sequential/decorators.py",
"copies": "1",
"size": "1148",
"license": "mit",
"hash": -3097837276728778000,
"line_mean": 24.5111111111,
"line_max": 80,
"alpha_frac": 0.4912891986,
"autogenerated": false,
"ratio": 4.2518518518518515,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5243141050451852,
"avg_score": null,
"num_lines": null
} |
from functools import wraps
from threepio import logger
from django.utils import timezone
from rest_framework import exceptions, status
from rest_framework.decorators import detail_route
from rest_framework.exceptions import ValidationError
from rest_framework.response import Response
from rest_framework.viewsets import ModelViewSet, ReadOnlyModelViewSet
from core import exceptions as core_exceptions
from core.models import IdentityMembership, CloudAdministrator
from core.models.status_type import StatusType
from api.permissions import (
ApiAuthOptional, ApiAuthRequired, EnabledUserRequired,
InMaintenance, CloudAdminRequired
)
from api.v2.views.mixins import MultipleFieldLookup
def unresolved_requests_only(fn):
    """
    Only allow an unresolved request to be processed.

    Staff/superusers/cloud-admins may always act; the request's creator may
    act only while the request is still open.
    """
    @wraps(fn)
    def wrapper(self, request, *args, **kwargs):
        instance = self.get_object()
        is_staff = (request.user.is_staff
                    or request.user.is_superuser
                    or CloudAdministrator.objects.filter(user=request.user.id).exists())
        is_owner = request.user == getattr(instance, 'created_by')
        if not (is_owner or is_staff):
            message = (
                "Method '%s' not allowed: "
                "Only staff members and the owner are authorized to make this request."
                % self.request.method
            )
            raise exceptions.NotAuthenticated(detail=message)
        if not is_staff and (hasattr(instance, "is_closed") and instance.is_closed()):
            message = (
                "Method '%s' not allowed: "
                "the request has already been resolved "
                "and cannot be modified by a non-staff user."
                % self.request.method
            )
            raise exceptions.MethodNotAllowed(self.request.method,
                                              detail=message)
        return fn(self, request, *args, **kwargs)
    return wrapper
class AuthViewSet(ModelViewSet):
    # Base viewset for authenticated API access: full HTTP verb set;
    # requires maintenance to be over, an enabled user, and a valid token
    # (permission order matters -- cheapest/broadest checks run first).
    http_method_names = ['get', 'put', 'patch', 'post',
                         'delete', 'head', 'options', 'trace']
    permission_classes = (InMaintenance,
                          EnabledUserRequired,
                          ApiAuthRequired,)
class AdminAuthViewSet(AuthViewSet):
    # AuthViewSet that additionally requires cloud-administrator privileges.
    permission_classes = (InMaintenance,
                          CloudAdminRequired,
                          EnabledUserRequired,
                          ApiAuthRequired,)
class AuthOptionalViewSet(ModelViewSet):
    # Read/write viewset where authentication is optional (anonymous allowed).
    permission_classes = (InMaintenance,
                          ApiAuthOptional,)
class AuthReadOnlyViewSet(ReadOnlyModelViewSet):
    # Read-only viewset where authentication is optional (anonymous allowed).
    permission_classes = (InMaintenance,
                          ApiAuthOptional,)
class OwnerUpdateViewSet(AuthViewSet):
    """
    Base class ViewSet to handle the case where a normal user should see 'GET'
    and an owner (or admin) should be allowed to PUT or PATCH
    """
    http_method_names = ['get', 'put', 'patch', 'post',
                         'delete', 'head', 'options', 'trace']

    @property
    def allowed_methods(self):
        # Subclasses must override this to narrow the verbs per user;
        # raising here makes a missing override fail loudly instead of
        # silently allowing every method.
        raise Exception("The @property-method 'allowed_methods' should be"
                        " handled by the subclass of OwnerUpdateViewSet")
class BaseRequestViewSet(MultipleFieldLookup, AuthViewSet):
    """
    Base class ViewSet to handle requests

    Subclasses set ``model`` and ``admin_serializer_class`` and may override
    the ``*_action`` hooks (submit/approve/deny/close).
    """

    admin_serializer_class = None
    model = None
    lookup_fields = ("id", "uuid")

    def get_queryset(self):
        """
        Return users requests or all the requests if the user is an admin.
        """
        assert self.model is not None, (
            "%s should include a `model` attribute."
            % self.__class__.__name__
        )
        if self.request.user.is_staff:
            return self.model.objects.all().order_by('-start_date')
        return self.model.objects.filter(created_by=self.request.user).order_by('-start_date')

    def get_serializer_class(self):
        """
        Return the `serializer_class` or `admin_serializer_class`
        given the users privileges.
        """
        assert self.admin_serializer_class is not None, (
            "%s should include an `admin_serializer_class` attribute."
            % self.__class__.__name__
        )
        http_method = self.request._request.method
        if http_method != 'POST' and self.request.user.is_staff:
            return self.admin_serializer_class
        return self.serializer_class

    def perform_create(self, serializer):
        """Create a request in 'pending' state and trigger submit_action."""
        # NOTE: An identity could possible have multiple memberships
        # It may be better to directly take membership rather than an identity
        identity_id = serializer.initial_data.get("identity")
        status, _ = StatusType.objects.get_or_create(name="pending")
        try:
            # NOTE: This is *NOT* going to be a sufficient query when sharing..
            membership = IdentityMembership.objects.get(identity=identity_id)
            instance = serializer.save(
                membership=membership,
                status=status,
                created_by=self.request.user
            )
            if serializer.initial_data.get("admin_url"):
                admin_url = serializer.initial_data.get("admin_url") + str(instance.id)
                self.submit_action(instance, options={"admin_url": admin_url})
            else:
                self.submit_action(instance)
        except (core_exceptions.ProviderLimitExceeded,  # NOTE: DEPRECATED -- REMOVE SOON, USE BELOW.
                core_exceptions.RequestLimitExceeded):
            message = "Only one active request is allowed per provider."
            raise exceptions.MethodNotAllowed('create', detail=message)
        except core_exceptions.InvalidMembership:
            message = (
                "The user '%s' is not a valid member."
                % self.request.user.username
            )
            raise exceptions.ParseError(detail=message)
        except IdentityMembership.DoesNotExist:
            message = (
                "The identity '%s' does not have a membership"
                % identity_id
            )
            raise exceptions.ParseError(detail=message)
        except Exception as e:
            message = str(e)
            raise exceptions.ParseError(detail=message)

    @unresolved_requests_only
    def destroy(self, request, *args, **kwargs):
        """Close (end-date) an unresolved request."""
        instance = self.get_object()
        try:
            self.perform_destroy(instance)
            return Response(status=status.HTTP_204_NO_CONTENT)
        except Exception as e:
            # FIX: was a *set* literal wrapping the string and used the
            # Python-2-only `e.message`; build a plain string from the
            # exception instead (consistent with perform_create).
            message = (
                "An error was encoutered when closing the request: %s" % e
            )
            logger.exception(e)
            raise exceptions.ParseError(detail=message)

    @detail_route()
    def approve(self, *args, **kwargs):
        """
        See the deny docs
        """
        request_obj = self.get_object()
        SerializerCls = self.get_serializer_class()
        serializer = SerializerCls(
            request_obj, context={'request': self.request})
        if not request_obj:
            raise ValidationError(
                "Request unknown. "
                "Could not approve request."
            )
        if not serializer.is_valid():
            raise ValidationError(
                "Serializer could not be validated: %s"
                "Could not approve request."
                % (serializer.errors,)
            )
        approve_status = StatusType.objects.get(name='approved')
        request_obj = serializer.save(status=approve_status)
        self.approve_action(request_obj)
        return Response(serializer.data)

    @detail_route()
    def deny(self, *args, **kwargs):
        """
        #FIXME: Both of these actions do something similar, they also 'create and abuse' serializers. Is there a better way to handle this? Lets lok into how `create` vs `perform_create` is called in a DRF 'normal' view.
        """
        request_obj = self.get_object()
        SerializerCls = self.get_serializer_class()
        if not request_obj:
            raise ValidationError(
                "Request unknown. "
                "Could not deny request."
            )
        # Mocking a validation of data...
        serializer = SerializerCls(
            request_obj, data={}, partial=True,
            context={'request': self.request})
        if not serializer.is_valid():
            raise ValidationError(
                "Serializer could not be validated: %s"
                "Could not deny request."
                % (serializer.errors,)
            )
        deny_status = StatusType.objects.get(name='denied')
        request_obj = serializer.save(status=deny_status)
        self.deny_action(request_obj)
        return Response(serializer.data)

    def perform_destroy(self, instance):
        """
        Add an end date to a request and take no further action
        """
        status, _ = StatusType.objects.get_or_create(name="closed")
        instance.status = status
        instance.end_date = timezone.now()
        instance.save()

    def perform_update(self, serializer):
        """
        Updates the request and performs any update actions.
        """
        # NOTE: An identity could possible have multiple memberships
        # It may be better to directly take membership rather than an identity
        identity = serializer.initial_data.get('identity', {})
        membership = None
        if isinstance(identity, dict):
            identity_id = identity.get("id", None)
        else:
            identity_id = identity
        try:
            if identity_id is not None:
                membership = IdentityMembership.objects.get(
                    identity=identity_id)
            if membership:
                instance = serializer.save(end_date=timezone.now(),
                                           membership=membership)
            else:
                if self.request.method == "PATCH":
                    instance = serializer.save(status=StatusType.objects.get(id=serializer.initial_data['status']))
                else:
                    instance = serializer.save()
            # Fire the hook matching the request's new state.
            if instance.is_approved():
                self.approve_action(instance)
            if instance.is_closed():
                self.close_action(instance)
            if instance.is_denied():
                self.deny_action(instance)
        except (core_exceptions.ProviderLimitExceeded,  # NOTE: DEPRECATED -- REMOVE SOON, USE BELOW.
                core_exceptions.RequestLimitExceeded):
            message = "Only one active request is allowed per provider."
            raise exceptions.MethodNotAllowed('create', detail=message)
        except core_exceptions.InvalidMembership:
            message = (
                "The user '%s' is not a valid member."
                % self.request.user.username
            )
            raise exceptions.ParseError(detail=message)
        except IdentityMembership.DoesNotExist:
            message = (
                "The identity '%s' does not have a membership"
                % identity_id
            )
            raise exceptions.ParseError(detail=message)
        except Exception as e:
            # FIX: same set-literal / `e.message` problem as destroy().
            message = (
                "An error was encoutered when updating the request: %s" % e
            )
            logger.exception(e)
            raise exceptions.ParseError(detail=message)

    @unresolved_requests_only
    def update(self, request, *args, **kwargs):
        """
        Update the request for the specific identifier
        """
        return super(BaseRequestViewSet, self).update(request, *args, **kwargs)

    def approve_action(self, instance):
        """
        Perform the approved action for the request
        """

    def deny_action(self, instance):
        """
        Perform the denied action for the request
        """

    def close_action(self, instance):
        """
        Perform the close action for the request

        FIX: perform_update calls this hook, but it was never defined,
        raising AttributeError whenever a request transitioned to closed.
        """

    def submit_action(self, instance):
        """
        Perform the submit action for a new request
        """
| {
"repo_name": "CCI-MOC/GUI-Backend",
"path": "api/v2/views/base.py",
"copies": "1",
"size": "12075",
"license": "apache-2.0",
"hash": 6240476827036305000,
"line_mean": 36.734375,
"line_max": 220,
"alpha_frac": 0.5897308489,
"autogenerated": false,
"ratio": 4.683863460046548,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.00033694542210389476,
"num_lines": 320
} |
from functools import wraps
from time import sleep
from mydjangoapp.celeryconf import app
from .models import Job
from .messagequeue import send_msg
from .redisconf import redis_conn
def update_job(fn):
    """Decorator that tracks a task's lifecycle on its ``Job`` row.

    The wrapped callable is invoked with ``job_id`` stripped off: the matching
    ``Job`` is marked 'started', then 'finished' (storing the return value) or
    'failed' if the task raised.  Finally, if the job's user has a token in
    redis, a status message is published through ``send_msg``.

    Fix vs. previous revision: the bare ``except:`` also swallowed
    SystemExit/KeyboardInterrupt; it is narrowed to ``except Exception`` so
    only application errors mark the job as failed.
    """
    @wraps(fn)
    def wrapper(job_id, *args, **kwargs):
        job = Job.objects.get(id=job_id)
        job.status = 'started'
        job.save()
        try:
            result = fn(*args, **kwargs)
            job.result = result
            job.status = 'finished'
            job.save()
        except Exception:
            # Best-effort: record the failure on the job instead of crashing
            # the worker; the exception details are intentionally discarded.
            job.result = None
            job.status = 'failed'
            job.save()
        token = redis_conn.get(job.user_id)
        if token:
            send_msg({'user_id': job.user_id, 'job_id': job.id, 'status': job.status})
    return wrapper
@app.task
@update_job
def power(n):
    """Return 2 to the n'th power (celery task; progress tracked via Job)."""
    return 2 ** n
@app.task
@update_job
def fib(n):
    """Return the n'th Fibonacci number (celery task; progress tracked via Job).

    Raises ValueError for negative ``n``.
    """
    if n < 0:
        raise ValueError("Fibonacci numbers are only defined for n >= 0.")
    return _fib(n)
def _fib(n):
if n == 0 or n == 1:
return n
else:
return _fib(n - 1) + _fib(n - 2)
@app.task
@update_job
def sleepwake(n):
    """Sleep for ``n`` seconds, then return ``n`` (celery task)."""
    sleep(n)
    return n
@update_job
def syncsleepwake(n=1):
    """Sleep for ``n`` seconds, then return ``n``.

    Synchronous variant: job-tracked but not registered as a celery task
    (no ``@app.task``).
    """
    sleep(n)
    return n
# Registry mapping a public task name to its callable.
# NOTE(review): ``syncsleepwake`` is not listed here -- confirm that is
# intentional (it is the only task without ``@app.task``).
TASK_MAPPING = {
    'power': power,
    'fibonacci': fib,
    'sleepwake': sleepwake,
}
} | {
"repo_name": "wenxinwilliam/docker-django-celery",
"path": "mydjangoapp/mydjangoapp/tasks.py",
"copies": "1",
"size": "1474",
"license": "mit",
"hash": -6299101498262889000,
"line_mean": 18.4078947368,
"line_max": 86,
"alpha_frac": 0.5664857531,
"autogenerated": false,
"ratio": 3.2395604395604396,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.43060461926604393,
"avg_score": null,
"num_lines": null
} |
from functools import wraps
from time import sleep
import capybara
from capybara.exceptions import ElementNotFound, FrozenInTime, ScopeError
from capybara.helpers import Timer
from capybara.node.actions import ActionsMixin
from capybara.node.finders import FindersMixin
from capybara.node.matchers import MatchersMixin
class Base(FindersMixin, ActionsMixin, MatchersMixin, object):
    """
    A :class:`Base` represents either an element on a page through the subclass :class:`Element` or
    a document through :class:`Document`.
    Both types of Node share the same methods, used for interacting with the elements on the page.
    These methods are divided into three categories: finders, actions, and matchers. These are found
    in the classes :class:`FindersMixin`, :class:`ActionsMixin`, and :class:`MatchersMixin`
    respectively.
    A :class:`Session` exposes all methods from :class:`Document` directly::
        session = Session("selenium", my_app)
        session.visit("/")
        session.fill_in("Foo", value="Bar") # from capybara.node.actions.ActionsMixin
        bar = session.find("#bar") # from capybara.node.finders.FindersMixin
        bar.select("Baz", field="Quox") # from capybara.node.actions.ActionsMixin
        session.has_css("#foobar") # from capybara.node.matchers.MatchersMixin
    Args:
        session (Session): The session from which this node originated.
        base (driver.Node): The underlying driver node.
    """
    def __init__(self, session, base):
        self.session = session
        self.base = base
        # NOTE(review): set but never read within this class -- subclasses
        # presumably consult it when deciding whether ``reload`` may re-query.
        self.allow_reload = False
        # Stack of scope/frame context managers entered via ``__enter__``.
        self._contexts = []
    def reload(self):
        """
        Reloads the underlying driver node.
        Returns:
            node.Base: This node.
        """
        # Base implementation is a no-op; subclasses re-query the driver.
        return self
    def __eq__(self, other):
        # Equal when it is the very same wrapper, or when both wrap the same
        # underlying driver node.
        return id(self) == id(other) or (hasattr(other, "base") and self.base == other.base)
    def __hash__(self):
        return hash(self.base)
    def __getitem__(self, name):
        """
        Retrieve the given attribute.
        Args:
            name (str): The attribute to retrieve.
        Returns:
            str: The value of the attribute.
        """
        raise NotImplementedError()
    def __enter__(self):
        # Entering a frame/iframe node switches the session into that frame;
        # entering any other node makes it the session's find scope.
        context = (
            self.session.frame(self) if self.tag_name in {"frame", "iframe"}
            else self.session.scope(self))
        self._contexts.append(context)
        return context.__enter__()
    def __exit__(self, *args):
        # Pop the most recently entered frame/scope and leave it.
        context = self._contexts.pop()
        context.__exit__(*args)
    @property
    def text(self):
        """ str: The text of the node. """
        raise NotImplementedError()
    @property
    def all_text(self):
        """ str: All of the text of the node. """
        raise NotImplementedError()
    @property
    def visible_text(self):
        """ str: Only the visible text of the node. """
        raise NotImplementedError()
    def synchronize(self, func=None, wait=None, errors=()):
        """
        This method is Capybara's primary defense against asynchronicity problems. It works by
        attempting to run a given decorated function until it succeeds. The exact behavior of this
        method depends on a number of factors. Basically there are certain exceptions which, when
        raised from the decorated function, instead of bubbling up, are caught, and the function is
        re-run.
        Certain drivers have no support for asynchronous processes. These drivers run the function,
        and any error raised bubbles up immediately. This allows faster turn around in the case
        where an expectation fails.
        Only exceptions that are :exc:`ElementNotFound` or any subclass thereof cause the block to
        be rerun. Drivers may specify additional exceptions which also cause reruns. This usually
        occurs when a node is manipulated which no longer exists on the page. For example, the
        Selenium driver specifies ``selenium.common.exceptions.StateElementReferenceException``.
        As long as any of these exceptions are thrown, the function is re-run, until a certain
        amount of time passes. The amount of time defaults to :data:`capybara.default_max_wait_time`
        and can be overridden through the ``wait`` argument. This time is compared with the system
        time to see how much time has passed. If the return value of ``time.time()`` is stubbed
        out, Capybara will raise :exc:`FrozenInTime`.
        Args:
            func (Callable, optional): The function to decorate.
            wait (int, optional): Number of seconds to retry this function.
            errors (Tuple[Type[Exception]], optional): Exception types that cause the function to be
                rerun. Defaults to ``driver.invalid_element_errors`` + :exc:`ElementNotFound`.
        Returns:
            Callable: The decorated function, or a decorator function.
        Raises:
            FrozenInTime: If the return value of ``time.time()`` appears stuck.
        """
        def decorator(func):
            @wraps(func)
            def outer(*args, **kwargs):
                seconds = wait if wait is not None else capybara.default_max_wait_time
                def inner():
                    return func(*args, **kwargs)
                # Nested synchronize calls run the function directly; only
                # the outermost call owns the retry loop.
                if self.session.synchronized:
                    return inner()
                else:
                    timer = Timer(seconds)
                    self.session.synchronized = True
                    try:
                        while True:
                            try:
                                return inner()
                            except Exception as e:
                                # Surface any error captured from the app
                                # server before deciding what to do locally.
                                self.session.raise_server_error()
                                if not self._should_catch_error(e, errors):
                                    raise
                                if timer.expired:
                                    raise
                                # Brief backoff before retrying.
                                sleep(0.05)
                                if timer.stalled:
                                    raise FrozenInTime(
                                        "time appears to be frozen, Capybara does not work with "
                                        "libraries which freeze time, consider using time "
                                        "traveling instead")
                                if capybara.automatic_reload:
                                    self.reload()
                    finally:
                        self.session.synchronized = False
            return outer
        if func:
            return decorator(func)
        else:
            return decorator
    def _should_catch_error(self, error, errors=()):
        """
        Returns whether to catch the given error.
        Args:
            error (Exception): The error to consider.
            errors (Tuple[Type[Exception], ...], optional): The exception types that should be
                caught. Defaults to :class:`ElementNotFound` plus any driver-specific invalid
                element errors.
        Returns:
            bool: Whether to catch the given error.
        """
        caught_errors = (
            errors or
            self.session.driver.invalid_element_errors + (ElementNotFound,))
        return isinstance(error, caught_errors)
    def _find_css(self, css):
        # Delegate CSS lookup to the underlying driver node.
        return self.base._find_css(css)
    def _find_xpath(self, xpath):
        # Delegate XPath lookup to the underlying driver node.
        return self.base._find_xpath(xpath)
    # NOTE: this class-body function deliberately shadows the instance method
    # of the same name above -- it is the decorator form, for wrapping node
    # methods so each call runs inside ``self.synchronize``.
    def synchronize(func):
        """ Decorator for :meth:`synchronize`. """
        @wraps(func)
        def outer(self, *args, **kwargs):
            @self.synchronize
            def inner(self, *args, **kwargs):
                return func(self, *args, **kwargs)
            return inner(self, *args, **kwargs)
        return outer
| {
"repo_name": "elliterate/capybara.py",
"path": "capybara/node/base.py",
"copies": "1",
"size": "7848",
"license": "mit",
"hash": 7362634739936522000,
"line_mean": 35.5023255814,
"line_max": 100,
"alpha_frac": 0.5819317023,
"autogenerated": false,
"ratio": 4.753482737734706,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5835414440034706,
"avg_score": null,
"num_lines": null
} |
from functools import wraps
from time import time as now
from flask import make_response
from mmmpaste import app
def runtime(fn):
    """
    Add a header that shows the runtime of the route.

    Wraps the view's result in a response and sets ``X-Runtime`` to the
    elapsed wall-clock time, formatted with six decimal places.
    """
    @wraps(fn)
    def wrapper(*args, **kwargs):
        began = now()
        response = make_response(fn(*args, **kwargs))
        elapsed = now() - began
        response.headers["X-Runtime"] = "{0:.6f}s".format(elapsed)
        return response
    return wrapper
def no_cache(fn):
    """
    Add "Cache-Control: no-cache" header.

    Decorator for Flask views: wraps the view's result in a response object
    and flips the ``no_cache`` cache-control flag on it.
    """
    @wraps(fn)
    def wrapper(*args, **kwargs):
        resp = make_response(fn(*args, **kwargs))
        resp.cache_control.no_cache = True
        return resp
    return wrapper
def cache(fn):
    """
    Add "Cache-Control: s-maxage" header.

    Decorator for Flask views: sets the shared-cache max age from the app's
    ``CACHE_S_MAXAGE`` configuration value.
    """
    @wraps(fn)
    def wrapper(*args, **kwargs):
        resp = make_response(fn(*args, **kwargs))
        resp.cache_control.s_maxage = app.config.get('CACHE_S_MAXAGE')
        return resp
    return wrapper
| {
"repo_name": "ryanc/mmmpaste",
"path": "mmmpaste/filters.py",
"copies": "1",
"size": "1030",
"license": "bsd-2-clause",
"hash": -1487712607201435100,
"line_mean": 22.9534883721,
"line_max": 74,
"alpha_frac": 0.6009708738,
"autogenerated": false,
"ratio": 3.7050359712230216,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9799166899742584,
"avg_score": 0.0013679890560875513,
"num_lines": 43
} |
from functools import wraps
from time import time
from datetime import timedelta
import Queue
class CancelJobException(Exception):
    """Raised by a reporter's progress callbacks once cancellation is requested."""
    pass
class ProgressSection(object):
    """A single named unit of work whose progress is tracked.

    ``on_change`` and ``on_done`` are callbacks injected by the owning
    reporter (see ``ProgressReporter.new_section``); they must be assigned
    before ``inc``/``set_size``/``done`` are used.
    """

    def __init__(self, name):
        self.name = name
        self.size = 100          # total number of steps (default 100)
        self.progress = 0        # steps completed so far
        self.on_change = None    # fired on every progress change
        self.on_done = None      # fired once when the section completes
        self.started = time()
        self.ended = None

    def inc(self):
        """Advance progress by one step and notify the reporter."""
        self.progress = self.progress + 1
        self.on_change()

    def set_size(self, s):
        """Change the total step count and notify the reporter."""
        self.size = s
        self.on_change()

    def done(self):
        """Mark the section complete, record the end time and notify."""
        self.progress = self.size
        self.ended = time()
        self.on_change()
        self.on_done()

    @property
    def took(self):
        """Wall-clock duration between construction and ``done()``."""
        return timedelta(seconds=self.ended - self.started)

    @property
    def percentage(self):
        """Completion percentage, or None when size is 0 (avoids div-by-zero)."""
        if self.size:
            return 100 * self.progress / self.size  # floor division under py2
class ProgressReporter(object):
    """Base reporter: wires sections to callbacks and honours cancellation.

    When a ``cancel_queue`` is supplied, a truthy value received on it causes
    every subsequent progress update to raise :class:`CancelJobException`.
    The base callbacks are otherwise silent.
    """

    def __init__(self, cancel_queue=None):
        self.cancel_queue = cancel_queue
        self.cancel = False

    def check_cancel(self):
        """Poll the cancel queue (non-blocking); raise if cancellation is set."""
        if self.cancel_queue is None:
            return
        if not self.cancel:
            try:
                self.cancel = self.cancel_queue.get_nowait()
            except Queue.Empty:
                pass  # nothing queued -- keep going
        if self.cancel:
            raise CancelJobException('The job was cancelled')

    def on_change(self, section):
        # Every progress change is a cancellation checkpoint.
        self.check_cancel()

    def on_done(self, section):
        pass

    def on_section_add(self, section):
        pass

    def new_section(self, name):
        """Create a :class:`ProgressSection` bound to this reporter."""
        section = ProgressSection(name)
        section.on_change = lambda: self.on_change(section)
        section.on_done = lambda: self.on_done(section)
        return section
class ProgressReporterToPrint(ProgressReporter):
    # Reporter variant that prints progress lines to stdout (Python 2
    # ``print`` statements).  ``delta_ms`` throttles intermediate progress
    # output; when None, only START/DONE lines are printed.
    TEMPLATE = '{section.name}: {msg}'
    def __init__(self, cancel_queue=None, delta_ms=None):
        super(ProgressReporterToPrint, self).__init__(cancel_queue)
        self.delta_ms = delta_ms
        self._last_report = None  # timestamp of the last printed progress line
    def on_section_add(self, section):
        super(ProgressReporterToPrint, self).on_section_add(section)
        print self.TEMPLATE.format(section=section, msg='START')
    def on_change(self, section):
        # Parent performs the cancellation check before we print anything.
        super(ProgressReporterToPrint, self).on_change(section)
        if self.delta_ms is not None:
            ts = time()
            # Throttle: only print when delta_ms milliseconds have elapsed.
            if self._last_report is None or (ts - self._last_report) * 1000 >= self.delta_ms:
                msg = '{section.progress}/{section.size} [{section.percentage}%]'.format(section=section)
                print self.TEMPLATE.format(section=section, msg=msg)
                self._last_report = ts
    def on_done(self, section):
        super(ProgressReporterToPrint, self).on_done(section)
        msg = 'DONE in {section.took}'.format(section=section)
        print self.TEMPLATE.format(section=section, msg=msg)
# Module-level singleton reporter; bound lazily by ``init``/``progress_log``.
reporter = None
def init(progress_reporter):
    """Install ``progress_reporter`` as the module-wide singleton."""
    # bind a progress reporter to global object
    global reporter
    reporter = progress_reporter
def progress_log(section):
    """Decorator factory: hand the wrapped call a fresh progress section.

    The decorated callable receives a ``progress`` keyword argument holding a
    new section named ``section``, created from the module-level reporter.
    A plain (silent) :class:`ProgressReporter` is installed on first use if
    none was bound via ``init``.
    """
    def decorator(method):
        @wraps(method)
        def wrapper(*args, **kwargs):
            global reporter
            if reporter is None:
                init(ProgressReporter())  # fall back to the silent default
            kwargs['progress'] = reporter.new_section(section)
            return method(*args, **kwargs)
        return wrapper
    return decorator
| {
"repo_name": "greginvm/grslicer",
"path": "grslicer/util/progress.py",
"copies": "1",
"size": "3601",
"license": "mit",
"hash": 4318803607644221400,
"line_mean": 24.3591549296,
"line_max": 105,
"alpha_frac": 0.5942793668,
"autogenerated": false,
"ratio": 4.129587155963303,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5223866522763302,
"avg_score": null,
"num_lines": null
} |
from functools import wraps
from time import time
from txmongo.errors import TimeExceeded
from twisted.internet import defer, reactor
def timeout(func):
    """Decorator to add timeout to Deferred calls.

    Callers may pass ``timeout`` (seconds from now) or ``deadline`` (absolute
    epoch time); ``deadline`` takes precedence when both are given.  The
    computed deadline is forwarded to the wrapped function as ``_deadline``
    so nested calls can honour it.
    """
    @wraps(func)
    def _timeout(*args, **kwargs):
        now = time()
        deadline = kwargs.pop("deadline", None)
        seconds = kwargs.pop("timeout", None)
        if deadline is None and seconds is not None:
            deadline = now + seconds
        if deadline is not None and deadline < now:
            # Already expired before starting -- fail synchronously.
            raise TimeExceeded("TxMongo: run time exceeded by {0}s.".format(now-deadline))
        kwargs['_deadline'] = deadline
        raw_d = func(*args, **kwargs)
        if deadline is None:
            # No limit requested: hand back the raw Deferred untouched.
            return raw_d
        if seconds is None and deadline is not None and deadline - now > 0:
            seconds = deadline - now
        # Race the real Deferred against a timer Deferred.
        timeout_d = defer.Deferred()
        times_up = reactor.callLater(seconds, timeout_d.callback, None)
        def on_ok(result):
            # ``result`` is (value, index) from the DeferredList.
            if timeout_d.called:
                # The timer won: cancel the slow operation and report.
                raw_d.cancel()
                raise TimeExceeded("TxMongo: run time of {0}s exceeded.".format(seconds))
            else:
                times_up.cancel()
                return result[0]
        def on_fail(failure):
            # fireOnOneErrback wraps the error in FirstError; the timer
            # Deferred (index 1) never errbacks, so the index must be 0.
            failure.trap(defer.FirstError)
            assert failure.value.index == 0
            times_up.cancel()
            failure.value.subFailure.raiseException()
        return defer.DeferredList([raw_d, timeout_d], fireOnOneCallback=True,
                                  fireOnOneErrback=True, consumeErrors=True).addCallbacks(on_ok, on_fail)
    return _timeout
def check_deadline(_deadline):
    """Raise ``TimeExceeded`` when the given absolute deadline has passed.

    A ``None`` deadline means "no limit" and always passes.
    """
    if _deadline is None:
        return
    if _deadline < time():
        raise TimeExceeded("TxMongo: now '{0}', deadline '{1}'".format(time(), _deadline))
def get_err(document, default=None):
    """Build an error string from a server error reply document.

    Joins the first truthy of ``err``/``codeName`` with ``errmsg`` using
    ': '; returns ``default`` when neither field carries a value.
    """
    code = document.get("err") or document.get("codeName")
    message = document.get("errmsg")
    parts = [piece for piece in (code, message) if piece]
    return ": ".join(parts) or default
| {
"repo_name": "twisted/txmongo",
"path": "txmongo/utils/__init__.py",
"copies": "1",
"size": "2034",
"license": "apache-2.0",
"hash": 3253722569089378300,
"line_mean": 30.78125,
"line_max": 105,
"alpha_frac": 0.6032448378,
"autogenerated": false,
"ratio": 4.211180124223603,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.001368776135780092,
"num_lines": 64
} |
from functools import wraps
from time import time
from atomic import AtomicLong
from metrology.stats import EWMA
from metrology.utils import now
def ticker(method):
    """Method decorator: advance the meter's clock before delegating.

    Calls ``self._tick()`` ahead of the wrapped method so moving averages
    are up to date when read or updated.
    """
    @wraps(method)
    def ticked(self, *args, **kwargs):
        self._tick()
        return method(self, *args, **kwargs)
    return ticked
class Meter(object):
    """A meter measures the rate of events over time
    (e.g., "requests per second").
    In addition to the mean rate, you can also track 1, 5 and 15 minutes moving
    averages ::
        meter = Metrology.meter('requests')
        meter.mark()
        meter.count
    """
    def __init__(self, average_class=EWMA):
        # NOTE(review): ``average_class`` is accepted but never used -- EWMA
        # is referenced directly below.  Confirm before relying on it.
        self.counter = AtomicLong(0)
        # NOTE(review): uses ``now()`` here while clear()/mean_rate use
        # ``time()``; both must share the same epoch/units -- confirm
        # metrology.utils.now.
        self.start_time = now()
        self.last_tick = AtomicLong(self.start_time)
        self.interval = EWMA.INTERVAL
        self.m1_rate = EWMA.m1()
        self.m5_rate = EWMA.m5()
        self.m15_rate = EWMA.m15()
    def _tick(self):
        # Advance the EWMAs by however many whole intervals have elapsed.
        old_tick, new_tick = self.last_tick.value, time()
        age = new_tick - old_tick
        ticks = int(age / self.interval)
        # Snap the recorded tick time onto an interval boundary.
        new_tick = old_tick + int(ticks * self.interval)
        if ticks and self.last_tick.compare_and_swap(old_tick, new_tick):
            # The CAS guarantees only one caller replays the missed ticks.
            for _ in range(ticks):
                self.tick()
    def __call__(self, *args, **kwargs):
        # Decorator form: each call to the decorated function is marked.
        # Returns None when called without a callable first argument.
        if args and hasattr(args[0], '__call__'):
            _orig_func = args[0]
            def _decorator(*args, **kwargs):
                with self:
                    return _orig_func(*args, **kwargs)
            return _decorator
    def __enter__(self):
        pass
    def __exit__(self, exc, exv, trace):
        # Mark on exit so the event is counted even if the body raised.
        self.mark()
    @property
    def count(self):
        """Returns the total number of events that have been recorded."""
        return self.counter.value
    def clear(self):
        # Reset the counter, start time, and all moving averages.
        self.counter.value = 0
        self.start_time = time()
        self.m1_rate.clear()
        self.m5_rate.clear()
        self.m15_rate.clear()
    @ticker
    def mark(self, value=1):
        """Record an event with the meter. By default it will record one event.
        :param value: number of event to record
        """
        self.counter += value
        self.m1_rate.update(value)
        self.m5_rate.update(value)
        self.m15_rate.update(value)
    def tick(self):
        # Advance each moving average by one interval.
        self.m1_rate.tick()
        self.m5_rate.tick()
        self.m15_rate.tick()
    @property
    @ticker
    def one_minute_rate(self):
        """Returns the one-minute average rate."""
        return self.m1_rate.rate
    @property
    @ticker
    def five_minute_rate(self):
        """Returns the five-minute average rate."""
        return self.m5_rate.rate
    @property
    @ticker
    def fifteen_minute_rate(self):
        """Returns the fifteen-minute average rate."""
        return self.m15_rate.rate
    @property
    def mean_rate(self):
        """
        Returns the mean rate of the events since the start of the process.
        """
        if self.counter.value == 0:
            return 0.0
        else:
            elapsed = time() - self.start_time
            return self.counter.value / elapsed
    def stop(self):
        # No background resources to release for a plain meter.
        pass
from functools import wraps
from time import time
from atomic import Atomic
from metrology.stats import EWMA
def ticker(method):
    """Method decorator: advance the meter's clock before delegating.

    Calls ``self._tick()`` ahead of the wrapped method so moving averages
    are current when read or updated.
    """
    @wraps(method)
    def ticked(self, *args, **kwargs):
        self._tick()
        return method(self, *args, **kwargs)
    return ticked
class Meter(object):
    """A meter measures the rate of events over time (e.g., "requests per second").
    In addition to the mean rate, you can also track 1, 5 and 15 minutes moving averages ::
        meter = Metrology.meter('requests')
        meter.mark()
        meter.count
    """
    def __init__(self, average_class=EWMA):
        # NOTE(review): ``average_class`` is accepted but never used -- EWMA
        # is referenced directly below.  Confirm before relying on it.
        self.counter = Atomic(0)
        self.start_time = time()
        self.last_tick = Atomic(self.start_time)
        self.interval = EWMA.INTERVAL
        self.m1_rate = EWMA.m1()
        self.m5_rate = EWMA.m5()
        self.m15_rate = EWMA.m15()
    def _tick(self):
        # Advance the EWMAs by however many whole intervals have elapsed;
        # the compare-and-swap guarantees only one caller replays them.
        old_tick, new_tick = self.last_tick.value, time()
        age = new_tick - old_tick
        ticks = int(age / self.interval)
        new_tick = old_tick + (ticks * self.interval)
        if ticks and self.last_tick.compare_and_swap(old_tick, new_tick):
            for _ in range(ticks):
                self.tick()
    @property
    def count(self):
        """Returns the total number of events that have been recorded."""
        return self.counter.value
    def clear(self):
        # Reset the counter, start time, and all moving averages.
        self.counter.value = 0
        self.start_time = time()
        self.m1_rate.clear()
        self.m5_rate.clear()
        self.m15_rate.clear()
    @ticker
    def mark(self, value=1):
        """Record an event with the meter. By default it will record one event.
        :param value: number of event to record
        """
        # Atomic read-modify-write keeps the counter consistent under threads.
        self.counter.update(lambda v: v + value)
        self.m1_rate.update(value)
        self.m5_rate.update(value)
        self.m15_rate.update(value)
    def tick(self):
        # Advance each moving average by one interval.
        self.m1_rate.tick()
        self.m5_rate.tick()
        self.m15_rate.tick()
    @property
    @ticker
    def one_minute_rate(self):
        """Returns the one-minute average rate."""
        return self.m1_rate.rate
    @property
    @ticker
    def five_minute_rate(self):
        """Returns the five-minute average rate."""
        return self.m5_rate.rate
    @property
    @ticker
    def fifteen_minute_rate(self):
        """Returns the fifteen-minute average rate."""
        return self.m15_rate.rate
    @property
    def mean_rate(self):
        """Returns the mean rate of the events since the start of the process."""
        if self.counter.value == 0:
            return 0.0
        else:
            elapsed = time() - self.start_time
            return self.counter.value / elapsed
    def stop(self):
        # No background resources to release for a plain meter.
        pass
"repo_name": "zenoss/metrology",
"path": "metrology/instruments/meter.py",
"copies": "1",
"size": "2734",
"license": "mit",
"hash": -6441167025395991000,
"line_mean": 25.8039215686,
"line_max": 91,
"alpha_frac": 0.5881492319,
"autogenerated": false,
"ratio": 3.6550802139037435,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9741515257123676,
"avg_score": 0.0003428377360134581,
"num_lines": 102
} |
from functools import wraps
from time import time
from django_statsd.clients import statsd
class UnauthorizedException(Exception):
    """Failure to log into the email server (authentication rejected)."""
    pass
class NewsletterException(Exception):
    """Error while talking to the email server.

    Carries the backend's ``error_code`` and the HTTP ``status_code`` in
    addition to the plain exception message.
    """

    def __init__(self, msg=None, error_code=None, status_code=None):
        super(NewsletterException, self).__init__(msg)
        self.error_code = error_code
        self.status_code = status_code
class NewsletterNoResultsException(NewsletterException):
    """
    No results were returned from the mail server (but the request
    didn't report any errors).
    """
    pass
def get_timer_decorator(prefix):
    """Return a decorator that reports call timing and counts to statsd.

    Metrics are emitted under ``prefix`` and ``prefix.<function name>``.
    They are recorded for successful calls and for ``NewsletterException``
    failures (which are re-raised); any other exception propagates without
    emitting metrics.
    """
    def decorator(f):
        @wraps(f)
        def wrapped(*args, **kwargs):
            started = time()
            def emit_metrics():
                elapsed_ms = int((time() - started) * 1000)
                per_func = '{}.{}'.format(prefix, f.__name__)
                statsd.timing(prefix + '.timing', elapsed_ms)
                statsd.timing(per_func + '.timing', elapsed_ms)
                statsd.incr(prefix + '.count')
                statsd.incr(per_func + '.count')
            try:
                resp = f(*args, **kwargs)
            except NewsletterException:
                emit_metrics()
                raise
            emit_metrics()
            return resp
        return wrapped
    return decorator
| {
"repo_name": "glogiotatidis/basket",
"path": "basket/news/backends/common.py",
"copies": "1",
"size": "1500",
"license": "mpl-2.0",
"hash": -2446390408270450700,
"line_mean": 25.7857142857,
"line_max": 82,
"alpha_frac": 0.586,
"autogenerated": false,
"ratio": 4.451038575667655,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.0002151462994836489,
"num_lines": 56
} |
from functools import wraps
from time import time
from flask import request, g, Response, session, jsonify
import redis
__author__ = "David Lawrence"
__copyright__ = "Copyright 2015, David Lawrence"
__credits__ = ["David Lawrence"]
__license__ = "MIT"
__version__ = "0.1"
__maintainer__ = "David Lawrence"
__email__ = "davi.lawrence@gmail.com"
__status__ = "Development"
# Module-level redis client used by the rate limiter below.  The previous
# ``try: ... except: raise`` wrapper was a no-op (it caught everything and
# immediately re-raised), so it has been removed; note redis.Redis() does
# not open a connection eagerly anyway.
r = redis.Redis(host='localhost', port=9999, db=0)
def limit(requests=20, window=60, by="ip", group=None):
    """Flask view decorator: fixed-window rate limit backed by redis.

    Allows ``requests`` calls per ``window`` seconds per remote address; an
    over-limit call gets a plain 429 response.  ``group`` names the limit
    bucket, defaulting to the request's endpoint.  ``by`` is currently
    unused (keying is always by remote address) -- kept for compatibility.

    Fixes vs. previous revision: the ``group`` argument was immediately
    shadowed (making it dead), and a stray Python-2 debug ``print`` has been
    removed.
    """
    def decorator(f):
        @wraps(f)
        def wrapped(*args, **kwargs):
            # Honour an explicit group; otherwise bucket by endpoint.
            bucket = group if group is not None else request.endpoint
            key = ":".join(["rl", bucket, request.remote_addr])
            try:
                remaining = requests - int(r.get(key))
            except (ValueError, TypeError):
                # Missing or garbled counter: start a fresh window.
                remaining = requests
                r.set(key, 0)
            ttl = r.ttl(key)
            if not ttl:
                r.expire(key, window)
                ttl = window
            # Expose (limit, remaining, reset-time) for response headers.
            g.view_limits = (requests, remaining - 1, time() + ttl)
            if remaining > 0:
                r.incr(key, 1)
                return f(*args, **kwargs)
            return Response("Too Many Requests", 429)
        return wrapped
    return decorator
def requires_auth(f):
    """View decorator: only run ``f`` for logged-in users.

    A user counts as logged in when 'username' is present in the Flask
    session; otherwise a JSON ``{"error": 1}`` payload is returned.
    """
    @wraps(f)
    def wrap_auth(*args, **kwargs):
        if 'username' not in session:
            return jsonify(error=1)
        return f(*args, **kwargs)
    return wrap_auth
def verify_needed_field(fields):
    """Decorator factory: reject requests missing any required form field.

    Every name in ``fields`` must be present in ``request.form``; otherwise
    a JSON ``{"result": false}`` payload is returned instead of calling the
    view.
    """
    def wrap_verify(f):
        @wraps(f)
        def field_verifier(*args, **kwargs):
            if any(name not in request.form for name in fields):
                return jsonify(result=False)
            return f(*args, **kwargs)
        return field_verifier
    return wrap_verify
def api_jsonify(f):
    """Decorator translating an API helper's tuple result into JSON.

    The wrapped callable must return a tuple whose first element is a
    success flag; an optional second element is a dict payload.  On success
    the payload (plus ``error: False``) is returned; on a falsy flag, or if
    the handler raises, ``{"error": True}`` is returned.

    Fix vs. previous revision: the bare ``except:`` also trapped SystemExit
    and KeyboardInterrupt; narrowed to ``except Exception``.
    """
    @wraps(f)
    def wrap_jsonify(*args, **kwargs):
        try:
            res = f(*args, **kwargs)
            is_successful = res[0]
            if not is_successful:
                return jsonify(error=True)
            if len(res) > 1:
                data = res[1]
                data['error'] = False
                return jsonify(data)
            return jsonify(error=False)
        except Exception:
            # Any handler error still maps to a generic JSON error payload.
            return jsonify(error=True)
    return wrap_jsonify
| {
"repo_name": "Hellorin/gaming-challenges-api",
"path": "fdk/apps/gaming_challenges/controllers/api/utils.py",
"copies": "1",
"size": "2421",
"license": "mit",
"hash": -8968448866598390000,
"line_mean": 25.3152173913,
"line_max": 65,
"alpha_frac": 0.5159025196,
"autogenerated": false,
"ratio": 4.048494983277592,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.0020380434782608695,
"num_lines": 92
} |
from functools import wraps
from time import time
import collections
class RateLimitExceeded(Exception):
    """Raised when a message or call would exceed a token bucket's capacity."""
    pass
class TokenBucket(object):
    """Token-bucket rate limiter.

    The bucket holds at most ``capacity`` tokens and refills continuously at
    ``fill_rate`` tokens per second.  Based on the classic ActiveState
    recipe 511490.
    """

    def __init__(self, tokens, fill_rate):
        """``tokens`` is the bucket capacity; ``fill_rate`` is the refill
        rate in tokens/second."""
        self.capacity = float(tokens)
        self._tokens = float(tokens)
        self.fill_rate = float(fill_rate)
        self.timestamp = time()

    def can_consume(self, tokens):
        """True when the bucket currently holds at least ``tokens``."""
        return tokens <= self.tokens

    def consume(self, tokens):
        """Take ``tokens`` from the bucket.

        Returns True on success, False when not enough tokens are available
        (in which case nothing is consumed).
        """
        if not self.can_consume(tokens):
            return False
        self._tokens -= tokens
        return True

    def get_tokens(self):
        """Refill according to elapsed time, then return the current level."""
        if self._tokens < self.capacity:
            current = time()
            refill = self.fill_rate * (current - self.timestamp)
            self._tokens = min(self.capacity, self._tokens + refill)
            self.timestamp = current
        return self._tokens

    tokens = property(get_tokens)
class MessageTokenBucket:
    """Multi-level rate limiting for chat messages.

    Private messages are limited per user only.  Public messages must pass
    the global, per-server, per-channel and per-user buckets together: if
    any bucket lacks a token, :class:`RateLimitExceeded` is raised and no
    bucket is charged.
    """

    def __init__(self, global_tokens, server_tokens, channel_tokens, user_tokens, fill_rate):
        self.all = TokenBucket(global_tokens, fill_rate)
        self.servers = collections.defaultdict(lambda: TokenBucket(server_tokens, fill_rate))
        self.channels = collections.defaultdict(lambda: TokenBucket(channel_tokens, fill_rate))
        self.users = collections.defaultdict(lambda: TokenBucket(user_tokens, fill_rate))

    def consume(self, message):
        """Account for ``message``; raise RateLimitExceeded when over limit."""
        if message.channel.is_private:
            # Direct messages are only throttled per author.
            return self.users[message.author.id].consume(1)
        buckets = {
            "global": self.all,
            "server " + message.channel.server.name: self.servers[message.channel.server.id],
            "channel " + message.channel.name: self.channels[message.channel.id],
            "user " + message.author.name: self.users[message.author.id],
        }
        # First pass: verify every bucket has capacity before charging any.
        for label, bucket in buckets.items():
            if not bucket.can_consume(1):
                raise RateLimitExceeded("Rate limit exceeded for {} ({}/{} with fill rate={})".format(
                    label,
                    bucket.tokens,
                    bucket.capacity,
                    bucket.fill_rate
                ))
        # Second pass: charge one token everywhere.
        for bucket in buckets.values():
            bucket.consume(1)
def rate_limit(burst_size=10, fill_rate=0.5):
    """Decorator factory: throttle calls through a shared token bucket.

    Each invocation of the wrapped callable costs one token; when the
    bucket is empty, :class:`RateLimitExceeded` is raised instead of
    calling it.
    """
    shared_bucket = TokenBucket(burst_size, fill_rate)
    def decorator(f):
        @wraps(f)
        def wrapper(*args, **kwargs):
            if not shared_bucket.consume(1):
                raise RateLimitExceeded()
            return f(*args, **kwargs)
        return wrapper
    return decorator
| {
"repo_name": "sk89q/Plumeria",
"path": "plumeria/util/ratelimit.py",
"copies": "1",
"size": "3180",
"license": "mit",
"hash": -5318379132595142000,
"line_mean": 32.125,
"line_max": 106,
"alpha_frac": 0.5883647799,
"autogenerated": false,
"ratio": 4.350205198358413,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5438569978258413,
"avg_score": null,
"num_lines": null
} |
from functools import wraps
from time import time, sleep
import warnings
import sys # for verbose exception information
from inspect import getargspec
def stringified_warnings(warn_list):
    """Return the ``str()`` form of each captured warning in ``warn_list``."""
    return list(map(str, warn_list))
def meta_func(ignore_errors=False):
    """Decorator factory recording call metadata for the wrapped function.

    Each call captures the argspec, arguments, timings, captured warnings
    and any exception into a dict (shared per decoration, overwritten on
    every call) retrievable via ``wrapped.log_info()``.  When
    ``ignore_errors`` is false (the default), a captured exception is
    re-raised after bookkeeping completes; otherwise None is returned.

    Fixes vs. previous revision: re-raising used ``error_info[0]`` (the
    exception *type*), which constructed a fresh instance and discarded the
    original message/arguments -- we now re-raise the caught instance
    (``error_info[1]``).  The bare ``except:`` was also narrowed to
    ``except Exception`` so SystemExit/KeyboardInterrupt propagate.
    """
    _LogInfoDict = dict()
    def decorating_function(user_function):
        @wraps(user_function)
        def wrapper(*args, **kwargs):
            _LogInfoDict['argspec'] = getargspec(user_function)
            _LogInfoDict['args'] = args if args else None
            _LogInfoDict['kwargs'] = kwargs if kwargs else None
            _LogInfoDict['error_info'] = None
            _LogInfoDict['func_name'] = user_function.__name__
            with warnings.catch_warnings(record=True) as w:
                _LogInfoDict['time_started'] = time()
                warnings.simplefilter("always")
                try:
                    _LogInfoDict['return_value'] = user_function(*args, **kwargs)
                except Exception:
                    _LogInfoDict['error_info'] = sys.exc_info()
                    _LogInfoDict['return_value'] = None
                _LogInfoDict['time_ended'] = time()
                _LogInfoDict['time_elapsed'] = round(_LogInfoDict['time_ended'] - _LogInfoDict['time_started'], 6)  # millionths of sec
                _LogInfoDict['warnings'] = stringified_warnings(w) if w else None
            if ignore_errors is False and _LogInfoDict['error_info']:
                # Re-raise the original exception *instance* (index 1), not
                # the type (index 0), preserving its message and context.
                raise _LogInfoDict['error_info'][1]
            return _LogInfoDict['return_value']
        def log_info():
            """Return the shared metadata dict for the most recent call."""
            return _LogInfoDict
        wrapper.__wrapped__ = user_function
        wrapper.log_info = log_info
        return wrapper
    return decorating_function
"repo_name": "gstaubli/meta_func",
"path": "meta_func/meta_func.py",
"copies": "1",
"size": "1466",
"license": "mit",
"hash": 396362320809397900,
"line_mean": 34.7804878049,
"line_max": 121,
"alpha_frac": 0.7039563438,
"autogenerated": false,
"ratio": 3.2723214285714284,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.44762777723714287,
"avg_score": null,
"num_lines": null
} |
from functools import wraps
from transmute_core.exceptions import APIException, NoSerializerFound
from transmute_core.function.signature import NoDefault
from transmute_core import ParamExtractor, NoArgument
from sanic.response import HTTPResponse
from sanic.exceptions import SanicException
# Content type assumed when a request carries no Content-Type header.
DEFAULT_HTTP_CONTENT_TYPE = "application/octet-stream"
def create_handler(transmute_func, context):
    """Build a Sanic request handler around a transmute function.

    The handler extracts parameters from the incoming request, awaits the
    wrapped coroutine, and serializes either its result or the raised error
    through ``transmute_func.process_result``.  Sanic's own exceptions are
    converted to ``APIException`` carrying their status code (400 when the
    exception has none set).
    """
    @wraps(transmute_func.raw_func)
    async def handler(request, *args, **kwargs):
        result = None
        exc = None
        try:
            args, kwargs = await extract_params(request, context, transmute_func)
            result = await transmute_func.raw_func(*args, **kwargs)
        except SanicException as sanic_err:
            # Preserve Sanic's status code when it has one.
            exc = APIException(message=str(sanic_err), code=sanic_err.status_code or 400)
        except Exception as err:
            exc = err
        content_type = request.headers.get("Content-Type", DEFAULT_HTTP_CONTENT_TYPE)
        outcome = transmute_func.process_result(context, result, exc, content_type)
        return HTTPResponse(
            status=outcome["code"],
            content_type=outcome["content-type"],
            headers=outcome["headers"],
            body_bytes=outcome["body"],
        )
    handler.transmute_func = transmute_func
    return handler
async def extract_params(request, context, transmute_func):
    """Pull the transmute function's parameters out of a Sanic request."""
    payload = request.body
    content_type = request.headers.get("Content-Type", DEFAULT_HTTP_CONTENT_TYPE)
    extractor = ParamExtractorSanic(request, payload)
    return extractor.extract_params(context, transmute_func, content_type)
class ParamExtractorSanic(ParamExtractor):
    """ParamExtractor implementation backed by a Sanic request object."""

    def __init__(self, request, body):
        self._request = request
        self._body = body

    def _get_framework_args(self):
        # Framework-specific objects made available to the handler.
        return {"request": self._request}

    @property
    def body(self):
        return self._body

    def _query_argument(self, key, is_list):
        query = self._request.args
        if key not in query:
            return NoArgument
        return query.getlist(key) if is_list else query.get(key)

    def _header_argument(self, key):
        return self._request.headers.get(key, NoArgument)

    def _path_argument(self, key):
        # The third element of router.get(...) holds the path parameters.
        path_kwargs = self._request.app.router.get(self._request)[2]
        return path_kwargs.get(key, NoArgument)
| {
"repo_name": "yunstanford/sanic-transmute",
"path": "sanic_transmute/handler.py",
"copies": "1",
"size": "2457",
"license": "mit",
"hash": -91322083733984720,
"line_mean": 31.3289473684,
"line_max": 85,
"alpha_frac": 0.6402116402,
"autogenerated": false,
"ratio": 4.136363636363637,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5276575276563636,
"avg_score": null,
"num_lines": null
} |
from functools import wraps
from tryhaskell import TryHaskell
from helga.plugins import command
def clean_output(f):
    """Decorator that normalizes a command's reply for single-line output.

    TryHaskell errors become an "Uh oh: ..." message; newlines are replaced
    with spaces and the reply is capped at 300 characters so only one
    message is sent.  A falsy result passes through untouched.
    """
    @wraps(f)
    def wrapper(*args, **kwargs):
        try:
            reply = f(*args, **kwargs)
        except TryHaskell.Error as e:
            reply = 'Uh oh: ' + e.message
        if not reply:
            return reply
        return reply.replace('\n', ' ')[:300]
    return wrapper
def show_type(result):
    """Format an evaluation result as ``expr :: type`` (expr is stripped)."""
    parts = [result.expr.strip(), result.type]
    return ' :: '.join(parts)
def show_value(result, display_errors):
    """Render an evaluation result as a single reply string.

    Failed evaluations yield ``ERROR: ...`` (or None when errors are
    suppressed).  Successful ones try stdout, then the value, and finally
    fall back to showing the expression's type.
    """
    if not result.ok:
        if not display_errors:
            return None
        # Collapse runs of whitespace into single spaces.
        collapsed = ' '.join(result.value.split())
        return 'ERROR: ' + collapsed
    if result.stdout:
        return ' '.join(result.stdout)
    return result.value if result.value else show_type(result)
@command('haskell', aliases=['h'],
         help='Run haskell expressions. Usage: helga h(askell) (:t) <expression>')
@clean_output
def haskell(client, channel, nick, message, cmd, args):
    """Evaluate a Haskell expression, or show its type when prefixed ':t'."""
    # Only show errors when using the explicit !haskell command.
    display_errors = cmd == 'haskell'
    if args and args[0] == ':t':
        _, expression = message.split(':t', 1)
        result = TryHaskell.get(expression)
        if result.ok:
            return show_type(result)
        return show_value(result, display_errors)
    return show_value(TryHaskell.get(' '.join(args)), display_errors)
| {
"repo_name": "carymrobbins/helga-haskell",
"path": "helga_haskell.py",
"copies": "1",
"size": "1471",
"license": "bsd-3-clause",
"hash": 5710476609196519000,
"line_mean": 30.9782608696,
"line_max": 82,
"alpha_frac": 0.6240652617,
"autogenerated": false,
"ratio": 3.5965770171149143,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9719332692644144,
"avg_score": 0.00026191723415400735,
"num_lines": 46
} |
from functools import wraps
from types import FunctionType
from typing import Generator, Callable, Iterable, overload
def filled_iter(iter, filler):
    """Yield the elements of *iter*, inserting *filler* between each pair."""
    for index, element in enumerate(iter):
        if index > 0:
            yield filler
        yield element
class LazilyJoined:
    """A string assembled lazily from multiple components.

    NOTE: Just like a generator, it will be exhausted after the first call!
    """
    _components: Iterable

    def __init__(self, components: Iterable, glue: str = ""):
        self.glue = glue
        self._components = components
        self.exhausted = False

    @property
    def _stringified_components(self):
        # None components render as the empty string.
        return ("" if component is None else str(component)
                for component in self._components)

    def __str__(self):
        self._mark_exhausted()
        return self.glue.join(self._stringified_components)

    def __iter__(self):
        self._mark_exhausted()
        components = self._stringified_components
        if not self.glue:
            return iter(components)
        return filled_iter(components, filler=self.glue)

    def _mark_exhausted(self):
        if self.exhausted:
            raise RuntimeError("LazyJoined object already exhausted!"
                               " You may call __str__ or __iter__ only once."
                               " For re-use, turn it into a list.")
        self.exhausted = True
# A decoratable function: a generator function yielding string fragments.
DecoratedInType = Callable[..., (Generator[str, None, None])]
# The decorated form: same call signature, but returning a LazilyJoined.
DecoratedOutType = Callable[..., LazilyJoined]
# Typing overload for bare usage: ``@lazy_join`` directly on a function.
@overload
def lazy_join(func_or_glue: DecoratedInType) -> DecoratedOutType:
    ...
# Typing overload for parametrized usage: ``@lazy_join(", ")``.
@overload
def lazy_join(func_or_glue: str) -> Callable[[DecoratedInType], DecoratedOutType]:
    ...
def lazy_join(func_or_glue):
    """Decorate a generator function so that it returns a LazilyJoined.

    Supports both bare usage (``@lazy_join``) and parametrized usage with a
    glue string (``@lazy_join(", ")``).
    """
    # isinstance instead of ``type(x) == FunctionType``: idiomatic, and also
    # correct for subclasses of FunctionType.
    if isinstance(func_or_glue, FunctionType):
        # Bare decorator: wrap the function with the default (empty) glue.
        return LazyJoinDecorator()(func=func_or_glue)
    # Called with a glue string: return the configured decorator.
    return LazyJoinDecorator(glue=func_or_glue)
class LazyJoinDecorator:
    """Decorator object wrapping generator functions into LazilyJoined results."""

    def __init__(self, glue: str = ""):
        self.glue = glue

    def __call__(self, func: DecoratedInType) -> DecoratedOutType:
        @wraps(func)
        def wrapped(*args, **kwargs):
            # self.glue is looked up at call time, so later changes to the
            # decorator instance are honored.
            return LazilyJoined(func(*args, **kwargs), glue=self.glue)

        return wrapped
| {
"repo_name": "lukasjuhrich/pycroft",
"path": "web/blueprints/helpers/lazy_join.py",
"copies": "1",
"size": "2295",
"license": "apache-2.0",
"hash": 6513167815679903000,
"line_mean": 28.8051948052,
"line_max": 82,
"alpha_frac": 0.6222222222,
"autogenerated": false,
"ratio": 3.768472906403941,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4890695128603941,
"avg_score": null,
"num_lines": null
} |
from functools import wraps
from types import MethodType
from sys import version_info
if version_info < (3, 0, 0):
from inspect import getargspec
else:
from inspect import getfullargspec as getargspec
try:
from collections.abc import Callable
except ImportError:
from collections import Callable
from pyvalid.__exceptions import InvalidArgumentNumberError, ArgumentValidationError
from pyvalid.switch import is_enabled
class Accepts(Callable):
    """
    ``pyvalid.accepts(*allowed_arg_values, **allowed_kwargs_values)``
    +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
    The decorator which validates input parameters of the wrapped function.
    To use it, we need to specify the list of allowed types or values. If the function’s
    input doesn’t match the allowed types/values, one of the following errors will be
    thrown:
    * ``pyvalid.ArgumentValidationError`` — when the actual type/value of the function’s
    argument is different from the expected one;
    * ``pyvalid.InvalidArgumentNumberError`` — when the number/position of function’s
    arguments is incorrect.
    Examples of usage:
    Let's define the ``multiply`` function, which accepts only ``int`` values, and see
    how does it work with other types.
    .. code-block:: python
    from pyvalid import accepts
    @accepts(int, int)
    def multiply(num_1, num_2):
    return num_1 * num_2
    multiply(4, 2)
    # Returns 8.
    multiply(3.14, 8)
    # Raises the ArgumentValidationError exception, since the 1st argument is the
    # float value, when we're expecting int values only.
    multiply(3, 'pyvalid')
    # Raises the ArgumentValidationError exception, since the 2nd argument is the
    # str value, when we're expecting int values only.
    multiply(128)
    # Raises the InvalidArgumentNumberError exception, since the second argument
    # is missing.
    """
    def __init__(self, *allowed_arg_values, **allowed_kwargs_values):
        # Allowed values/types for positional arguments, in position order.
        self.allowed_arg_values = allowed_arg_values
        # Allowed values/types for keyword arguments, keyed by name.
        self.allowed_kwargs_values = allowed_kwargs_values
        # Rebuilt by __scan_func on every call: (arg_name, allowed_values) pairs.
        self.allowed_params = list()
        # Indices into allowed_params of arguments that may be omitted.
        self.optional_args = list()
    def __call__(self, func):
        @wraps(func)
        def decorator_wrapper(*func_args, **func_kwargs):
            # Validation is skipped when pyvalid is switched off or when no
            # constraints were supplied to the decorator.
            perform_validation = all((
                is_enabled(),
                self.allowed_arg_values or self.allowed_kwargs_values
            ))
            if perform_validation:
                # Forget all information about function arguments.
                self.allowed_params[:] = list()
                self.optional_args[:] = list()
                # Collect information about fresh arguments.
                args_info = getargspec(func)
                self.__scan_func(args_info)
                self.__pep_0468_fix(func)
                # Validate function arguments.
                self.__validate_args(func, func_args, func_kwargs)
            # Call function.
            return func(*func_args, **func_kwargs)
        return decorator_wrapper
    def __wrap_allowed_val(self, value):
        """Wrap allowed value in the list if not wrapped yet.
        """
        if isinstance(value, tuple):
            value = list(value)
        elif not isinstance(value, list):
            value = [value]
        return value
    def __scan_func(self, args_info):
        """Collects information about allowed values in the following format:
        .. code-block:: python
        (
        (<argument name>, <allowed types and values>),
        (<argument name>, <allowed types and values>),
        ...
        )
        Args:
        args_info (inspect.FullArgSpec):
        Information about function arguments.
        """
        # Process args.
        for i, allowed_val in enumerate(self.allowed_arg_values):
            allowed_val = self.__wrap_allowed_val(allowed_val)
            # Add default value (if exists) in list of allowed values.
            if args_info.defaults:
                # defaults align with the *tail* of args; def_range >= 0 means
                # argument i is covered by a default and hence optional.
                def_range = len(args_info.defaults) - len(args_info.args[i:])
                if def_range >= 0:
                    self.optional_args.append(i)
                    default_value = args_info.defaults[def_range]
                    allowed_val.append(default_value)
            # Try to detect current argument name.
            if len(args_info.args) > i:
                arg_name = args_info.args[i]
            else:
                arg_name = None
                self.optional_args.append(i)
            # Save info about current argument and his allowed values.
            self.allowed_params.append((arg_name, allowed_val))
        # Process kwargs.
        for arg_name, allowed_val in self.allowed_kwargs_values.items():
            allowed_val = self.__wrap_allowed_val(allowed_val)
            # Mark current argument as optional.
            i = len(self.allowed_params)
            self.optional_args.append(i)
            # Save info about current argument and his allowed values.
            self.allowed_params.append((arg_name, allowed_val))
    def __validate_args(self, func, args, kwargs):
        """Compare value of each required argument with list of allowed values.
        Args:
        func (types.FunctionType):
        Function to validate.
        args (list):
        Collection of the position arguments.
        kwargs (dict):
        Collection of the keyword arguments.
        Raises:
        InvalidArgumentNumberError:
        When position or count of the arguments is incorrect.
        ArgumentValidationError:
        When encountered unexpected argument value.
        """
        from pyvalid.validators import Validator
        for i, (arg_name, allowed_values) in enumerate(self.allowed_params):
            # Resolve the actual value: positionally first, then by keyword;
            # missing optional args are simply skipped.
            if i < len(args):
                value = args[i]
            else:
                if arg_name in kwargs:
                    value = kwargs[arg_name]
                elif i in self.optional_args:
                    continue
                else:
                    raise InvalidArgumentNumberError(func)
            is_valid = False
            for allowed_val in allowed_values:
                # An allowed value acts as a validator when it is a Validator
                # instance or a bound method wrapping one.
                is_validator = (
                    isinstance(allowed_val, Validator) or
                    (
                        isinstance(allowed_val, MethodType) and
                        hasattr(allowed_val, '__func__') and
                        isinstance(allowed_val.__func__, Validator)
                    )
                )
                if is_validator:
                    is_valid = allowed_val(value)
                elif isinstance(allowed_val, type):
                    is_valid = isinstance(value, allowed_val)
                else:
                    is_valid = value == allowed_val
                if is_valid:
                    break
            if not is_valid:
                ord_num = self.__ordinal(i + 1)
                raise ArgumentValidationError(func, ord_num, value, allowed_values)
    def __ordinal(self, num):
        """Returns the ordinal number of a given integer, as a string.
        eg. 1 -> 1st, 2 -> 2nd, 3 -> 3rd, etc.
        """
        # 11th, 12th, 13th are special-cased by the 10 <= num % 100 < 20 test.
        if 10 <= num % 100 < 20:
            return str(num) + 'th'
        else:
            ord_info = {1: 'st', 2: 'nd', 3: 'rd'}.get(num % 10, 'th')
            return '{}{}'.format(num, ord_info)
    def __pep_0468_fix(self, func):
        """Fixes the issue with preserving the order of function's arguments. So far,
        the issue exists in the Python 3.5 only. More details can be found on the
        "PEP 468" page: https://www.python.org/dev/peps/pep-0468/
        """
        is_broken_py = (version_info.major, version_info.minor) == (3, 5)
        if not is_broken_py:
            return False
        from inspect import signature, Parameter
        func_signature = signature(func)
        func_parameters = func_signature.parameters.values()
        parameters_order = dict()
        for param_index, param in enumerate(func_parameters):
            if param.kind is Parameter.VAR_KEYWORD:
                continue
            parameters_order[param.name] = param_index
        last_param_pos = len(self.allowed_params)
        # Sort collected params back into declaration order; unknown names
        # (e.g. **kwargs entries) sort last.
        self.allowed_params.sort(
            key=lambda param: parameters_order.get(param[0], last_param_pos)
        )
| {
"repo_name": "uzumaxy/pyvalid",
"path": "pyvalid/__accepts.py",
"copies": "1",
"size": "8488",
"license": "bsd-3-clause",
"hash": 4001766825676435000,
"line_mean": 37.3529411765,
"line_max": 88,
"alpha_frac": 0.5657149599,
"autogenerated": false,
"ratio": 4.547210300429184,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.0006322850384177873,
"num_lines": 221
} |
from functools import wraps
from types import MethodType
from typing import Any, Callable, Iterable, Union
import simpy
from desmod.pool import Pool
from desmod.queue import ItemType, Queue
# A probe callback receives the newly observed value; its return is ignored.
ProbeCallback = Callable[[Any], None]
ProbeCallbacks = Iterable[ProbeCallback]
# Every object type that attach() knows how to instrument.
ProbeTarget = Union[
    Pool, Queue[ItemType], simpy.Resource, simpy.Store, simpy.Container, MethodType
]
def attach(
    scope: str, target: ProbeTarget, callbacks: ProbeCallbacks, **hints: Any
) -> None:
    """Instrument `target` so that `callbacks` observe its state changes.

    The instrumentation strategy is selected from the target's type; `hints`
    can choose an alternate metric for some types.  Raises TypeError for
    unsupported targets (`scope` is only used in that error message).
    """
    if isinstance(target, MethodType):
        attach_fn = _attach_method
    elif isinstance(target, simpy.Container):
        attach_fn = _attach_container_level
    elif isinstance(target, simpy.Store):
        attach_fn = _attach_store_items
    elif isinstance(target, simpy.Resource):
        attach_fn = (_attach_resource_queue if hints.get('trace_queue')
                     else _attach_resource_users)
    elif isinstance(target, Queue):
        attach_fn = (_attach_queue_remaining if hints.get('trace_remaining', False)
                     else _attach_queue_size)
    elif isinstance(target, Pool):
        attach_fn = (_attach_pool_remaining if hints.get('trace_remaining', False)
                     else _attach_pool_level)
    else:
        raise TypeError(f'Cannot probe {scope} of type {type(target)}')
    attach_fn(target, callbacks)
def _attach_method(method: MethodType, callbacks: ProbeCallbacks) -> None:
def make_wrapper(func):
@wraps(func)
def wrapper(*args, **kwargs):
value = func(*args, **kwargs)
for callback in callbacks:
callback(value)
return value
return wrapper
setattr(method.__self__, method.__func__.__name__, make_wrapper(method))
def _attach_container_level(
    container: simpy.Container, callbacks: ProbeCallbacks
) -> None:
    """Wrap the container's put/get internals to report level changes."""
    def instrumented(func):
        @wraps(func)
        def wrapper(*args, **kwargs):
            before = container._level
            result = func(*args, **kwargs)
            after = container._level
            # Only notify when the level actually moved.
            if after != before:
                for callback in callbacks:
                    callback(after)
            return result
        return wrapper

    container._do_get = instrumented(container._do_get)  # type: ignore
    container._do_put = instrumented(container._do_put)  # type: ignore
def _attach_store_items(store: simpy.Store, callbacks: ProbeCallbacks) -> None:
    """Wrap the store's put/get internals to report item-count changes."""
    def instrumented(func):
        @wraps(func)
        def wrapper(*args, **kwargs):
            before = len(store.items)
            result = func(*args, **kwargs)
            after = len(store.items)
            if after != before:
                for callback in callbacks:
                    callback(after)
            return result
        return wrapper

    store._do_get = instrumented(store._do_get)  # type: ignore
    store._do_put = instrumented(store._do_put)  # type: ignore
def _attach_resource_users(resource: simpy.Resource, callbacks: ProbeCallbacks) -> None:
    """Wrap the resource's put/get internals to report user-count changes."""
    def instrumented(func):
        @wraps(func)
        def wrapper(*args, **kwargs):
            before = len(resource.users)
            result = func(*args, **kwargs)
            after = len(resource.users)
            if after != before:
                for callback in callbacks:
                    callback(after)
            return result
        return wrapper

    resource._do_get = instrumented(resource._do_get)  # type: ignore
    resource._do_put = instrumented(resource._do_put)  # type: ignore
def _attach_resource_queue(resource: simpy.Resource, callbacks: ProbeCallbacks) -> None:
    """Wrap request/trigger internals to report queue-length changes."""
    def instrumented(func):
        @wraps(func)
        def wrapper(*args, **kwargs):
            before = len(resource.queue)
            result = func(*args, **kwargs)
            after = len(resource.queue)
            if after != before:
                for callback in callbacks:
                    callback(after)
            return result
        return wrapper

    resource.request = instrumented(resource.request)  # type: ignore
    resource._trigger_put = instrumented(resource._trigger_put)  # type: ignore
def _attach_queue_size(queue: Queue[ItemType], callbacks: ProbeCallbacks) -> None:
    """Install hooks reporting the queue's size after every put or get."""
    def notify():
        for callback in callbacks:
            callback(queue.size)

    queue._put_hook = notify
    queue._get_hook = notify
def _attach_queue_remaining(queue: Queue[ItemType], callbacks: ProbeCallbacks) -> None:
    """Install hooks reporting the queue's remaining capacity after put/get."""
    def notify():
        for callback in callbacks:
            callback(queue.remaining)

    queue._put_hook = notify
    queue._get_hook = notify
def _attach_pool_level(pool: Pool, callbacks: ProbeCallbacks) -> None:
    """Install hooks reporting the pool's level after every put or get."""
    def notify():
        for callback in callbacks:
            callback(pool.level)

    pool._put_hook = notify
    pool._get_hook = notify
def _attach_pool_remaining(pool: Pool, callbacks: ProbeCallbacks) -> None:
    """Install hooks reporting the pool's remaining capacity after put/get."""
    def notify():
        for callback in callbacks:
            callback(pool.remaining)

    pool._put_hook = notify
    pool._get_hook = notify
| {
"repo_name": "SanDisk-Open-Source/desmod",
"path": "desmod/probe.py",
"copies": "1",
"size": "5146",
"license": "mit",
"hash": -8329293910590438000,
"line_mean": 30.7654320988,
"line_max": 88,
"alpha_frac": 0.6129032258,
"autogenerated": false,
"ratio": 4.017174082747853,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.00035671914291260593,
"num_lines": 162
} |
from functools import wraps
from types import MethodType
from django.conf import settings
from django.db import connections
from django_perf_rec.orm import patch_ORM_to_be_deterministic
from django_perf_rec.settings import perf_rec_settings
from django_perf_rec.sql import sql_fingerprint
from django_perf_rec.utils import sorted_names
class DBOp:
    """Value object recording one database operation: the connection alias
    plus the fingerprinted SQL that ran on it."""

    def __init__(self, alias, sql):
        self.alias = alias
        self.sql = sql

    def __repr__(self):
        # Bug fix: the original passed repr(self.alias)/repr(self.sql) into a
        # "{!r}" field, which applies repr() again, yielding doubly-quoted
        # output like DBOp("'default'", "'SELECT 1'").
        return "DBOp({!r}, {!r})".format(self.alias, self.sql)

    def __eq__(self, other):
        return (
            isinstance(other, DBOp)
            and self.alias == other.alias
            and self.sql == other.sql
        )
class DBRecorder:
    """
    Monkey-patch-wraps a database connection to call 'callback' on every
    query it runs.
    """
    def __init__(self, alias, callback):
        # Django database alias (a key of settings.DATABASES).
        self.alias = alias
        # Invoked with a DBOp for each query executed while entered.
        self.callback = callback
    def __enter__(self):
        """
        When using the debug cursor wrapper, Django calls
        connection.ops.last_executed_query to get the SQL from the client
        library. Here we wrap this function on the connection to grab the SQL
        as it comes out.
        """
        patch_ORM_to_be_deterministic()
        connection = connections[self.alias]
        # Force the debug cursor so last_executed_query runs for every query;
        # remember the previous setting so __exit__ can restore it.
        self.orig_force_debug_cursor = connection.force_debug_cursor
        connection.force_debug_cursor = True
        def call_callback(func):
            # Bind to locals: the inner function's `self` is connection.ops
            # (it is installed as a bound method below), not this recorder.
            alias = self.alias
            callback = self.callback
            @wraps(func)
            def inner(self, *args, **kwargs):
                sql = func(*args, **kwargs)
                hide_columns = perf_rec_settings.HIDE_COLUMNS
                callback(
                    DBOp(
                        alias=alias, sql=sql_fingerprint(sql, hide_columns=hide_columns)
                    )
                )
                return sql
            return inner
        self.orig_last_executed_query = connection.ops.last_executed_query
        connection.ops.last_executed_query = MethodType(
            call_callback(connection.ops.last_executed_query), connection.ops
        )
    def __exit__(self, exc_type, exc_value, exc_traceback):
        connection = connections[self.alias]
        # Bug fix: restore the saved value instead of unconditionally setting
        # False — the original stored orig_force_debug_cursor in __enter__ but
        # never used it, clobbering a pre-existing True setting.
        connection.force_debug_cursor = self.orig_force_debug_cursor
        connection.ops.last_executed_query = self.orig_last_executed_query
class AllDBRecorder:
    """
    Launches DBRecorders on all database connections
    """
    def __init__(self, callback):
        self.callback = callback

    def __enter__(self):
        self.recorders = []
        for alias in sorted_names(settings.DATABASES.keys()):
            db_recorder = DBRecorder(alias, self.callback)
            db_recorder.__enter__()
            self.recorders.append(db_recorder)

    def __exit__(self, type_, value, traceback):
        # Unwind in reverse order of entry, like nested context managers.
        for db_recorder in reversed(self.recorders):
            db_recorder.__exit__(type_, value, traceback)
        self.recorders = []
| {
"repo_name": "YPlan/django-perf-rec",
"path": "src/django_perf_rec/db.py",
"copies": "1",
"size": "2980",
"license": "mit",
"hash": -3930953546372278000,
"line_mean": 29.4081632653,
"line_max": 88,
"alpha_frac": 0.6006711409,
"autogenerated": false,
"ratio": 4.185393258426966,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5286064399326966,
"avg_score": null,
"num_lines": null
} |
from functools import wraps
from types import MethodType
try:
from collections.abc import Callable
except ImportError:
from collections import Callable
from pyvalid.__exceptions import InvalidReturnTypeError
from pyvalid.switch import is_enabled
class Returns(Callable):
    """
    ``pyvalid.returns(*allowed_return_values)``
    +++++++++++++++++++++++++++++++++++++++++++
    The decorator which validates the value returned by the wrapped function.
    To use it, we need to specify the list of expected return types or values. If the
    function’s return value doesn’t match the allowed types/values, the
    ``pyvalid.InvalidReturnTypeError`` error will be thrown.
    Examples of usage:
    Let's define the ``multiply`` function, which returns only ``int`` values, and see
    how does it work with other types.
    .. code-block:: python
    from pyvalid import returns
    @returns(int)
    def multiply(num_1, num_2):
    return num_1 * num_2
    multiply(4, 2)
    # Returns 8.
    multiply(3.14, 8)
    # Raises the InvalidReturnTypeError exception, since the function returns the
    # float value, when we're expecting int values only.
    multiply(3, 'pyvalid')
    # Raises the InvalidReturnTypeError exception, since the function returns the
    # str value, when we're expecting int values only.
    """
    def __init__(self, *allowed_return_values):
        self.allowed_return_values = allowed_return_values
    def __call__(self, func):
        @wraps(func)
        def decorator_wrapper(*func_args, **func_kwargs):
            from pyvalid.validators import Validator
            returns_val = func(*func_args, **func_kwargs)
            if is_enabled() and self.allowed_return_values:
                is_valid = False
                for allowed_val in self.allowed_return_values:
                    # An allowed value acts as a validator when it is a
                    # Validator instance or a bound method wrapping one --
                    # the same rule pyvalid.Accepts uses for arguments.
                    is_validator = (
                        isinstance(allowed_val, Validator) or
                        (
                            isinstance(allowed_val, MethodType) and
                            hasattr(allowed_val, '__func__') and
                            isinstance(allowed_val.__func__, Validator)
                        )
                    )
                    if is_validator:
                        is_valid = allowed_val(returns_val)
                    elif isinstance(allowed_val, type):
                        is_valid = isinstance(returns_val, allowed_val)
                    else:
                        # Plain values (including plain bound methods) are
                        # matched by equality. Bug fix: the previous version
                        # skipped plain MethodType values entirely, leaving
                        # is_valid at whatever the prior iteration computed.
                        is_valid = returns_val == allowed_val
                    if is_valid:
                        break
                if not is_valid:
                    raise InvalidReturnTypeError(
                        func, returns_val, self.allowed_return_values
                    )
            return returns_val
        return decorator_wrapper
| {
"repo_name": "uzumaxy/pyvalid",
"path": "pyvalid/__returns.py",
"copies": "1",
"size": "2941",
"license": "bsd-3-clause",
"hash": 3237704383247602700,
"line_mean": 35.2592592593,
"line_max": 86,
"alpha_frac": 0.5658835546,
"autogenerated": false,
"ratio": 4.903171953255426,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.0005725675778246392,
"num_lines": 81
} |
from functools import wraps
from typing import Any, Callable, Dict, Mapping, Set, Tuple, Union, cast
from django.http import HttpRequest, HttpResponse
from django.urls import path
from django.urls.resolvers import URLPattern
from django.utils.cache import add_never_cache_headers
from django.views.decorators.csrf import csrf_exempt, csrf_protect
from zerver.decorator import (
authenticated_json_view,
authenticated_rest_api_view,
authenticated_uploads_api_view,
process_as_post,
)
from zerver.lib.exceptions import MissingAuthenticationError
from zerver.lib.response import json_method_not_allowed
from zerver.lib.types import ViewFuncT
# HTTP methods that rest_dispatch knows how to route.
METHODS = ('GET', 'HEAD', 'POST', 'PUT', 'DELETE', 'PATCH')
# Known view flags. Bug fix: the original read ('override_api_url_scheme')
# without a trailing comma, which is a plain string rather than a 1-tuple.
FLAGS = ('override_api_url_scheme',)
def default_never_cache_responses(view_func: ViewFuncT) -> ViewFuncT:
    """Patched version of the standard Django never_cache_responses
    decorator that adds headers to a response so that it will never be
    cached, unless the view code has already set a Cache-Control
    header.
    """
    @wraps(view_func)
    def _wrapped_view_func(request: HttpRequest, *args: object, **kwargs: object) -> HttpResponse:
        response = view_func(request, *args, **kwargs)
        # Respect an explicit Cache-Control header set by the view.
        if not response.has_header("Cache-Control"):
            add_never_cache_headers(response)
        return response
    return cast(ViewFuncT, _wrapped_view_func)  # https://github.com/python/mypy/issues/1927
@default_never_cache_responses
@csrf_exempt
def rest_dispatch(request: HttpRequest, **kwargs: Any) -> HttpResponse:
    """Dispatch to a REST API endpoint.
    Unauthenticated endpoints should not use this, as authentication is verified
    in the following ways:
    * for paths beginning with /api, HTTP basic auth
    * for paths beginning with /json (used by the web client), the session token
    This calls the function named in kwargs[request.method], if that request
    method is supported, and after wrapping that function to:
    * protect against CSRF (if the user is already authenticated through
    a Django session)
    * authenticate via an API key (otherwise)
    * coerce PUT/PATCH/DELETE into having POST-like semantics for
    retrieving variables
    Any keyword args that are *not* HTTP methods are passed through to the
    target function.
    Never make a urls.py pattern put user input into a variable called GET, POST,
    etc, as that is where we route HTTP verbs to target functions.
    """
    # Maps HTTP method name -> view function (or (function, flags) tuple).
    supported_methods: Dict[str, Any] = {}
    if hasattr(request, "saved_response"):
        # For completing long-polled Tornado requests, we skip the
        # view function logic and just return the response.
        return request.saved_response
    # duplicate kwargs so we can mutate the original as we go
    for arg in list(kwargs):
        if arg in METHODS:
            supported_methods[arg] = kwargs[arg]
            del kwargs[arg]
    if 'GET' in supported_methods:
        # HEAD is served by the GET handler unless explicitly overridden.
        supported_methods.setdefault('HEAD', supported_methods['GET'])
    if request.method == 'OPTIONS':
        response = HttpResponse(status=204)  # No content
        response['Allow'] = ', '.join(sorted(supported_methods.keys()))
        return response
    # Override requested method if magic method=??? parameter exists
    method_to_use = request.method
    if request.POST and 'method' in request.POST:
        method_to_use = request.POST['method']
    if method_to_use in supported_methods:
        # Each entry is either a bare view function or a (function, flags)
        # tuple carrying per-view behavior flags.
        entry = supported_methods[method_to_use]
        if isinstance(entry, tuple):
            target_function, view_flags = entry
        else:
            target_function = supported_methods[method_to_use]
            view_flags = set()
        # Set request._query for update_activity_user(), which is called
        # by some of the later wrappers.
        request._query = target_function.__name__
        # We want to support authentication by both cookies (web client)
        # and API keys (API clients). In the former case, we want to
        # do a check to ensure that CSRF etc is honored, but in the latter
        # we can skip all of that.
        #
        # Security implications of this portion of the code are minimal,
        # as we should worst-case fail closed if we miscategorise a request.
        # for some special views (e.g. serving a file that has been
        # uploaded), we support using the same URL for web and API clients.
        if ('override_api_url_scheme' in view_flags and
                request.META.get('HTTP_AUTHORIZATION', None) is not None):
            # This request uses standard API based authentication.
            # For override_api_url_scheme views, we skip our normal
            # rate limiting, because there are good reasons clients
            # might need to (e.g.) request a large number of uploaded
            # files or avatars in quick succession.
            target_function = authenticated_rest_api_view(skip_rate_limiting=True)(target_function)
        elif ('override_api_url_scheme' in view_flags and
                request.GET.get('api_key') is not None):
            # This request uses legacy API authentication. We
            # unfortunately need that in the React Native mobile apps,
            # because there's no way to set HTTP_AUTHORIZATION in
            # React Native. See last block for rate limiting notes.
            target_function = authenticated_uploads_api_view(skip_rate_limiting=True)(target_function)
        # /json views (web client) validate with a session token (cookie)
        elif not request.path.startswith("/api") and request.user.is_authenticated:
            # Authenticated via sessions framework, only CSRF check needed
            auth_kwargs = {}
            if 'override_api_url_scheme' in view_flags:
                auth_kwargs["skip_rate_limiting"] = True
            target_function = csrf_protect(authenticated_json_view(target_function, **auth_kwargs))
        # most clients (mobile, bots, etc) use HTTP basic auth and REST calls, where instead of
        # username:password, we use email:apiKey
        elif request.META.get('HTTP_AUTHORIZATION', None):
            # Wrap function with decorator to authenticate the user before
            # proceeding
            target_function = authenticated_rest_api_view(
                allow_webhook_access='allow_incoming_webhooks' in view_flags,
            )(target_function)
        elif request.path.startswith("/json") and 'allow_anonymous_user_web' in view_flags:
            # For endpoints that support anonymous web access, we do that.
            # TODO: Allow /api calls when this is stable enough.
            auth_kwargs = dict(allow_unauthenticated=True)
            target_function = csrf_protect(authenticated_json_view(
                target_function, **auth_kwargs))
        else:
            # Otherwise, throw an authentication error; our middleware
            # will generate the appropriate HTTP response.
            raise MissingAuthenticationError()
        if request.method not in ["GET", "POST"]:
            # process_as_post needs to be the outer decorator, because
            # otherwise we might access and thus cache a value for
            # request.REQUEST.
            target_function = process_as_post(target_function)
        return target_function(request, **kwargs)
    return json_method_not_allowed(list(supported_methods.keys()))
def rest_path(
    route: str,
    kwargs: Mapping[str, object] = {},
    **handlers: Union[Callable[..., HttpResponse], Tuple[Callable[..., HttpResponse], Set[str]]],
) -> URLPattern:
    """Wire a URL route to rest_dispatch, merging extra view kwargs with the
    per-HTTP-method handler mapping.

    NOTE(review): the mutable default `{}` is safe here because it is only
    read, never mutated.
    """
    merged_kwargs = {**kwargs, **handlers}
    return path(route, rest_dispatch, merged_kwargs)
| {
"repo_name": "kou/zulip",
"path": "zerver/lib/rest.py",
"copies": "2",
"size": "7717",
"license": "apache-2.0",
"hash": 9183327914192949000,
"line_mean": 44.6627218935,
"line_max": 102,
"alpha_frac": 0.6665802773,
"autogenerated": false,
"ratio": 4.335393258426966,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.0014406713838640936,
"num_lines": 169
} |
from functools import wraps
from typing import Any, Callable, Dict
from django.conf import settings
from django.http import HttpRequest, HttpResponse, HttpResponseRedirect
from django.utils.cache import add_never_cache_headers
from django.utils.module_loading import import_string
from django.views.decorators.csrf import csrf_exempt, csrf_protect
from zerver.decorator import (
ReturnT,
authenticated_json_view,
authenticated_rest_api_view,
authenticated_uploads_api_view,
process_as_post,
)
from zerver.lib.response import json_method_not_allowed, json_unauthorized
# HTTP methods that rest_dispatch knows how to route.
METHODS = ('GET', 'HEAD', 'POST', 'PUT', 'DELETE', 'PATCH')
# Known view flags. Bug fix: the original read ('override_api_url_scheme')
# without a trailing comma, which is a plain string rather than a 1-tuple.
FLAGS = ('override_api_url_scheme',)
def default_never_cache_responses(
        view_func: Callable[..., HttpResponse]) -> Callable[..., HttpResponse]:
    """Patched version of the standard Django never_cache_responses
    decorator that adds headers to a response so that it will never be
    cached, unless the view code has already set a Cache-Control
    header.
    """
    @wraps(view_func)
    def _wrapped_view_func(request: HttpRequest, *args: Any, **kwargs: Any) -> ReturnT:
        response = view_func(request, *args, **kwargs)
        # Respect an explicit Cache-Control header set by the view.
        if not response.has_header("Cache-Control"):
            add_never_cache_headers(response)
        return response
    return _wrapped_view_func
@default_never_cache_responses
@csrf_exempt
def rest_dispatch(request: HttpRequest, **kwargs: Any) -> HttpResponse:
    """Dispatch to a REST API endpoint.
    Unauthenticated endpoints should not use this, as authentication is verified
    in the following ways:
    * for paths beginning with /api, HTTP Basic auth
    * for paths beginning with /json (used by the web client), the session token
    This calls the function named in kwargs[request.method], if that request
    method is supported, and after wrapping that function to:
    * protect against CSRF (if the user is already authenticated through
    a Django session)
    * authenticate via an API key (otherwise)
    * coerce PUT/PATCH/DELETE into having POST-like semantics for
    retrieving variables
    Any keyword args that are *not* HTTP methods are passed through to the
    target function.
    Never make a urls.py pattern put user input into a variable called GET, POST,
    etc, as that is where we route HTTP verbs to target functions.
    """
    # Maps HTTP method name -> dotted-path string of the view (or a
    # (dotted-path, flags) tuple); resolved below via import_string.
    supported_methods: Dict[str, Any] = {}
    if hasattr(request, "saved_response"):
        # For completing long-polled Tornado requests, we skip the
        # view function logic and just return the response.
        return request.saved_response
    # duplicate kwargs so we can mutate the original as we go
    for arg in list(kwargs):
        if arg in METHODS:
            supported_methods[arg] = kwargs[arg]
            del kwargs[arg]
    if 'GET' in supported_methods:
        # HEAD is served by the GET handler unless explicitly overridden.
        supported_methods.setdefault('HEAD', supported_methods['GET'])
    if request.method == 'OPTIONS':
        response = HttpResponse(status=204)  # No content
        response['Allow'] = ', '.join(sorted(supported_methods.keys()))
        return response
    # Override requested method if magic method=??? parameter exists
    method_to_use = request.method
    if request.POST and 'method' in request.POST:
        method_to_use = request.POST['method']
    if method_to_use in supported_methods:
        entry = supported_methods[method_to_use]
        if isinstance(entry, tuple):
            target_function, view_flags = entry
            target_function = import_string(target_function)
        else:
            target_function = import_string(supported_methods[method_to_use])
            view_flags = set()
        # Set request._query for update_activity_user(), which is called
        # by some of the later wrappers.
        request._query = target_function.__name__
        # We want to support authentication by both cookies (web client)
        # and API keys (API clients). In the former case, we want to
        # do a check to ensure that CSRF etc is honored, but in the latter
        # we can skip all of that.
        #
        # Security implications of this portion of the code are minimal,
        # as we should worst-case fail closed if we miscategorise a request.
        # for some special views (e.g. serving a file that has been
        # uploaded), we support using the same url for web and API clients.
        if ('override_api_url_scheme' in view_flags and
                request.META.get('HTTP_AUTHORIZATION', None) is not None):
            # This request uses standard API based authentication.
            # For override_api_url_scheme views, we skip our normal
            # rate limiting, because there are good reasons clients
            # might need to (e.g.) request a large number of uploaded
            # files or avatars in quick succession.
            target_function = authenticated_rest_api_view(skip_rate_limiting=True)(target_function)
        elif ('override_api_url_scheme' in view_flags and
                request.GET.get('api_key') is not None):
            # This request uses legacy API authentication. We
            # unfortunately need that in the React Native mobile apps,
            # because there's no way to set HTTP_AUTHORIZATION in
            # React Native. See last block for rate limiting notes.
            target_function = authenticated_uploads_api_view(skip_rate_limiting=True)(target_function)
        # /json views (web client) validate with a session token (cookie)
        elif not request.path.startswith("/api") and request.user.is_authenticated:
            # Authenticated via sessions framework, only CSRF check needed
            auth_kwargs = {}
            if 'override_api_url_scheme' in view_flags:
                auth_kwargs["skip_rate_limiting"] = True
            target_function = csrf_protect(authenticated_json_view(target_function, **auth_kwargs))
        # most clients (mobile, bots, etc) use HTTP Basic Auth and REST calls, where instead of
        # username:password, we use email:apiKey
        elif request.META.get('HTTP_AUTHORIZATION', None):
            # Wrap function with decorator to authenticate the user before
            # proceeding
            view_kwargs = {}
            if 'allow_incoming_webhooks' in view_flags:
                view_kwargs['is_webhook'] = True
            target_function = authenticated_rest_api_view(**view_kwargs)(target_function)  # type: ignore[arg-type] # likely mypy bug
        # Pick a way to tell user they're not authed based on how the request was made
        else:
            # If this looks like a request from a top-level page in a
            # browser, send the user to the login page
            if 'text/html' in request.META.get('HTTP_ACCEPT', ''):
                # TODO: It seems like the `?next=` part is unlikely to be helpful
                return HttpResponseRedirect(f'{settings.HOME_NOT_LOGGED_IN}?next={request.path}')
            # Ask for basic auth (email:apiKey)
            elif request.path.startswith("/api"):
                return json_unauthorized()
            # Logged out user accessing an endpoint with anonymous user access on JSON; proceed.
            elif request.path.startswith("/json") and 'allow_anonymous_user_web' in view_flags:
                auth_kwargs = dict(allow_unauthenticated=True)
                target_function = csrf_protect(authenticated_json_view(
                    target_function, **auth_kwargs))
            # Session cookie expired, notify the client
            else:
                return json_unauthorized(www_authenticate='session')
        if request.method not in ["GET", "POST"]:
            # process_as_post needs to be the outer decorator, because
            # otherwise we might access and thus cache a value for
            # request.REQUEST.
            target_function = process_as_post(target_function)
        return target_function(request, **kwargs)
    return json_method_not_allowed(list(supported_methods.keys()))
| {
"repo_name": "shubhamdhama/zulip",
"path": "zerver/lib/rest.py",
"copies": "1",
"size": "8088",
"license": "apache-2.0",
"hash": -196735808177406400,
"line_mean": 46.023255814,
"line_max": 133,
"alpha_frac": 0.65628091,
"autogenerated": false,
"ratio": 4.369529983792544,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.0012942493649674444,
"num_lines": 172
} |
from functools import wraps
from typing import Any, Callable, Dict
from django.utils.module_loading import import_string
from django.utils.translation import ugettext as _
from django.utils.cache import add_never_cache_headers
from django.views.decorators.csrf import csrf_exempt, csrf_protect
from zerver.decorator import authenticated_json_view, authenticated_rest_api_view, \
process_as_post, authenticated_uploads_api_view, RespondAsynchronously, \
ReturnT
from zerver.lib.response import json_method_not_allowed, json_unauthorized
from django.http import HttpRequest, HttpResponse, HttpResponseRedirect
from django.conf import settings
# HTTP methods that rest_dispatch() knows how to route to target functions.
METHODS = ('GET', 'HEAD', 'POST', 'PUT', 'DELETE', 'PATCH')
# Recognized view flags (see rest_dispatch).  NOTE: the trailing comma is
# required -- ('x') without it is just a parenthesized string, not a
# 1-tuple, which would make `flag in FLAGS` do substring matching.
FLAGS = ('override_api_url_scheme',)
def default_never_cache_responses(
        view_func: Callable[..., HttpResponse]) -> Callable[..., HttpResponse]:
    """Decorator that marks every response from view_func as uncacheable.

    This is a patched variant of Django's standard never-cache behavior:
    it leaves the response alone if the view already set a Cache-Control
    header, and it also tolerates our Django+Tornado longpolling hack,
    where a view returns the RespondAsynchronously sentinel rather than a
    real Django response object (add_never_cache_headers would crash on
    it; no client-side caching is possible in that case anyway, since no
    response is being sent to the client yet).
    """
    @wraps(view_func)
    def _wrapper(request: HttpRequest, *args: Any, **kwargs: Any) -> ReturnT:
        result = view_func(request, *args, **kwargs)
        # Only touch real responses that haven't chosen their own caching
        # policy; the sentinel check must come first to short-circuit.
        if result is not RespondAsynchronously and not result.has_header("Cache-Control"):
            add_never_cache_headers(result)
        return result
    return _wrapper
@default_never_cache_responses
@csrf_exempt
def rest_dispatch(request: HttpRequest, **kwargs: Any) -> HttpResponse:
    """Dispatch to a REST API endpoint.
    Unauthenticated endpoints should not use this, as authentication is verified
    in the following ways:
    * for paths beginning with /api, HTTP Basic auth
    * for paths beginning with /json (used by the web client), the session token
    This calls the function named in kwargs[request.method], if that request
    method is supported, and after wrapping that function to:
    * protect against CSRF (if the user is already authenticated through
      a Django session)
    * authenticate via an API key (otherwise)
    * coerce PUT/PATCH/DELETE into having POST-like semantics for
      retrieving variables
    Any keyword args that are *not* HTTP methods are passed through to the
    target function.
    Never make a urls.py pattern put user input into a variable called GET, POST,
    etc, as that is where we route HTTP verbs to target functions.
    """
    supported_methods = {}  # type: Dict[str, Any]
    # duplicate kwargs so we can mutate the original as we go
    for arg in list(kwargs):
        if arg in METHODS:
            supported_methods[arg] = kwargs[arg]
            del kwargs[arg]
    # HEAD requests are served by the GET handler unless a dedicated
    # HEAD handler was registered in urls.py.
    if 'GET' in supported_methods:
        supported_methods.setdefault('HEAD', supported_methods['GET'])
    # Answer capability queries (OPTIONS) without authentication: just
    # advertise which methods this URL pattern supports.
    if request.method == 'OPTIONS':
        response = HttpResponse(status=204)  # No content
        response['Allow'] = ', '.join(sorted(supported_methods.keys()))
        return response
    # Override requested method if magic method=??? parameter exists
    method_to_use = request.method
    if request.POST and 'method' in request.POST:
        method_to_use = request.POST['method']
    if method_to_use in supported_methods:
        # Each entry is either a dotted-path string, or a
        # (dotted-path, flags) tuple; resolve it to the actual view.
        entry = supported_methods[method_to_use]
        if isinstance(entry, tuple):
            target_function, view_flags = entry
            target_function = import_string(target_function)
        else:
            target_function = import_string(supported_methods[method_to_use])
            view_flags = set()
        # Set request._query for update_activity_user(), which is called
        # by some of the later wrappers.
        request._query = target_function.__name__
        # We want to support authentication by both cookies (web client)
        # and API keys (API clients). In the former case, we want to
        # do a check to ensure that CSRF etc is honored, but in the latter
        # we can skip all of that.
        #
        # Security implications of this portion of the code are minimal,
        # as we should worst-case fail closed if we miscategorise a request.
        # for some special views (e.g. serving a file that has been
        # uploaded), we support using the same url for web and API clients.
        if ('override_api_url_scheme' in view_flags and
                request.META.get('HTTP_AUTHORIZATION', None) is not None):
            # This request uses standard API based authentication.
            # For override_api_url_scheme views, we skip our normal
            # rate limiting, because there are good reasons clients
            # might need to (e.g.) request a large number of uploaded
            # files or avatars in quick succession.
            target_function = authenticated_rest_api_view(skip_rate_limiting=True)(target_function)
        elif ('override_api_url_scheme' in view_flags and
                request.GET.get('api_key') is not None):
            # This request uses legacy API authentication.  We
            # unfortunately need that in the React Native mobile apps,
            # because there's no way to set HTTP_AUTHORIZATION in
            # React Native. See last block for rate limiting notes.
            target_function = authenticated_uploads_api_view(skip_rate_limiting=True)(target_function)
        # /json views (web client) validate with a session token (cookie)
        elif not request.path.startswith("/api") and request.user.is_authenticated:
            # Authenticated via sessions framework, only CSRF check needed
            auth_kwargs = {}
            if 'override_api_url_scheme' in view_flags:
                auth_kwargs["skip_rate_limiting"] = True
            target_function = csrf_protect(authenticated_json_view(target_function, **auth_kwargs))
        # most clients (mobile, bots, etc) use HTTP Basic Auth and REST calls, where instead of
        # username:password, we use email:apiKey
        elif request.META.get('HTTP_AUTHORIZATION', None):
            # Wrap function with decorator to authenticate the user before
            # proceeding
            view_kwargs = {}
            if 'allow_incoming_webhooks' in view_flags:
                view_kwargs['is_webhook'] = True
            target_function = authenticated_rest_api_view(**view_kwargs)(target_function)  # type: ignore # likely mypy bug
        # Pick a way to tell user they're not authed based on how the request was made
        else:
            # If this looks like a request from a top-level page in a
            # browser, send the user to the login page
            if 'text/html' in request.META.get('HTTP_ACCEPT', ''):
                # TODO: It seems like the `?next=` part is unlikely to be helpful
                return HttpResponseRedirect('%s?next=%s' % (settings.HOME_NOT_LOGGED_IN, request.path))
            # Ask for basic auth (email:apiKey)
            elif request.path.startswith("/api"):
                return json_unauthorized(_("Not logged in: API authentication or user session required"))
            # Logged out user accessing an endpoint with anonymous user access on JSON; proceed.
            elif request.path.startswith("/json") and 'allow_anonymous_user_web' in view_flags:
                auth_kwargs = dict(allow_unauthenticated=True)
                target_function = csrf_protect(authenticated_json_view(
                    target_function, **auth_kwargs))
            # Session cookie expired, notify the client
            else:
                return json_unauthorized(_("Not logged in: API authentication or user session required"),
                                         www_authenticate='session')
        if request.method not in ["GET", "POST"]:
            # process_as_post needs to be the outer decorator, because
            # otherwise we might access and thus cache a value for
            # request.REQUEST.
            target_function = process_as_post(target_function)
        return target_function(request, **kwargs)
    # No handler registered for this method: 405 with the allowed set.
    return json_method_not_allowed(list(supported_methods.keys()))
| {
"repo_name": "rht/zulip",
"path": "zerver/lib/rest.py",
"copies": "1",
"size": "8524",
"license": "apache-2.0",
"hash": -3153767207414811600,
"line_mean": 48.5581395349,
"line_max": 123,
"alpha_frac": 0.6613092445,
"autogenerated": false,
"ratio": 4.405167958656331,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.556647720315633,
"avg_score": null,
"num_lines": null
} |
from functools import wraps
from typing import Any, Callable, Dict, Optional, cast
from unittest import mock
import orjson
from django.conf import settings
from django.test import override_settings
from zerver.lib.actions import do_create_user, get_service_bot_events
from zerver.lib.bot_config import ConfigError, load_bot_config_template, set_bot_config
from zerver.lib.bot_lib import EmbeddedBotEmptyRecipientsList, EmbeddedBotHandler, StateHandler
from zerver.lib.bot_storage import StateError
from zerver.lib.test_classes import ZulipTestCase
from zerver.lib.test_helpers import patch_queue_publish
from zerver.lib.validator import check_string
from zerver.models import Recipient, UserProfile, get_realm
# Maps each service-bot type to the event queue its triggers are
# published on (used throughout the tests below).
BOT_TYPE_TO_QUEUE_NAME = {
    UserProfile.OUTGOING_WEBHOOK_BOT: 'outgoing_webhooks',
    UserProfile.EMBEDDED_BOT: 'embedded_bots',
}
class TestServiceBotBasics(ZulipTestCase):
    """Unit tests for get_service_bot_events, which decides which
    service-bot queues should be notified about a given message."""
    def _get_outgoing_bot(self) -> UserProfile:
        # Helper: create a fresh outgoing-webhook bot owned by Cordelia.
        outgoing_bot = do_create_user(
            email="bar-bot@zulip.com",
            password="test",
            realm=get_realm("zulip"),
            full_name="BarBot",
            bot_type=UserProfile.OUTGOING_WEBHOOK_BOT,
            bot_owner=self.example_user('cordelia'),
        )
        return outgoing_bot
    def test_service_events_for_pms(self) -> None:
        # A PM sent directly to an outgoing-webhook bot should produce a
        # 'private_message'-triggered event for that bot.
        sender = self.example_user('hamlet')
        assert(not sender.is_bot)
        outgoing_bot = self._get_outgoing_bot()
        assert outgoing_bot.bot_type is not None
        event_dict = get_service_bot_events(
            sender=sender,
            service_bot_tuples=[
                (outgoing_bot.id, outgoing_bot.bot_type),
            ],
            active_user_ids={outgoing_bot.id},
            mentioned_user_ids=set(),
            recipient_type=Recipient.PERSONAL,
        )
        expected = dict(
            outgoing_webhooks=[
                dict(trigger='private_message', user_profile_id=outgoing_bot.id),
            ],
        )
        self.assertEqual(event_dict, expected)
    def test_spurious_mentions(self) -> None:
        sender = self.example_user('hamlet')
        assert(not sender.is_bot)
        outgoing_bot = self._get_outgoing_bot()
        assert outgoing_bot.bot_type is not None
        # If outgoing_bot is not in mentioned_user_ids,
        # we will skip over it. This tests an anomaly
        # of the code that our query for bots can include
        # bots that may not actually be mentioned, and it's
        # easiest to just filter them in get_service_bot_events.
        event_dict = get_service_bot_events(
            sender=sender,
            service_bot_tuples=[
                (outgoing_bot.id, outgoing_bot.bot_type),
            ],
            active_user_ids={outgoing_bot.id},
            mentioned_user_ids=set(),
            recipient_type=Recipient.STREAM,
        )
        self.assertEqual(len(event_dict), 0)
    def test_service_events_for_stream_mentions(self) -> None:
        # A stream mention should produce a 'mention' event; a bot that
        # appears in service_bot_tuples but is not actually mentioned
        # (red_herring_bot) must be filtered out of the result.
        sender = self.example_user('hamlet')
        assert(not sender.is_bot)
        outgoing_bot = self._get_outgoing_bot()
        assert outgoing_bot.bot_type is not None
        cordelia = self.example_user('cordelia')
        red_herring_bot = self.create_test_bot(
            short_name='whatever',
            user_profile=cordelia,
        )
        event_dict = get_service_bot_events(
            sender=sender,
            service_bot_tuples=[
                (outgoing_bot.id, outgoing_bot.bot_type),
                (red_herring_bot.id, UserProfile.OUTGOING_WEBHOOK_BOT),
            ],
            active_user_ids=set(),
            mentioned_user_ids={outgoing_bot.id},
            recipient_type=Recipient.STREAM,
        )
        expected = dict(
            outgoing_webhooks=[
                dict(trigger='mention', user_profile_id=outgoing_bot.id),
            ],
        )
        self.assertEqual(event_dict, expected)
    def test_service_events_for_private_mentions(self) -> None:
        """Service bots should not get access to mentions if they aren't a
        direct recipient."""
        sender = self.example_user('hamlet')
        assert(not sender.is_bot)
        outgoing_bot = self._get_outgoing_bot()
        assert outgoing_bot.bot_type is not None
        event_dict = get_service_bot_events(
            sender=sender,
            service_bot_tuples=[
                (outgoing_bot.id, outgoing_bot.bot_type),
            ],
            active_user_ids=set(),
            mentioned_user_ids={outgoing_bot.id},
            recipient_type=Recipient.PERSONAL,
        )
        self.assertEqual(len(event_dict), 0)
    def test_service_events_with_unexpected_bot_type(self) -> None:
        # A bot whose bot_type is not a service-bot type should produce
        # no events and log an error instead of crashing.
        hamlet = self.example_user('hamlet')
        cordelia = self.example_user('cordelia')
        bot = self.create_test_bot(
            short_name='whatever',
            user_profile=cordelia,
        )
        wrong_bot_type = UserProfile.INCOMING_WEBHOOK_BOT
        bot.bot_type = wrong_bot_type
        bot.save()
        with self.assertLogs(level="ERROR") as m:
            event_dict = get_service_bot_events(
                sender=hamlet,
                service_bot_tuples=[
                    (bot.id, wrong_bot_type),
                ],
                active_user_ids=set(),
                mentioned_user_ids={bot.id},
                recipient_type=Recipient.PERSONAL,
            )
            self.assertEqual(len(event_dict), 0)
        self.assertEqual(m.output, [f"ERROR:root:Unexpected bot_type for Service bot id={bot.id}: {wrong_bot_type}"])
class TestServiceBotStateHandler(ZulipTestCase):
    """Tests for StateHandler -- per-bot key/value storage for embedded
    bots -- and the /json/bot_storage endpoints backed by it."""
    def setUp(self) -> None:
        super().setUp()
        self.user_profile = self.example_user("othello")
        # Two embedded bots owned by Othello; storage must be isolated
        # per bot.
        self.bot_profile = do_create_user(email="embedded-bot-1@zulip.com",
                                          password="test",
                                          realm=get_realm("zulip"),
                                          full_name="EmbeddedBo1",
                                          bot_type=UserProfile.EMBEDDED_BOT,
                                          bot_owner=self.user_profile)
        self.second_bot_profile = do_create_user(email="embedded-bot-2@zulip.com",
                                                 password="test",
                                                 realm=get_realm("zulip"),
                                                 full_name="EmbeddedBot2",
                                                 bot_type=UserProfile.EMBEDDED_BOT,
                                                 bot_owner=self.user_profile)
    def test_basic_storage_and_retrieval(self) -> None:
        storage = StateHandler(self.bot_profile)
        storage.put('some key', 'some value')
        storage.put('some other key', 'some other value')
        self.assertEqual(storage.get('some key'), 'some value')
        self.assertEqual(storage.get('some other key'), 'some other value')
        self.assertTrue(storage.contains('some key'))
        self.assertFalse(storage.contains('nonexistent key'))
        self.assertRaisesMessage(StateError,
                                 "Key does not exist.",
                                 lambda: storage.get('nonexistent key'))
        storage.put('some key', 'a new value')
        self.assertEqual(storage.get('some key'), 'a new value')
        # Storage is scoped per bot: the second bot can neither see nor
        # clobber the first bot's entries.
        second_storage = StateHandler(self.second_bot_profile)
        self.assertRaises(StateError, lambda: second_storage.get('some key'))
        second_storage.put('some key', 'yet another value')
        self.assertEqual(storage.get('some key'), 'a new value')
        self.assertEqual(second_storage.get('some key'), 'yet another value')
    def test_marshaling(self) -> None:
        # Non-string values round-trip through (de)serialization.
        storage = StateHandler(self.bot_profile)
        serializable_obj = {'foo': 'bar', 'baz': [42, 'cux']}
        storage.put('some key', serializable_obj)
        self.assertEqual(storage.get('some key'), serializable_obj)
    # Reduce maximal storage size for faster test string construction.
    @override_settings(USER_STATE_SIZE_LIMIT=100)
    def test_storage_limit(self) -> None:
        storage = StateHandler(self.bot_profile)
        # Disable marshaling for storing a string whose size is
        # equivalent to the size of the stored object.
        storage.marshal = lambda obj: check_string("obj", obj)
        storage.demarshal = lambda obj: obj
        key = 'capacity-filling entry'
        storage.put(key, 'x' * (settings.USER_STATE_SIZE_LIMIT - len(key)))
        with self.assertRaisesMessage(StateError, "Request exceeds storage limit by 32 characters. "
                                                  "The limit is 100 characters."):
            storage.put('too much data', 'a few bits too long')
        # The limit applies per bot, so the second bot still has room.
        second_storage = StateHandler(self.second_bot_profile)
        second_storage.put('another big entry', 'x' * (settings.USER_STATE_SIZE_LIMIT - 40))
        second_storage.put('normal entry', 'abcd')
    def test_entry_removal(self) -> None:
        storage = StateHandler(self.bot_profile)
        storage.put('some key', 'some value')
        storage.put('another key', 'some value')
        self.assertTrue(storage.contains('some key'))
        self.assertTrue(storage.contains('another key'))
        storage.remove('some key')
        self.assertFalse(storage.contains('some key'))
        self.assertTrue(storage.contains('another key'))
        # Removing a missing key raises rather than silently succeeding.
        self.assertRaises(StateError, lambda: storage.remove('some key'))
    def test_internal_endpoint(self) -> None:
        # Exercises the full PUT/GET/DELETE lifecycle of the
        # /json/bot_storage HTTP endpoint, including error cases.
        self.login_user(self.user_profile)
        # Store some data.
        initial_dict = {'key 1': 'value 1', 'key 2': 'value 2', 'key 3': 'value 3'}
        params = {
            'storage': orjson.dumps(initial_dict).decode(),
        }
        result = self.client_put('/json/bot_storage', params)
        self.assert_json_success(result)
        # Assert the stored data for some keys.
        params = {
            'keys': orjson.dumps(['key 1', 'key 3']).decode(),
        }
        result = self.client_get('/json/bot_storage', params)
        self.assert_json_success(result)
        self.assertEqual(result.json()['storage'], {'key 3': 'value 3', 'key 1': 'value 1'})
        # Assert the stored data for all keys.
        result = self.client_get('/json/bot_storage')
        self.assert_json_success(result)
        self.assertEqual(result.json()['storage'], initial_dict)
        # Store some more data; update an entry and store a new entry
        dict_update = {'key 1': 'new value', 'key 4': 'value 4'}
        params = {
            'storage': orjson.dumps(dict_update).decode(),
        }
        result = self.client_put('/json/bot_storage', params)
        self.assert_json_success(result)
        # Assert the data was updated.
        updated_dict = initial_dict.copy()
        updated_dict.update(dict_update)
        result = self.client_get('/json/bot_storage')
        self.assert_json_success(result)
        self.assertEqual(result.json()['storage'], updated_dict)
        # Assert errors on invalid requests.
        invalid_params = {
            'keys': ["This is a list, but should be a serialized string."],
        }
        result = self.client_get('/json/bot_storage', invalid_params)
        self.assert_json_error(result, 'Argument "keys" is not valid JSON.')
        params = {
            'keys': orjson.dumps(["key 1", "nonexistent key"]).decode(),
        }
        result = self.client_get('/json/bot_storage', params)
        self.assert_json_error(result, "Key does not exist.")
        params = {
            'storage': orjson.dumps({'foo': [1, 2, 3]}).decode(),
        }
        result = self.client_put('/json/bot_storage', params)
        self.assert_json_error(result, "storage contains a value that is not a string")
        # Remove some entries.
        keys_to_remove = ['key 1', 'key 2']
        params = {
            'keys': orjson.dumps(keys_to_remove).decode(),
        }
        result = self.client_delete('/json/bot_storage', params)
        self.assert_json_success(result)
        # Assert the entries were removed.
        for key in keys_to_remove:
            updated_dict.pop(key)
        result = self.client_get('/json/bot_storage')
        self.assert_json_success(result)
        self.assertEqual(result.json()['storage'], updated_dict)
        # Try to remove an existing and a nonexistent key.
        params = {
            'keys': orjson.dumps(['key 3', 'nonexistent key']).decode(),
        }
        result = self.client_delete('/json/bot_storage', params)
        self.assert_json_error(result, "Key does not exist.")
        # Assert an error has been thrown and no entries were removed.
        result = self.client_get('/json/bot_storage')
        self.assert_json_success(result)
        self.assertEqual(result.json()['storage'], updated_dict)
        # Remove the entire storage.
        result = self.client_delete('/json/bot_storage')
        self.assert_json_success(result)
        # Assert the entire storage has been removed.
        result = self.client_get('/json/bot_storage')
        self.assert_json_success(result)
        self.assertEqual(result.json()['storage'], {})
class TestServiceBotConfigHandler(ZulipTestCase):
    """Tests for per-bot configuration data (bot_config) as exposed to
    embedded bots through EmbeddedBotHandler."""
    def setUp(self) -> None:
        super().setUp()
        self.user_profile = self.example_user("othello")
        self.bot_profile = self.create_test_bot('embedded', self.user_profile,
                                                full_name='Embedded bot',
                                                bot_type=UserProfile.EMBEDDED_BOT,
                                                service_name='helloworld')
        self.bot_handler = EmbeddedBotHandler(self.bot_profile)
    def test_basic_storage_and_retrieval(self) -> None:
        # With no config stored, lookups fail unless optional=True.
        with self.assertRaises(ConfigError):
            self.bot_handler.get_config_info('foo')
        self.assertEqual(self.bot_handler.get_config_info('foo', optional=True), {})
        config_dict = {"entry 1": "value 1", "entry 2": "value 2"}
        for key, value in config_dict.items():
            set_bot_config(self.bot_profile, key, value)
        self.assertEqual(self.bot_handler.get_config_info('foo'), config_dict)
        # Updating existing entries and adding new ones merges into the
        # stored configuration.
        config_update = {"entry 2": "new value", "entry 3": "value 3"}
        for key, value in config_update.items():
            set_bot_config(self.bot_profile, key, value)
        config_dict.update(config_update)
        self.assertEqual(self.bot_handler.get_config_info('foo'), config_dict)
    @override_settings(BOT_CONFIG_SIZE_LIMIT=100)
    def test_config_entry_limit(self) -> None:
        # "some key" is 8 characters, so this exactly fills the limit.
        set_bot_config(self.bot_profile, "some key", 'x' * (settings.BOT_CONFIG_SIZE_LIMIT-8))
        self.assertRaisesMessage(ConfigError,
                                 "Cannot store configuration. Request would require 101 characters. "
                                 "The current configuration size limit is 100 characters.",
                                 lambda: set_bot_config(self.bot_profile, "some key", 'x' * (settings.BOT_CONFIG_SIZE_LIMIT-8+1)))
        set_bot_config(self.bot_profile, "some key", 'x' * (settings.BOT_CONFIG_SIZE_LIMIT-20))
        set_bot_config(self.bot_profile, "another key", 'x')
        self.assertRaisesMessage(ConfigError,
                                 "Cannot store configuration. Request would require 116 characters. "
                                 "The current configuration size limit is 100 characters.",
                                 lambda: set_bot_config(self.bot_profile, "yet another key", 'x'))
    def test_load_bot_config_template(self) -> None:
        # The giphy bot ships a config template with a single entry.
        bot_config = load_bot_config_template('giphy')
        self.assertTrue(isinstance(bot_config, dict))
        self.assertEqual(len(bot_config), 1)
    def test_load_bot_config_template_for_bot_without_config_data(self) -> None:
        # Bots without config data get an empty template, not an error.
        bot_config = load_bot_config_template('converter')
        self.assertTrue(isinstance(bot_config, dict))
        self.assertEqual(len(bot_config), 0)
    def test_bot_send_pm_with_empty_recipients_list(self) -> None:
        with self.assertRaisesRegex(EmbeddedBotEmptyRecipientsList, 'Message must have recipients!'):
            self.bot_handler.send_message(message={'type': 'private', 'to': []})
def for_all_bot_types(test_func: Callable[..., None]) -> Callable[..., None]:
    """Decorator: run the wrapped test method once per service-bot type.

    Before each run, the test case's bot_profile is switched to the next
    bot type listed in BOT_TYPE_TO_QUEUE_NAME and saved, so a single test
    body covers outgoing-webhook and embedded bots alike.
    """
    @wraps(test_func)
    def run_for_each_type(*args: object, **kwargs: object) -> None:
        # The first positional argument is the TestCase instance (self).
        assert args
        test_case = cast(TestServiceBotEventTriggers, args[0])
        for candidate_type in BOT_TYPE_TO_QUEUE_NAME:
            test_case.bot_profile.bot_type = candidate_type
            test_case.bot_profile.save()
            test_func(*args, **kwargs)
    return run_for_each_type
class TestServiceBotEventTriggers(ZulipTestCase):
    """End-to-end checks that sending messages publishes (or deliberately
    does not publish) service-bot events on the right queue, repeated for
    every bot type via the for_all_bot_types decorator."""
    def setUp(self) -> None:
        super().setUp()
        self.user_profile = self.example_user("othello")
        self.bot_profile = do_create_user(email="foo-bot@zulip.com",
                                          password="test",
                                          realm=get_realm("zulip"),
                                          full_name="FooBot",
                                          bot_type=UserProfile.OUTGOING_WEBHOOK_BOT,
                                          bot_owner=self.user_profile)
        self.second_bot_profile = do_create_user(email="bar-bot@zulip.com",
                                                 password="test",
                                                 realm=get_realm("zulip"),
                                                 full_name="BarBot",
                                                 bot_type=UserProfile.OUTGOING_WEBHOOK_BOT,
                                                 bot_owner=self.user_profile)
    @for_all_bot_types
    @patch_queue_publish('zerver.lib.actions.queue_json_publish')
    def test_trigger_on_stream_mention_from_user(self, mock_queue_json_publish: mock.Mock) -> None:
        content = '@**FooBot** foo bar!!!'
        recipient = 'Denmark'
        trigger = 'mention'
        message_type = Recipient._type_names[Recipient.STREAM]
        # Validate the queue payload via the mock's side_effect instead of
        # letting the mock silently swallow it.
        def check_values_passed(
            queue_name: Any,
            trigger_event: Dict[str, Any],
            processor: Optional[Callable[[Any], None]] = None,
        ) -> None:
            assert self.bot_profile.bot_type
            self.assertEqual(queue_name, BOT_TYPE_TO_QUEUE_NAME[self.bot_profile.bot_type])
            self.assertEqual(trigger_event["message"]["content"], content)
            self.assertEqual(trigger_event["message"]["display_recipient"], recipient)
            self.assertEqual(trigger_event["message"]["sender_email"], self.user_profile.email)
            self.assertEqual(trigger_event["message"]["type"], message_type)
            self.assertEqual(trigger_event['trigger'], trigger)
            self.assertEqual(trigger_event['user_profile_id'], self.bot_profile.id)
        mock_queue_json_publish.side_effect = check_values_passed
        self.send_stream_message(
            self.user_profile,
            'Denmark',
            content)
        self.assertTrue(mock_queue_json_publish.called)
    @patch_queue_publish('zerver.lib.actions.queue_json_publish')
    def test_no_trigger_on_stream_message_without_mention(self, mock_queue_json_publish: mock.Mock) -> None:
        sender = self.user_profile
        self.send_stream_message(sender, "Denmark")
        self.assertFalse(mock_queue_json_publish.called)
    @for_all_bot_types
    @patch_queue_publish('zerver.lib.actions.queue_json_publish')
    def test_no_trigger_on_stream_mention_from_bot(self, mock_queue_json_publish: mock.Mock) -> None:
        # Bots mentioning other bots must not trigger events.
        self.send_stream_message(
            self.second_bot_profile,
            'Denmark',
            '@**FooBot** foo bar!!!')
        self.assertFalse(mock_queue_json_publish.called)
    @for_all_bot_types
    @patch_queue_publish('zerver.lib.actions.queue_json_publish')
    def test_trigger_on_personal_message_from_user(self, mock_queue_json_publish: mock.Mock) -> None:
        sender = self.user_profile
        recipient = self.bot_profile
        def check_values_passed(
            queue_name: Any,
            trigger_event: Dict[str, Any],
            processor: Optional[Callable[[Any], None]] = None,
        ) -> None:
            assert self.bot_profile.bot_type
            self.assertEqual(queue_name, BOT_TYPE_TO_QUEUE_NAME[self.bot_profile.bot_type])
            self.assertEqual(trigger_event["user_profile_id"], self.bot_profile.id)
            self.assertEqual(trigger_event["trigger"], "private_message")
            self.assertEqual(trigger_event["message"]["sender_email"], sender.email)
            display_recipients = [
                trigger_event["message"]["display_recipient"][0]["email"],
                trigger_event["message"]["display_recipient"][1]["email"],
            ]
            self.assertTrue(sender.email in display_recipients)
            self.assertTrue(recipient.email in display_recipients)
        mock_queue_json_publish.side_effect = check_values_passed
        self.send_personal_message(sender, recipient, 'test')
        self.assertTrue(mock_queue_json_publish.called)
    @for_all_bot_types
    @patch_queue_publish('zerver.lib.actions.queue_json_publish')
    def test_no_trigger_on_personal_message_from_bot(self, mock_queue_json_publish: mock.Mock) -> None:
        sender = self.second_bot_profile
        recipient = self.bot_profile
        self.send_personal_message(sender, recipient)
        self.assertFalse(mock_queue_json_publish.called)
    @for_all_bot_types
    @patch_queue_publish('zerver.lib.actions.queue_json_publish')
    def test_trigger_on_huddle_message_from_user(self, mock_queue_json_publish: mock.Mock) -> None:
        # Both bots in the huddle get their own event; make them the same
        # type so the for_all_bot_types loop exercises each queue.
        self.second_bot_profile.bot_type = self.bot_profile.bot_type
        self.second_bot_profile.save()
        sender = self.user_profile
        recipients = [self.bot_profile, self.second_bot_profile]
        profile_ids = [self.bot_profile.id, self.second_bot_profile.id]
        def check_values_passed(
            queue_name: Any,
            trigger_event: Dict[str, Any],
            processor: Optional[Callable[[Any], None]] = None,
        ) -> None:
            assert self.bot_profile.bot_type
            self.assertEqual(queue_name, BOT_TYPE_TO_QUEUE_NAME[self.bot_profile.bot_type])
            self.assertIn(trigger_event["user_profile_id"], profile_ids)
            # Each bot should be notified exactly once.
            profile_ids.remove(trigger_event["user_profile_id"])
            self.assertEqual(trigger_event["trigger"], "private_message")
            self.assertEqual(trigger_event["message"]["sender_email"], sender.email)
            self.assertEqual(trigger_event["message"]["type"], 'private')
        mock_queue_json_publish.side_effect = check_values_passed
        self.send_huddle_message(sender, recipients, 'test')
        self.assertEqual(mock_queue_json_publish.call_count, 2)
    @for_all_bot_types
    @patch_queue_publish('zerver.lib.actions.queue_json_publish')
    def test_no_trigger_on_huddle_message_from_bot(self, mock_queue_json_publish: mock.Mock) -> None:
        sender = self.second_bot_profile
        recipients = [self.user_profile, self.bot_profile]
        self.send_huddle_message(sender, recipients)
        self.assertFalse(mock_queue_json_publish.called)
| {
"repo_name": "kou/zulip",
"path": "zerver/tests/test_service_bot_system.py",
"copies": "3",
"size": "23393",
"license": "apache-2.0",
"hash": -8525752146779063000,
"line_mean": 43.1377358491,
"line_max": 130,
"alpha_frac": 0.5945795751,
"autogenerated": false,
"ratio": 3.9974367737525633,
"config_test": true,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.6092016348852564,
"avg_score": null,
"num_lines": null
} |
from functools import wraps
from typing import Any, Callable, Dict, Optional, TypeVar, cast
from unittest import mock
import orjson
from django.conf import settings
from django.test import override_settings
from zerver.lib.actions import do_create_user, get_service_bot_events
from zerver.lib.bot_config import ConfigError, load_bot_config_template, set_bot_config
from zerver.lib.bot_lib import EmbeddedBotEmptyRecipientsList, EmbeddedBotHandler, StateHandler
from zerver.lib.bot_storage import StateError
from zerver.lib.test_classes import ZulipTestCase
from zerver.lib.test_helpers import patch_queue_publish
from zerver.lib.validator import check_string
from zerver.models import Recipient, UserProfile, get_realm
# Maps each service-bot type to the event queue its triggers are
# published on (used throughout the tests below).
BOT_TYPE_TO_QUEUE_NAME = {
    UserProfile.OUTGOING_WEBHOOK_BOT: "outgoing_webhooks",
    UserProfile.EMBEDDED_BOT: "embedded_bots",
}
class TestServiceBotBasics(ZulipTestCase):
    """Unit tests for get_service_bot_events, which decides which
    service-bot queues should be notified about a given message."""
    def _get_outgoing_bot(self) -> UserProfile:
        # Helper: create a fresh outgoing-webhook bot owned by Cordelia.
        outgoing_bot = do_create_user(
            email="bar-bot@zulip.com",
            password="test",
            realm=get_realm("zulip"),
            full_name="BarBot",
            bot_type=UserProfile.OUTGOING_WEBHOOK_BOT,
            bot_owner=self.example_user("cordelia"),
            acting_user=None,
        )
        return outgoing_bot
    def test_service_events_for_pms(self) -> None:
        # A PM sent directly to an outgoing-webhook bot should produce a
        # 'private_message'-triggered event for that bot.
        sender = self.example_user("hamlet")
        assert not sender.is_bot
        outgoing_bot = self._get_outgoing_bot()
        assert outgoing_bot.bot_type is not None
        event_dict = get_service_bot_events(
            sender=sender,
            service_bot_tuples=[
                (outgoing_bot.id, outgoing_bot.bot_type),
            ],
            active_user_ids={outgoing_bot.id},
            mentioned_user_ids=set(),
            recipient_type=Recipient.PERSONAL,
        )
        expected = dict(
            outgoing_webhooks=[
                dict(trigger="private_message", user_profile_id=outgoing_bot.id),
            ],
        )
        self.assertEqual(event_dict, expected)
    def test_spurious_mentions(self) -> None:
        sender = self.example_user("hamlet")
        assert not sender.is_bot
        outgoing_bot = self._get_outgoing_bot()
        assert outgoing_bot.bot_type is not None
        # If outgoing_bot is not in mentioned_user_ids,
        # we will skip over it.  This tests an anomaly
        # of the code that our query for bots can include
        # bots that may not actually be mentioned, and it's
        # easiest to just filter them in get_service_bot_events.
        event_dict = get_service_bot_events(
            sender=sender,
            service_bot_tuples=[
                (outgoing_bot.id, outgoing_bot.bot_type),
            ],
            active_user_ids={outgoing_bot.id},
            mentioned_user_ids=set(),
            recipient_type=Recipient.STREAM,
        )
        self.assertEqual(len(event_dict), 0)
    def test_service_events_for_stream_mentions(self) -> None:
        # A stream mention should produce a 'mention' event; a bot that
        # appears in service_bot_tuples but is not actually mentioned
        # (red_herring_bot) must be filtered out of the result.
        sender = self.example_user("hamlet")
        assert not sender.is_bot
        outgoing_bot = self._get_outgoing_bot()
        assert outgoing_bot.bot_type is not None
        cordelia = self.example_user("cordelia")
        red_herring_bot = self.create_test_bot(
            short_name="whatever",
            user_profile=cordelia,
        )
        event_dict = get_service_bot_events(
            sender=sender,
            service_bot_tuples=[
                (outgoing_bot.id, outgoing_bot.bot_type),
                (red_herring_bot.id, UserProfile.OUTGOING_WEBHOOK_BOT),
            ],
            active_user_ids=set(),
            mentioned_user_ids={outgoing_bot.id},
            recipient_type=Recipient.STREAM,
        )
        expected = dict(
            outgoing_webhooks=[
                dict(trigger="mention", user_profile_id=outgoing_bot.id),
            ],
        )
        self.assertEqual(event_dict, expected)
    def test_service_events_for_private_mentions(self) -> None:
        """Service bots should not get access to mentions if they aren't a
        direct recipient."""
        sender = self.example_user("hamlet")
        assert not sender.is_bot
        outgoing_bot = self._get_outgoing_bot()
        assert outgoing_bot.bot_type is not None
        event_dict = get_service_bot_events(
            sender=sender,
            service_bot_tuples=[
                (outgoing_bot.id, outgoing_bot.bot_type),
            ],
            active_user_ids=set(),
            mentioned_user_ids={outgoing_bot.id},
            recipient_type=Recipient.PERSONAL,
        )
        self.assertEqual(len(event_dict), 0)
    def test_service_events_with_unexpected_bot_type(self) -> None:
        # A bot whose bot_type is not a service-bot type should produce
        # no events and log an error instead of crashing.
        hamlet = self.example_user("hamlet")
        cordelia = self.example_user("cordelia")
        bot = self.create_test_bot(
            short_name="whatever",
            user_profile=cordelia,
        )
        wrong_bot_type = UserProfile.INCOMING_WEBHOOK_BOT
        bot.bot_type = wrong_bot_type
        bot.save()
        with self.assertLogs(level="ERROR") as m:
            event_dict = get_service_bot_events(
                sender=hamlet,
                service_bot_tuples=[
                    (bot.id, wrong_bot_type),
                ],
                active_user_ids=set(),
                mentioned_user_ids={bot.id},
                recipient_type=Recipient.PERSONAL,
            )
            self.assertEqual(len(event_dict), 0)
        self.assertEqual(
            m.output,
            [f"ERROR:root:Unexpected bot_type for Service bot id={bot.id}: {wrong_bot_type}"],
        )
class TestServiceBotStateHandler(ZulipTestCase):
    """Tests for per-bot key/value storage (StateHandler) and the
    /json/bot_storage REST endpoint backed by it."""
    def setUp(self) -> None:
        super().setUp()
        # One owner plus two embedded bots, so tests can verify that storage
        # is isolated per bot profile.
        self.user_profile = self.example_user("othello")
        self.bot_profile = do_create_user(
            email="embedded-bot-1@zulip.com",
            password="test",
            realm=get_realm("zulip"),
            full_name="EmbeddedBo1",
            bot_type=UserProfile.EMBEDDED_BOT,
            bot_owner=self.user_profile,
            acting_user=None,
        )
        self.second_bot_profile = do_create_user(
            email="embedded-bot-2@zulip.com",
            password="test",
            realm=get_realm("zulip"),
            full_name="EmbeddedBot2",
            bot_type=UserProfile.EMBEDDED_BOT,
            bot_owner=self.user_profile,
            acting_user=None,
        )
    def test_basic_storage_and_retrieval(self) -> None:
        """put/get/contains round-trips, overwrite, and per-bot isolation."""
        storage = StateHandler(self.bot_profile)
        storage.put("some key", "some value")
        storage.put("some other key", "some other value")
        self.assertEqual(storage.get("some key"), "some value")
        self.assertEqual(storage.get("some other key"), "some other value")
        self.assertTrue(storage.contains("some key"))
        self.assertFalse(storage.contains("nonexistent key"))
        self.assertRaisesMessage(
            StateError, "Key does not exist.", lambda: storage.get("nonexistent key")
        )
        # Overwriting an existing key replaces its value.
        storage.put("some key", "a new value")
        self.assertEqual(storage.get("some key"), "a new value")
        # A different bot has completely separate storage for the same key.
        second_storage = StateHandler(self.second_bot_profile)
        self.assertRaises(StateError, lambda: second_storage.get("some key"))
        second_storage.put("some key", "yet another value")
        self.assertEqual(storage.get("some key"), "a new value")
        self.assertEqual(second_storage.get("some key"), "yet another value")
    def test_marshaling(self) -> None:
        """Non-string (JSON-serializable) values survive a put/get round-trip."""
        storage = StateHandler(self.bot_profile)
        serializable_obj = {"foo": "bar", "baz": [42, "cux"]}
        storage.put("some key", serializable_obj)
        self.assertEqual(storage.get("some key"), serializable_obj)
    # Reduce maximal storage size for faster test string construction.
    @override_settings(USER_STATE_SIZE_LIMIT=100)
    def test_storage_limit(self) -> None:
        storage = StateHandler(self.bot_profile)
        # Disable marshaling for storing a string whose size is
        # equivalent to the size of the stored object.
        storage.marshal = lambda obj: check_string("obj", obj)
        storage.demarshal = lambda obj: obj
        # Fill the quota exactly (key length counts against the limit).
        key = "capacity-filling entry"
        storage.put(key, "x" * (settings.USER_STATE_SIZE_LIMIT - len(key)))
        with self.assertRaisesMessage(
            StateError,
            "Request exceeds storage limit by 32 characters. The limit is 100 characters.",
        ):
            storage.put("too much data", "a few bits too long")
        # The quota is per bot: a second bot can still store data.
        second_storage = StateHandler(self.second_bot_profile)
        second_storage.put("another big entry", "x" * (settings.USER_STATE_SIZE_LIMIT - 40))
        second_storage.put("normal entry", "abcd")
    def test_entry_removal(self) -> None:
        """remove() deletes only the targeted key and errors on a missing one."""
        storage = StateHandler(self.bot_profile)
        storage.put("some key", "some value")
        storage.put("another key", "some value")
        self.assertTrue(storage.contains("some key"))
        self.assertTrue(storage.contains("another key"))
        storage.remove("some key")
        self.assertFalse(storage.contains("some key"))
        self.assertTrue(storage.contains("another key"))
        # Removing an already-removed key raises.
        self.assertRaises(StateError, lambda: storage.remove("some key"))
    def test_internal_endpoint(self) -> None:
        """End-to-end CRUD through the /json/bot_storage HTTP endpoint."""
        self.login_user(self.user_profile)
        # Store some data.
        initial_dict = {"key 1": "value 1", "key 2": "value 2", "key 3": "value 3"}
        params = {
            "storage": orjson.dumps(initial_dict).decode(),
        }
        result = self.client_put("/json/bot_storage", params)
        self.assert_json_success(result)
        # Assert the stored data for some keys.
        params = {
            "keys": orjson.dumps(["key 1", "key 3"]).decode(),
        }
        result = self.client_get("/json/bot_storage", params)
        self.assert_json_success(result)
        self.assertEqual(result.json()["storage"], {"key 3": "value 3", "key 1": "value 1"})
        # Assert the stored data for all keys.
        result = self.client_get("/json/bot_storage")
        self.assert_json_success(result)
        self.assertEqual(result.json()["storage"], initial_dict)
        # Store some more data; update an entry and store a new entry
        dict_update = {"key 1": "new value", "key 4": "value 4"}
        params = {
            "storage": orjson.dumps(dict_update).decode(),
        }
        result = self.client_put("/json/bot_storage", params)
        self.assert_json_success(result)
        # Assert the data was updated.
        updated_dict = initial_dict.copy()
        updated_dict.update(dict_update)
        result = self.client_get("/json/bot_storage")
        self.assert_json_success(result)
        self.assertEqual(result.json()["storage"], updated_dict)
        # Assert errors on invalid requests.
        invalid_params = {
            "keys": ["This is a list, but should be a serialized string."],
        }
        result = self.client_get("/json/bot_storage", invalid_params)
        self.assert_json_error(result, 'Argument "keys" is not valid JSON.')
        params = {
            "keys": orjson.dumps(["key 1", "nonexistent key"]).decode(),
        }
        result = self.client_get("/json/bot_storage", params)
        self.assert_json_error(result, "Key does not exist.")
        params = {
            "storage": orjson.dumps({"foo": [1, 2, 3]}).decode(),
        }
        result = self.client_put("/json/bot_storage", params)
        self.assert_json_error(result, "storage contains a value that is not a string")
        # Remove some entries.
        keys_to_remove = ["key 1", "key 2"]
        params = {
            "keys": orjson.dumps(keys_to_remove).decode(),
        }
        result = self.client_delete("/json/bot_storage", params)
        self.assert_json_success(result)
        # Assert the entries were removed.
        for key in keys_to_remove:
            updated_dict.pop(key)
        result = self.client_get("/json/bot_storage")
        self.assert_json_success(result)
        self.assertEqual(result.json()["storage"], updated_dict)
        # Try to remove an existing and a nonexistent key.
        params = {
            "keys": orjson.dumps(["key 3", "nonexistent key"]).decode(),
        }
        result = self.client_delete("/json/bot_storage", params)
        self.assert_json_error(result, "Key does not exist.")
        # Assert an error has been thrown and no entries were removed.
        result = self.client_get("/json/bot_storage")
        self.assert_json_success(result)
        self.assertEqual(result.json()["storage"], updated_dict)
        # Remove the entire storage.
        result = self.client_delete("/json/bot_storage")
        self.assert_json_success(result)
        # Assert the entire storage has been removed.
        result = self.client_get("/json/bot_storage")
        self.assert_json_success(result)
        self.assertEqual(result.json()["storage"], {})
class TestServiceBotConfigHandler(ZulipTestCase):
    """Tests for per-bot configuration storage (set_bot_config /
    get_config_info) and the embedded-bot config templates."""
    def setUp(self) -> None:
        super().setUp()
        self.user_profile = self.example_user("othello")
        self.bot_profile = self.create_test_bot(
            "embedded",
            self.user_profile,
            full_name="Embedded bot",
            bot_type=UserProfile.EMBEDDED_BOT,
            service_name="helloworld",
        )
        self.bot_handler = EmbeddedBotHandler(self.bot_profile)
    def test_basic_storage_and_retrieval(self) -> None:
        """get_config_info raises (or returns {} when optional) before any
        config is stored, and reflects later set_bot_config updates."""
        with self.assertRaises(ConfigError):
            self.bot_handler.get_config_info("foo")
        self.assertEqual(self.bot_handler.get_config_info("foo", optional=True), {})
        config_dict = {"entry 1": "value 1", "entry 2": "value 2"}
        for key, value in config_dict.items():
            set_bot_config(self.bot_profile, key, value)
        self.assertEqual(self.bot_handler.get_config_info("foo"), config_dict)
        # Updating an existing entry and adding a new one are both reflected.
        config_update = {"entry 2": "new value", "entry 3": "value 3"}
        for key, value in config_update.items():
            set_bot_config(self.bot_profile, key, value)
        config_dict.update(config_update)
        self.assertEqual(self.bot_handler.get_config_info("foo"), config_dict)
    @override_settings(BOT_CONFIG_SIZE_LIMIT=100)
    def test_config_entry_limit(self) -> None:
        # Fill the quota exactly; key length counts toward the limit.
        set_bot_config(self.bot_profile, "some key", "x" * (settings.BOT_CONFIG_SIZE_LIMIT - 8))
        self.assertRaisesMessage(
            ConfigError,
            "Cannot store configuration. Request would require 101 characters. "
            "The current configuration size limit is 100 characters.",
            lambda: set_bot_config(
                self.bot_profile, "some key", "x" * (settings.BOT_CONFIG_SIZE_LIMIT - 8 + 1)
            ),
        )
        # Shrinking the existing entry frees quota for another small one.
        set_bot_config(self.bot_profile, "some key", "x" * (settings.BOT_CONFIG_SIZE_LIMIT - 20))
        set_bot_config(self.bot_profile, "another key", "x")
        self.assertRaisesMessage(
            ConfigError,
            "Cannot store configuration. Request would require 116 characters. "
            "The current configuration size limit is 100 characters.",
            lambda: set_bot_config(self.bot_profile, "yet another key", "x"),
        )
    def test_load_bot_config_template(self) -> None:
        # "giphy" ships with exactly one config field in its template.
        bot_config = load_bot_config_template("giphy")
        self.assertTrue(isinstance(bot_config, dict))
        self.assertEqual(len(bot_config), 1)
    def test_load_bot_config_template_for_bot_without_config_data(self) -> None:
        # "converter" has no config fields; the template is an empty dict.
        bot_config = load_bot_config_template("converter")
        self.assertTrue(isinstance(bot_config, dict))
        self.assertEqual(len(bot_config), 0)
    def test_bot_send_pm_with_empty_recipients_list(self) -> None:
        with self.assertRaisesRegex(
            EmbeddedBotEmptyRecipientsList, "Message must have recipients!"
        ):
            self.bot_handler.send_message(message={"type": "private", "to": []})
FuncT = TypeVar("FuncT", bound=Callable[..., None])
def for_all_bot_types(test_func: FuncT) -> FuncT:
    """Decorator: run the wrapped test once per service bot type, switching
    self.bot_profile.bot_type (and saving it) before each run."""
    @wraps(test_func)
    def _run_for_each_type(*args: object, **kwargs: object) -> None:
        assert args
        self = cast(TestServiceBotEventTriggers, args[0])
        for bot_type in BOT_TYPE_TO_QUEUE_NAME:
            self.bot_profile.bot_type = bot_type
            self.bot_profile.save()
            test_func(*args, **kwargs)
    return cast(FuncT, _run_for_each_type)  # https://github.com/python/mypy/issues/1927
class TestServiceBotEventTriggers(ZulipTestCase):
    """Tests that sending messages enqueues (or does not enqueue) outgoing
    service-bot events, by patching queue_json_publish and inspecting the
    payloads it receives."""
    def setUp(self) -> None:
        super().setUp()
        # A human owner and two outgoing-webhook bots; messages *from* a bot
        # must never trigger events for another bot.
        self.user_profile = self.example_user("othello")
        self.bot_profile = do_create_user(
            email="foo-bot@zulip.com",
            password="test",
            realm=get_realm("zulip"),
            full_name="FooBot",
            bot_type=UserProfile.OUTGOING_WEBHOOK_BOT,
            bot_owner=self.user_profile,
            acting_user=None,
        )
        self.second_bot_profile = do_create_user(
            email="bar-bot@zulip.com",
            password="test",
            realm=get_realm("zulip"),
            full_name="BarBot",
            bot_type=UserProfile.OUTGOING_WEBHOOK_BOT,
            bot_owner=self.user_profile,
            acting_user=None,
        )
    @for_all_bot_types
    @patch_queue_publish("zerver.lib.actions.queue_json_publish")
    def test_trigger_on_stream_mention_from_user(self, mock_queue_json_publish: mock.Mock) -> None:
        content = "@**FooBot** foo bar!!!"
        recipient = "Denmark"
        trigger = "mention"
        message_type = Recipient._type_names[Recipient.STREAM]
        # Installed as the mock's side_effect: validates the queue name and
        # the event payload for every publish call.
        def check_values_passed(
            queue_name: Any,
            trigger_event: Dict[str, Any],
            processor: Optional[Callable[[Any], None]] = None,
        ) -> None:
            assert self.bot_profile.bot_type
            self.assertEqual(queue_name, BOT_TYPE_TO_QUEUE_NAME[self.bot_profile.bot_type])
            self.assertEqual(trigger_event["message"]["content"], content)
            self.assertEqual(trigger_event["message"]["display_recipient"], recipient)
            self.assertEqual(trigger_event["message"]["sender_email"], self.user_profile.email)
            self.assertEqual(trigger_event["message"]["type"], message_type)
            self.assertEqual(trigger_event["trigger"], trigger)
            self.assertEqual(trigger_event["user_profile_id"], self.bot_profile.id)
        mock_queue_json_publish.side_effect = check_values_passed
        self.send_stream_message(self.user_profile, "Denmark", content)
        self.assertTrue(mock_queue_json_publish.called)
    @patch_queue_publish("zerver.lib.actions.queue_json_publish")
    def test_no_trigger_on_stream_message_without_mention(
        self, mock_queue_json_publish: mock.Mock
    ) -> None:
        # A stream message that does not mention the bot publishes nothing.
        sender = self.user_profile
        self.send_stream_message(sender, "Denmark")
        self.assertFalse(mock_queue_json_publish.called)
    @for_all_bot_types
    @patch_queue_publish("zerver.lib.actions.queue_json_publish")
    def test_no_trigger_on_stream_mention_from_bot(
        self, mock_queue_json_publish: mock.Mock
    ) -> None:
        # A mention *sent by another bot* must not trigger events.
        self.send_stream_message(self.second_bot_profile, "Denmark", "@**FooBot** foo bar!!!")
        self.assertFalse(mock_queue_json_publish.called)
    @for_all_bot_types
    @patch_queue_publish("zerver.lib.actions.queue_json_publish")
    def test_trigger_on_personal_message_from_user(
        self, mock_queue_json_publish: mock.Mock
    ) -> None:
        sender = self.user_profile
        recipient = self.bot_profile
        # side_effect validator; see test_trigger_on_stream_mention_from_user.
        def check_values_passed(
            queue_name: Any,
            trigger_event: Dict[str, Any],
            processor: Optional[Callable[[Any], None]] = None,
        ) -> None:
            assert self.bot_profile.bot_type
            self.assertEqual(queue_name, BOT_TYPE_TO_QUEUE_NAME[self.bot_profile.bot_type])
            self.assertEqual(trigger_event["user_profile_id"], self.bot_profile.id)
            self.assertEqual(trigger_event["trigger"], "private_message")
            self.assertEqual(trigger_event["message"]["sender_email"], sender.email)
            display_recipients = [
                trigger_event["message"]["display_recipient"][0]["email"],
                trigger_event["message"]["display_recipient"][1]["email"],
            ]
            self.assertTrue(sender.email in display_recipients)
            self.assertTrue(recipient.email in display_recipients)
        mock_queue_json_publish.side_effect = check_values_passed
        self.send_personal_message(sender, recipient, "test")
        self.assertTrue(mock_queue_json_publish.called)
    @for_all_bot_types
    @patch_queue_publish("zerver.lib.actions.queue_json_publish")
    def test_no_trigger_on_personal_message_from_bot(
        self, mock_queue_json_publish: mock.Mock
    ) -> None:
        sender = self.second_bot_profile
        recipient = self.bot_profile
        self.send_personal_message(sender, recipient)
        self.assertFalse(mock_queue_json_publish.called)
    @for_all_bot_types
    @patch_queue_publish("zerver.lib.actions.queue_json_publish")
    def test_trigger_on_huddle_message_from_user(self, mock_queue_json_publish: mock.Mock) -> None:
        # Both bots must share a bot_type so a single queue is checked below.
        self.second_bot_profile.bot_type = self.bot_profile.bot_type
        self.second_bot_profile.save()
        sender = self.user_profile
        recipients = [self.bot_profile, self.second_bot_profile]
        profile_ids = [self.bot_profile.id, self.second_bot_profile.id]
        # Each publish call must target one of the two bots exactly once
        # (ids are removed from profile_ids as they are seen).
        def check_values_passed(
            queue_name: Any,
            trigger_event: Dict[str, Any],
            processor: Optional[Callable[[Any], None]] = None,
        ) -> None:
            assert self.bot_profile.bot_type
            self.assertEqual(queue_name, BOT_TYPE_TO_QUEUE_NAME[self.bot_profile.bot_type])
            self.assertIn(trigger_event["user_profile_id"], profile_ids)
            profile_ids.remove(trigger_event["user_profile_id"])
            self.assertEqual(trigger_event["trigger"], "private_message")
            self.assertEqual(trigger_event["message"]["sender_email"], sender.email)
            self.assertEqual(trigger_event["message"]["type"], "private")
        mock_queue_json_publish.side_effect = check_values_passed
        self.send_huddle_message(sender, recipients, "test")
        self.assertEqual(mock_queue_json_publish.call_count, 2)
    @for_all_bot_types
    @patch_queue_publish("zerver.lib.actions.queue_json_publish")
    def test_no_trigger_on_huddle_message_from_bot(
        self, mock_queue_json_publish: mock.Mock
    ) -> None:
        sender = self.second_bot_profile
        recipients = [self.user_profile, self.bot_profile]
        self.send_huddle_message(sender, recipients)
        self.assertFalse(mock_queue_json_publish.called)
| {
"repo_name": "punchagan/zulip",
"path": "zerver/tests/test_service_bot_system.py",
"copies": "3",
"size": "22938",
"license": "apache-2.0",
"hash": 643411342700230800,
"line_mean": 39.0314136126,
"line_max": 99,
"alpha_frac": 0.6124771122,
"autogenerated": false,
"ratio": 3.846076458752515,
"config_test": true,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5958553570952516,
"avg_score": null,
"num_lines": null
} |
from functools import wraps
from typing import Any, Callable
from warnings import warn
# Deliberate kill switch: the author broke this module mid-refactor, so it
# fails fast at import time.  Everything defined below is dead code.
raise NotImplementedError("""
This module worked, almost, and then I broke it in an attempt to make it better.
Don't use.
""")
def _first_arg(*args, **kwargs):
if len(args) > 0:
return args[0]
else:
try:
return next(iter(kwargs.values()))
except StopIteration:
raise ValueError("There are no inputs: I can't get the first one!")
def if_first_arg_is_none_return_val(func, val=True):
    """Wrap ``func`` so that any call whose first input is None
    short-circuits to ``val`` instead of calling ``func``."""
    @wraps(func)
    def wrapped_func(*args, **kwargs):
        if _first_arg(*args, **kwargs) is None:
            return val
        return func(*args, **kwargs)
    return wrapped_func
def always_true(exc_type, exc_val, exc_tb):
    """Exception predicate/callback that unconditionally returns True."""
    return True
def always_false(exc_type, exc_val, exc_tb):
    """Exception predicate/callback that unconditionally returns False."""
    return False
class ExcCondition:
    """A medley of exception conditions (to be used with HandleExceptions instances)"""
    always_true = always_true
    always_false = always_false
    handle_all = always_true
    @staticmethod
    def from_exception_classes(*handled_exception_classes):
        """Build a condition that is True for the given exception classes
        (and their subclasses), and False when no exception occurred."""
        def _matches(exc_type, exc_val, exc_tb):
            return issubclass(exc_type, handled_exception_classes)
        return if_first_arg_is_none_return_val(_matches, False)
class ExcCallback:
    """A medley of exception callbacks (to be used with HandleExceptions instances)"""
    always_true = always_true
    always_false = always_false
    ignore = always_true
    @staticmethod
    def raise_on_error(exc_type, exc_val, exc_tb):
        """Suppress nothing: True (suppress) only when there was no exception."""
        return exc_type is None
    @staticmethod
    def warn_and_ignore(msg=None, category=None, stacklevel=1, source=None):
        """Return a callback that emits a warning and suppresses the exception.

        The formatted message is cached via ``nonlocal`` after the first call.
        """
        def _warn_cb(exc_type, exc_val, exc_tb):
            nonlocal msg
            msg = msg or f"{exc_type}: {exc_val}"
            warn(msg, category=category, stacklevel=stacklevel, source=source)
            return True
        return if_first_arg_is_none_return_val(_warn_cb, True)
    @staticmethod
    def print_and_raise(msg=None):
        """Return a callback that prints the message/exception and suppresses it.

        NOTE(review): despite the name, the returned callback returns True,
        which *suppresses* the exception -- confirm intent.
        """
        def _print_cb(exc_type, exc_val, exc_tb):
            print(msg or f"{exc_type}: {exc_val}")
            return True
        return if_first_arg_is_none_return_val(_print_cb, True)
# Loose aliases: a traceback object, and the standard
# (exc_type, exc_value, exc_tb) handler signature used throughout this module.
Traceback = Any
TypeValTbFunc = Callable[[type, Exception, Any], Any]
class HandleExceptions:
    """Context manager with pluggable exit behavior.

    ``condition`` inspects (exc_type, exc_val, exc_tb); when it is truthy,
    ``if_condition`` decides whether to suppress the exception, otherwise
    ``if_not_condition`` does.

    NOTE(review): this class is re-defined later in this module; the later,
    stripped-down definition shadows this name at import time.
    """
    conditions = ExcCondition
    callbacks = ExcCallback

    def __init__(self,
                 condition: TypeValTbFunc,
                 if_condition: TypeValTbFunc = ExcCallback.raise_on_error,
                 if_not_condition: TypeValTbFunc = ExcCallback.ignore):
        self.condition = condition
        self.if_condition = if_condition
        self.if_not_condition = if_not_condition

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        # Dispatch to one of the two callbacks; its return value decides
        # whether the exception (if any) is suppressed.
        matched = self.condition(exc_type, exc_val, exc_tb)
        handler = self.if_condition if matched else self.if_not_condition
        return handler(exc_type, exc_val, exc_tb)
#
#
# if exc_type is None:
# return True
# elif self.exc_condition(exc_type, exc_val, exc_tb):
# return self.exc_callback(exc_type, exc_val, exc_tb)
# else:
# return False
class ModuleNotFoundWarning(HandleExceptions):
    """Will issue a warning when a ModuleNotFoundError is encountered.
    # TODO: doctest, when run, doesn't even find the tests!!!? What the!?! Figure out
    >>> with ModuleNotFoundWarning():
    ...     import collections
    >>> with ModuleNotFoundWarning():
    ...     import asdf
    /D/Dropbox/dev/p3/proj/ut/errors.py:143: UserWarning: It seems you don't have a required package.
      warn(self.msg)
    >>> with ModuleNotFoundWarning():
    ...     0 / 0
    Traceback (most recent call last):
    ...
    ZeroDivisionError: division by zero
    >>>
    """
    def __init__(self):
        # Only ModuleNotFoundError (and subclasses) match the condition;
        # matching errors are turned into a warning and suppressed, while
        # anything else falls through to the default (re-raise) path.
        super().__init__(
            condition=ExcCondition.from_exception_classes(ModuleNotFoundError),
            if_condition=ExcCallback.warn_and_ignore())
class IgnoreErrors(HandleExceptions):
    """Context manager that ignores specific error classes (and their sublcasses)
    >>> with IgnoreErrors(ZeroDivisionError):
    ...     print("all is fine here")
    all is fine here
    >>> with IgnoreErrors(ZeroDivisionError):
    ...     0 / 0  # should be ignored
    >>>
    >>> with IgnoreErrors(ZeroDivisionError):
    ...     assert False
    Traceback (most recent call last):
    ...
    AssertionError
    """
    def __init__(self, *error_classes):
        # Keep the classes around for subclasses (e.g. ExpectErrors' message).
        self.error_classes = error_classes
        super().__init__(
            condition=ExcCondition.from_exception_classes(error_classes),
            if_condition=ExcCallback.ignore,
            if_not_condition=ExcCallback.raise_on_error,
        )
class ExpectedError(RuntimeError):
    """Raised when an error that was expected to occur did not."""
class ExpectErrors(IgnoreErrors):
    """
    Allow ZeroDivisionError errors to happen, ignoring silently:
    >>> with ExpectErrors(ZeroDivisionError):
    ...     0/0
    Allow AssertionError and ValueError errors to happen, ignoring silently:
    >>> with ExpectErrors(AssertionError, ValueError):
    ...     raise ValueError("Some value error")
    ...     assert False
    >>> with ExpectErrors(AssertionError, ValueError):
    ...     raise ValueError("Some value error")
    ...     raise AssertionError("")
    ...     raise TypeError("")
    >>> with ExpectError(TypeError):
    ...     t = 3
    Traceback (most recent call last):
    ...
    NameError: name 'ExpectError' is not defined
    """
    """Context manager that expects some specific error classes (and their sublcasses),
    raising a ExpectedError if those errors don't happen. """
    def __exit__(self, exc_type, exc_val, exc_tb):
        # NOTE(review): the inherited __exit__ also returns True when *no*
        # exception occurred at all (IgnoreErrors' if_not_condition is
        # raise_on_error, which returns True for exc_type None), so on a
        # clean exit ExpectedError is NOT raised here -- this appears to
        # contradict the docstring above; confirm intended semantics.
        expected_error_happened = super().__exit__(exc_type, exc_val, exc_tb)
        if not expected_error_happened:
            raise ExpectedError("Expected one of these errors (or subclasses thereof) to be raised:"
                                f"\n{self.error_classes}")
        return expected_error_happened
class HandleExceptions:
    """Stripped-down re-definition of HandleExceptions.

    NOTE(review): this shadows the fully-featured HandleExceptions defined
    earlier in the module.  It has no __init__, so ``self.condition``,
    ``self.if_condition`` and ``self.if_not_condition`` are never set and
    __exit__ would fail with AttributeError unless a subclass provides them.
    (The whole module raises NotImplementedError at import, so this is dead
    code either way.)
    """
    def __enter__(self):
        return self
    def __exit__(self, exc_type, exc_val, exc_tb):
        if self.condition(exc_type, exc_val, exc_tb):
            return self.if_condition(exc_type, exc_val, exc_tb)
        else:
            return self.if_not_condition(exc_type, exc_val, exc_tb)
#
# class ModuleNotFoundErrorNiceMessage:
# def __init__(self, msg=None):
# self.msg = msg
#
# def __enter__(self):
# pass
#
# def __exit__(self, exc_type, exc_val, exc_tb):
# if exc_type is ModuleNotFoundError:
# if self.msg is not None:
# warn(self.msg)
# else:
# raise ModuleNotFoundError(f"""
# It seems you don't have required `{exc_val.name}` package for this Store.
# Try installing it by running:
#
# pip install {exc_val.name}
#
# in your terminal.
# For more information: https://pypi.org/project/{exc_val.name}
# """)
#
#
# class ModuleNotFoundWarning:
# def __init__(self, msg="It seems you don't have a required package."):
# self.msg = msg
#
# def __enter__(self):
# pass
#
# def __exit__(self, exc_type, exc_val, exc_tb):
# if exc_type is ModuleNotFoundError:
# warn(self.msg)
# return True
#
#
# class ModuleNotFoundIgnore:
# def __enter__(self):
# pass
#
# def __exit__(self, exc_type, exc_val, exc_tb):
# if exc_type is ModuleNotFoundError:
# pass
# return True
| {
"repo_name": "thorwhalen/ut",
"path": "errors.py",
"copies": "1",
"size": "7899",
"license": "mit",
"hash": -2761587804590433300,
"line_mean": 27.6195652174,
"line_max": 101,
"alpha_frac": 0.6027345234,
"autogenerated": false,
"ratio": 3.8607038123167157,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.49634383357167156,
"avg_score": null,
"num_lines": null
} |
from functools import wraps
from typing import Any, Dict, cast
from django.conf import settings
from django.http import HttpRequest, HttpResponse, HttpResponseRedirect
from django.utils.cache import add_never_cache_headers
from django.utils.module_loading import import_string
from django.views.decorators.csrf import csrf_exempt, csrf_protect
from zerver.decorator import (
authenticated_json_view,
authenticated_rest_api_view,
authenticated_uploads_api_view,
process_as_post,
)
from zerver.lib.response import json_method_not_allowed, json_unauthorized
from zerver.lib.types import ViewFuncT
# HTTP methods that rest_dispatch routes to target functions.
METHODS = ('GET', 'HEAD', 'POST', 'PUT', 'DELETE', 'PATCH')
# Known view flags.  Bug fix: the original `('override_api_url_scheme')` was a
# plain string (missing trailing comma), so `x in FLAGS` did *substring*
# matching instead of tuple membership.
FLAGS = ('override_api_url_scheme',)
def default_never_cache_responses(view_func: ViewFuncT) -> ViewFuncT:
    """Patched version of the standard Django never_cache_responses
    decorator that adds headers to a response so that it will never be
    cached, unless the view code has already set a Cache-Control
    header.
    """
    @wraps(view_func)
    def _wrapped_view_func(request: HttpRequest, *args: object, **kwargs: object) -> HttpResponse:
        response = view_func(request, *args, **kwargs)
        # Respect an explicit Cache-Control set by the view; otherwise
        # mark the response as uncacheable.
        if not response.has_header("Cache-Control"):
            add_never_cache_headers(response)
        return response
    return cast(ViewFuncT, _wrapped_view_func)  # https://github.com/python/mypy/issues/1927
@default_never_cache_responses
@csrf_exempt
def rest_dispatch(request: HttpRequest, **kwargs: Any) -> HttpResponse:
    """Dispatch to a REST API endpoint.
    Unauthenticated endpoints should not use this, as authentication is verified
    in the following ways:
    * for paths beginning with /api, HTTP Basic auth
    * for paths beginning with /json (used by the web client), the session token
    This calls the function named in kwargs[request.method], if that request
    method is supported, and after wrapping that function to:
    * protect against CSRF (if the user is already authenticated through
      a Django session)
    * authenticate via an API key (otherwise)
    * coerce PUT/PATCH/DELETE into having POST-like semantics for
      retrieving variables
    Any keyword args that are *not* HTTP methods are passed through to the
    target function.
    Never make a urls.py pattern put user input into a variable called GET, POST,
    etc, as that is where we route HTTP verbs to target functions.
    """
    supported_methods: Dict[str, Any] = {}
    if hasattr(request, "saved_response"):
        # For completing long-polled Tornado requests, we skip the
        # view function logic and just return the response.
        return request.saved_response
    # duplicate kwargs so we can mutate the original as we go
    for arg in list(kwargs):
        if arg in METHODS:
            supported_methods[arg] = kwargs[arg]
            del kwargs[arg]
    # HEAD is served by the GET handler unless explicitly overridden.
    if 'GET' in supported_methods:
        supported_methods.setdefault('HEAD', supported_methods['GET'])
    if request.method == 'OPTIONS':
        response = HttpResponse(status=204)  # No content
        response['Allow'] = ', '.join(sorted(supported_methods.keys()))
        return response
    # Override requested method if magic method=??? parameter exists
    method_to_use = request.method
    if request.POST and 'method' in request.POST:
        method_to_use = request.POST['method']
    if method_to_use in supported_methods:
        entry = supported_methods[method_to_use]
        # Entries are either a dotted-path string, or a (dotted-path, flags)
        # tuple where flags adjust auth/rate-limit behavior below.
        if isinstance(entry, tuple):
            target_function, view_flags = entry
            target_function = import_string(target_function)
        else:
            target_function = import_string(supported_methods[method_to_use])
            view_flags = set()
        # Set request._query for update_activity_user(), which is called
        # by some of the later wrappers.
        request._query = target_function.__name__
        # We want to support authentication by both cookies (web client)
        # and API keys (API clients). In the former case, we want to
        # do a check to ensure that CSRF etc is honored, but in the latter
        # we can skip all of that.
        #
        # Security implications of this portion of the code are minimal,
        # as we should worst-case fail closed if we miscategorise a request.
        # for some special views (e.g. serving a file that has been
        # uploaded), we support using the same url for web and API clients.
        if ('override_api_url_scheme' in view_flags and
                request.META.get('HTTP_AUTHORIZATION', None) is not None):
            # This request uses standard API based authentication.
            # For override_api_url_scheme views, we skip our normal
            # rate limiting, because there are good reasons clients
            # might need to (e.g.) request a large number of uploaded
            # files or avatars in quick succession.
            target_function = authenticated_rest_api_view(skip_rate_limiting=True)(target_function)
        elif ('override_api_url_scheme' in view_flags and
                request.GET.get('api_key') is not None):
            # This request uses legacy API authentication.  We
            # unfortunately need that in the React Native mobile apps,
            # because there's no way to set HTTP_AUTHORIZATION in
            # React Native. See last block for rate limiting notes.
            target_function = authenticated_uploads_api_view(skip_rate_limiting=True)(target_function)
        # /json views (web client) validate with a session token (cookie)
        elif not request.path.startswith("/api") and request.user.is_authenticated:
            # Authenticated via sessions framework, only CSRF check needed
            auth_kwargs = {}
            if 'override_api_url_scheme' in view_flags:
                auth_kwargs["skip_rate_limiting"] = True
            target_function = csrf_protect(authenticated_json_view(target_function, **auth_kwargs))
        # most clients (mobile, bots, etc) use HTTP Basic Auth and REST calls, where instead of
        # username:password, we use email:apiKey
        elif request.META.get('HTTP_AUTHORIZATION', None):
            # Wrap function with decorator to authenticate the user before
            # proceeding
            target_function = authenticated_rest_api_view(
                is_webhook='allow_incoming_webhooks' in view_flags,
            )(target_function)
        # Pick a way to tell user they're not authed based on how the request was made
        else:
            # If this looks like a request from a top-level page in a
            # browser, send the user to the login page
            if 'text/html' in request.META.get('HTTP_ACCEPT', ''):
                # TODO: It seems like the `?next=` part is unlikely to be helpful
                return HttpResponseRedirect(f'{settings.HOME_NOT_LOGGED_IN}?next={request.path}')
            # Ask for basic auth (email:apiKey)
            elif request.path.startswith("/api"):
                return json_unauthorized()
            # Logged out user accessing an endpoint with anonymous user access on JSON; proceed.
            elif request.path.startswith("/json") and 'allow_anonymous_user_web' in view_flags:
                auth_kwargs = dict(allow_unauthenticated=True)
                target_function = csrf_protect(authenticated_json_view(
                    target_function, **auth_kwargs))
            # Session cookie expired, notify the client
            else:
                return json_unauthorized(www_authenticate='session')
        if request.method not in ["GET", "POST"]:
            # process_as_post needs to be the outer decorator, because
            # otherwise we might access and thus cache a value for
            # request.REQUEST.
            target_function = process_as_post(target_function)
        return target_function(request, **kwargs)
    return json_method_not_allowed(list(supported_methods.keys()))
| {
"repo_name": "synicalsyntax/zulip",
"path": "zerver/lib/rest.py",
"copies": "2",
"size": "8029",
"license": "apache-2.0",
"hash": 8468365019141807000,
"line_mean": 46.2294117647,
"line_max": 102,
"alpha_frac": 0.6601071117,
"autogenerated": false,
"ratio": 4.342347214710655,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.6002454326410654,
"avg_score": null,
"num_lines": null
} |
from functools import wraps
from typing import Callable, Any, TypeVar
from threading import Lock
from collections import defaultdict
import datetime
T = TypeVar("T")
def lazy(method: Callable[[Any], T]) -> Callable[[Any], T]:
    """Memoize a no-argument method on its instance.

    The computed value is cached on the instance under ``_lazy__<name>``.
    The returned wrapper also exposes two helpers:
      * ``wrapper._lazy_reset(obj)`` -- drop the cached value (no-op if it
        was never computed)
      * ``wrapper._lazy_set(obj, value)`` -- pre-seed the cached value
    """
    @wraps(method)
    def wrapper(self) -> T:
        s = "_lazy__{}".format(method.__name__)
        try:
            return getattr(self, s)
        except AttributeError:
            value = method(self)
            setattr(self, s, value)
            return value
    def _lazy_reset(self) -> None:
        s = "_lazy__{}".format(method.__name__)
        try:
            delattr(self, s)
        except AttributeError:
            # Bug fix: delattr raises AttributeError (not KeyError) when the
            # attribute is missing; the original caught KeyError, so resetting
            # a never-computed value crashed instead of being a no-op.
            pass
    def _lazy_set(self, value) -> None:
        s = "_lazy__{}".format(method.__name__)
        setattr(self, s, value)
    wrapper._lazy_reset = _lazy_reset
    wrapper._lazy_set = _lazy_set
    return wrapper
def lazy_property(method: Callable[[Any], T]) -> property:
    """Like :func:`lazy`, but exposed as a read-only property."""
    memoized = lazy(method)
    return property(memoized)
class _CacheSegment(object):
def __init__(self) -> None:
self._data = defaultdict(dict)
self._lock = Lock()
def put(self, type: Any, key: Any, value: Any, timeout: int = -1) -> None:
if timeout != 0:
with self._lock:
if timeout != -1:
timeout = datetime.timedelta(seconds=timeout)
self._data[type][key] = (value, timeout, datetime.datetime.now())
def get(self, type: Any, key: Any) -> Any:
with self._lock:
item, timeout, entered = self._data[type][key]
if timeout == -1:
return item
now = datetime.datetime.now()
if now > entered + timeout:
self._data[type].pop(key)
raise KeyError
else:
return item
def get_all(self, type: Any):
with self._lock:
results = []
for key, (item, timeout, entered) in self._data[type].items():
if timeout == -1:
results.append(item)
now = datetime.datetime.now()
if now > entered + timeout:
self._data[type].pop(key)
else:
results.append(item)
return results
def delete(self, type: Any, key: Any) -> None:
with self._lock:
del self._data[type][key]
def contains(self, type: Any, key: Any) -> bool:
with self._lock:
return self._data[type].__contains__(key)
def expire(self, type: Any = None):
if type is None:
types = set(self._data.keys())
else:
types = {type}
for type in types:
for key in self._data[type]:
self.get(type, key)
# TODO: In development. Interface here for beginning integration.
class Cache(_CacheSegment):
    """Public cache facade; currently a plain alias of _CacheSegment."""
    pass
| {
"repo_name": "meraki-analytics/merakicommons",
"path": "merakicommons/cache.py",
"copies": "2",
"size": "2876",
"license": "mit",
"hash": -1832020175033381400,
"line_mean": 28.0505050505,
"line_max": 81,
"alpha_frac": 0.5236439499,
"autogenerated": false,
"ratio": 4.126255380200861,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.564989933010086,
"avg_score": null,
"num_lines": null
} |
from functools import wraps
from typing import Callable
from demisto_sdk.commands.test_content.mock_server import MITMProxy
def run_with_proxy_configured(function: Callable) -> Callable:
    """
    This is a decorator for the 'instance_testing method`.
    This decorator configures the proxy in the server before the instance_testing execution and removes it afterwards.
    Args:
        function: Should be the instance_testing method.
    """
    @wraps(function)
    def decorated(build, *args, **kwargs):
        server_url = f'https://localhost:{build.servers[0].ssh_tunnel_port}'
        build.proxy.configure_proxy_in_demisto(proxy=build.servers[0].internal_ip + ':' + MITMProxy.PROXY_PORT,
                                               username=build.username, password=build.password,
                                               server=server_url)
        try:
            return function(build, *args, **kwargs)
        finally:
            # Bug fix: the original only removed the proxy configuration on
            # the success path, so an exception in `function` left the server
            # permanently configured with the (soon dead) proxy.
            build.proxy.configure_proxy_in_demisto(proxy='',
                                                   username=build.username, password=build.password,
                                                   server=server_url)
    return decorated
| {
"repo_name": "demisto/content",
"path": "Tests/tools.py",
"copies": "1",
"size": "1195",
"license": "mit",
"hash": 5155368866135028000,
"line_mean": 44.9615384615,
"line_max": 118,
"alpha_frac": 0.6108786611,
"autogenerated": false,
"ratio": 4.631782945736434,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.0021526349035445457,
"num_lines": 26
} |
from functools import wraps
from typing import Callable
from flask import abort, g, redirect, request, url_for
from . import app
from .models import Assignment
from .viewmodel import get_source_repos
def login_required(f: Callable):
    """A view function decorator that requires the user is logged in."""
    if not app.config['REQUIRE_LOGIN']:
        # Login enforcement disabled — return the view unmodified.
        return f

    @wraps(f)
    def guarded(*args, **kwargs):
        if g.user is not None:
            return f(*args, **kwargs)
        # Anonymous user: bounce to the login page, preserving the target URL.
        return redirect(url_for('login', next=request.url))
    return guarded
def requires_access(model_name: str):
    """A view function decorator that guards access to a model.

    Args:
        model_name: The decorated function should take a keyword argument named
            `model_name` + "_id", whose value is the database id of the model.
    """
    def wrapper(f):
        if not app.config['REQUIRE_LOGIN']:
            # Access control disabled — return the view unmodified.
            return f

        @wraps(f)
        def guarded(*args, **kwargs):
            target_id = kwargs[model_name + '_id']
            if g.user is None:
                return redirect(url_for('login', next=request.url))
            if not user_has_access(g.user, model_name, target_id):
                abort(401)
            return f(*args, **kwargs)
        return guarded
    return wrapper
def user_has_access(user, model_name: str, object_id: int) -> bool:
    """Determine whether *user* may access the specified instance of *model_name*.

    Arguments:
        model_name: hardcoded to one of 'assignment' or 'repo'

    Returns:
        True iff user has access to the instance of model_name

    Note:
        Helper for `requires_access`.
    """
    # An assignment's access is decided by its owning repo.
    if model_name == 'assignment':
        assignment = Assignment.query.get(object_id)
        model_name, object_id = 'repo', assignment.repo_id
    assert model_name == 'repo'
    accessible_ids = {repo.id for repo in get_source_repos(user)}
    return object_id in accessible_ids
| {
"repo_name": "osteele/assignment-dashboard",
"path": "assignment_dashboard/decorators.py",
"copies": "2",
"size": "1992",
"license": "mit",
"hash": -6422927633233379000,
"line_mean": 30.619047619,
"line_max": 101,
"alpha_frac": 0.6345381526,
"autogenerated": false,
"ratio": 4.024242424242424,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5658780576842425,
"avg_score": null,
"num_lines": null
} |
from functools import wraps
from typing import Callable
from .. import uwsgi
class Lock:
    """Locks related stuff.

    Lock number 0 is always available. More locks need to be registered
    with ``.config.locking.set_basic_params(count=X)`` where ``X`` is the number of locks.

    .. note:: The same lock should be released before next acquiring.

    Can be used as context manager:

    .. code-block:: python

        with Lock():
            do()

    Can be used as a decorator:

    .. code-block:: python

        @Lock()
        def do():
            pass

    """
    __slots__ = ['num']

    def __init__(self, num: int = 0):
        """
        :param num: Lock number (0-64). 0 is always available and is used as default.
        """
        self.num = num

    def __int__(self):
        return self.num

    def __call__(self, func: Callable):
        # Decorator form: run the wrapped callable under this lock.
        @wraps(func)
        def wrapper(*args, **kwargs):
            with self:
                return func(*args, **kwargs)
        return wrapper

    @property
    def is_set(self) -> bool:
        # DOC FIX: the original docstring opened with four quotes, leaving a
        # stray '"' at the start of the rendered documentation.
        """Checks whether the lock is active.

        :raises ValueError: For Spooler or invalid lock number

        """
        return uwsgi.is_locked(self.num)

    def acquire(self):
        """Sets the lock.

        :raises ValueError: For Spooler or invalid lock number

        """
        uwsgi.lock(self.num)
        return True

    def release(self):
        """Unlocks the lock.

        :raises ValueError: For Spooler or invalid lock number

        """
        uwsgi.unlock(self.num)
        return True

    __enter__ = acquire

    def __exit__(self, exc_type, exc_value, traceback):
        self.release()


lock = Lock
"""Convenience alias for ``Lock``."""
| {
"repo_name": "idlesign/uwsgiconf",
"path": "uwsgiconf/runtime/locking.py",
"copies": "1",
"size": "1772",
"license": "bsd-3-clause",
"hash": -6261080978706902000,
"line_mean": 19.367816092,
"line_max": 90,
"alpha_frac": 0.5485327314,
"autogenerated": false,
"ratio": 4.219047619047619,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.000259964574319025,
"num_lines": 87
} |
from functools import wraps
from typing import Callable
from .. import uwsgi
# Re-exported shortcuts to the underlying uwsgi module's control functions.
stop = uwsgi.stop
reload = uwsgi.reload
disconnect = uwsgi.disconnect
set_process_name = uwsgi.setprocname
class harakiri_imposed:
    """Decorator and context manager.

    Allows temporarily setting harakiri timeout for a function or a code block.

    .. note:: This is for workers, mules and spoolers.

    Examples:

        .. code-block:: python

            @harakiri_imposed(1)
            def doomed():
                do()

        .. code-block:: python

            with harakiri_imposed(10):
                do()

    """
    def __init__(self, timeout: int):
        """
        :param timeout: Timeout (seconds) before harakiri.
        """
        self._timeout = timeout

    def __call__(self, func: Callable):
        # Capture the timeout at decoration time.
        timeout = self._timeout

        @wraps(func)
        def guarded(*args, **kwargs):
            uwsgi.set_user_harakiri(timeout)
            try:
                # Always reset harakiri, even when the call raises.
                return func(*args, **kwargs)
            finally:
                uwsgi.set_user_harakiri(0)
        return guarded

    def __enter__(self):
        uwsgi.set_user_harakiri(self._timeout)

    def __exit__(self, exc_type, exc_val, exc_tb):
        uwsgi.set_user_harakiri(0)
| {
"repo_name": "idlesign/uwsgiconf",
"path": "uwsgiconf/runtime/control.py",
"copies": "1",
"size": "1294",
"license": "bsd-3-clause",
"hash": -2147119245782430500,
"line_mean": 18.9076923077,
"line_max": 79,
"alpha_frac": 0.5610510046,
"autogenerated": false,
"ratio": 3.874251497005988,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9935302501605988,
"avg_score": 0,
"num_lines": 65
} |
from functools import wraps
from typing import Dict, Optional, Union
import jembatan.core.spandex as spandex
def process_default_view(f):
    """
    For single-view analysis functions it's often easier to define manipulations in terms
    of the view/spandex object instead of having to get the view from the parent jem object.

    Wraps a ``process(self, jemdoc, ...)`` method so the implementation receives
    the document's default view instead of the whole JembatanDoc.
    """
    @wraps(f)
    def process_wrapper(self, jemdoc: spandex.JembatanDoc, *args, **kwds):
        spndx = jemdoc.default_view
        # BUG FIX: forward positional args too — previously they were
        # accepted by the wrapper but silently dropped.
        return f(self, spndx, *args, **kwds)
    return process_wrapper
class AnalysisFunction(object):
    """
    Convenience base class for annotators that process JembatanDoc objects.

    Thanks to python duck-typing, any plain function accepting a JembatanDoc —
    or any object overriding ``__call__(self, jemdoc)`` — works equally well
    as an annotator.
    """

    def process(self, jemdoc: spandex.JembatanDoc, **kwargs):
        """
        Override this method to define annotator behavior, typically adding
        annotations or data to the document.

        Args:
            jemdoc(:obj:`JembatanDoc`) - JembatanDoc object to process
            **kwargs - Arbitrary keyword arguments, typically defined per
                AnalysisFunction.  Use these to inject behavior changes that
                can not be accounted for during initialization.
        """
        pass

    def __call__(self, jemdoc: spandex.JembatanDoc, **kwargs):
        """
        Invoke the analysis function.  In most cases this should not be
        overridden; subclasses should override ``process`` instead.

        Args:
            jemdoc(:obj:`JembatanDoc`) - JembatanDoc object to process
            **kwargs - Arbitrary keyword arguments forwarded to ``process``
        """
        self.process(jemdoc, **kwargs)
class AggregateAnalysisFunction(AnalysisFunction):
    """ A 'simple' class for orchestrating annotators which serially process a JembatanDoc object.

    Beyond simply passing the same JembatanDoc between annotators,
    `AggregateAnalysisFunction` has support for mapping of view names.
    This allows annotators which operate in specific views to be reused on
    different views without need for re-instantiating or re-configuring
    the annotator.

    The most common use case is for running single view components, which process the default view,
    on alternative views (e.g. a tokenizer reused on "gold" and "test" views).

    Usage:
        # assume spandex has views "gold" and "test", and sentence_annotator
        # is a single-view function/functor that manipulates the default view

        jemdoc = ...
        agg_pipeline = AggregateAnalysisFunction()
        agg_pipeline.add(sentence_annotator)
        agg_pipeline.add(sentence_annotator, {spandex.constants.SPANDEX_DEFAULT_VIEW: "gold"})
        agg_pipeline.add(sentence_annotator, {spandex.constants.SPANDEX_DEFAULT_VIEW: "test"})
        agg_pipeline(jemdoc)
    """

    def __init__(self):
        """
        Create empty Aggregate Annotator
        """
        self.annotators = []
        self.view_maps = []
        self.af_kwargs_list = []

    def add(self, analysis_func: AnalysisFunction, view_map: Optional[Dict[str, str]]=None, **kwargs):
        """ Add analysis function to pipeline

        Args:
            analysis_func: a function or an object with
                '__call__(jemdoc, **kwargs)' implemented, which accepts and
                processes a JembatanDoc object
            view_map (dict, optional): A dictionary mapping between the
                views used internally by the analysis function and the views
                present in the spandex.  Default of None indicates no mapping.
            **kwargs: extra parameters passed to the analysis function at
                process time, allowing reuse of components without
                initializing a new object.
        """
        self.annotators.append(analysis_func)
        self.view_maps.append(view_map)
        self.af_kwargs_list.append(kwargs)

    def process(self, jemdoc: spandex.JembatanDoc, **kwargs):
        """
        Runs the aggregate analysis function (pipeline) defined through calls
        to the `add` method.

        Args:
            jemdoc (:obj:`JembatanDoc`): JembatanDoc document object to process
            **kwargs: Arbitrary keyword arguments. Not currently used
        """
        # Cleanup: the original enumerated the pipeline into an unused `step`
        # variable; plain zip iteration is sufficient.
        for annotator, view_map, af_kwargs in zip(self.annotators, self.view_maps, self.af_kwargs_list):
            if view_map:
                # Wrap the document so the annotator sees its expected view names.
                mapped_jemdoc = spandex.ViewMappedJembatanDoc(jemdoc, view_map)
            else:
                mapped_jemdoc = jemdoc
            annotator(mapped_jemdoc, **af_kwargs)
| {
"repo_name": "leebecker/jembatan",
"path": "jembatan/core/af/__init__.py",
"copies": "1",
"size": "5767",
"license": "apache-2.0",
"hash": 7696643039530299000,
"line_mean": 40.1928571429,
"line_max": 115,
"alpha_frac": 0.6608288538,
"autogenerated": false,
"ratio": 4.382218844984802,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5543047698784802,
"avg_score": null,
"num_lines": null
} |
from functools import wraps
from typing import List, Mapping, Union, Any
from django.http import QueryDict, HttpResponseNotAllowed
from django.utils.decorators import available_attrs
from util.strutils import TemplateString
def make_querydict_from_request(func):
    """View decorator: for request methods other than GET/POST, parse the raw
    request body into a QueryDict stored on the request (e.g. ``request.DELETE``)
    before invoking the view."""
    @wraps(func, assigned=available_attrs(func))
    def wrapped(request, *args, **kwargs):
        if request.method not in ('GET', 'POST'):
            read_request_body_to(request, request.method)
        return func(request, *args, **kwargs)
    return wrapped
def require_http_methods_plus(method_types: List[str], required_args: Union[Mapping[str, List[str]], List[str]]=None,
                              method_props: Mapping[str, List[str]]=None):
    """
    Enhances the possible functionality of the standard require_http_methods function from django.

    If just method_types is provided then this decorator acts exactly the same way as the standard function.
    If required_args is provided as a list then whatever method type is provided, the view arguments are verified to
    ensure all arguments in required_args appear as a key in the view arguments.
    If required_args is provided as a dictionary, then if the method type appears in that dictionary as a key it must
    be mapped to a list of argument names, which is then verified the same way as above.
    If method_props is provided it must be a dictionary. If the method type appears in that dictionary then it must
    be mapped to a list of property names, which is checked against the properties in the
    QueryDict for our request method. If any property is missing, an error occurs.

    :param method_types: List of Request Method names allowed to pass through
    :param required_args: List of required arguments for any request method or Map from Request Method names to
        required arguments
    :param method_props: Map from Request Method names to required method properties
    """
    if required_args is None:
        required_args = {}
    if method_props is None:
        method_props = {}

    invalid_type = TemplateString("{method} is not one of the allowed request methods ({types})!")
    missing_props = TemplateString("Request of type {method} must have following properties: {props}")
    missing_args = TemplateString("Request missing arguments. Has {args}, missing {missing}")

    def decorator(func):
        @wraps(func, assigned=available_attrs(func))
        def inner(request, *args, **kwargs):
            # Verify method is at least a valid method type
            if request.method not in method_types:
                print("METHOD NOT ALLOWED", invalid_type(method=request.method, types=method_types))
                return HttpResponseNotAllowed(method_types)

            # Check that all required properties are in the QueryDict, if any are required
            method_dict = getattr(request, request.method)
            required_props = method_props.get(request.method, [])
            if any(prop not in method_dict for prop in required_props):
                print("METHOD NOT ALLOWED", missing_props(method=request.method, props=method_props))
                return HttpResponseNotAllowed(
                    method_types,
                    reason=missing_props(method=request.method, props=method_props)
                )

            # Sanitize: required_args is either a list or a per-method map; normalize to a list
            required_args_list = required_args
            if isinstance(required_args, dict):
                required_args_list = required_args.get(request.method, [])

            # Check that all required arguments exist in the view arguments
            missing_args_list = [x for x in required_args_list if x not in kwargs]
            if missing_args_list:
                # BUG FIX: the original used list subtraction
                # (required_args_list - missing_args_list), which raises
                # TypeError; compute the present args with a comprehension and
                # call the template with keyword arguments consistently.
                present_args = [x for x in required_args_list if x in kwargs]
                print("METHOD NOT ALLOWED", missing_args(args=present_args, missing=missing_args_list))
                return HttpResponseNotAllowed(
                    method_types,
                    reason=missing_args(args=present_args, missing=missing_args_list)
                )
            return func(request, *args, **kwargs)
        return inner
    return decorator
def ajax_success(**kwargs) -> dict:
    """Return the keyword arguments as an AJAX payload marked successful."""
    return {**kwargs, 'success': True}
def ajax_failure(**kwargs) -> dict:
    """Return the keyword arguments as an AJAX payload marked unsuccessful."""
    return {**kwargs, 'success': False}
def is_safe_request(method: str) -> bool:
    """Return True iff the (possibly wrapped) request method is GET or HEAD.

    Accepts either a method-name string or any object chain exposing a
    ``.method`` attribute (e.g. a request object), which is unwrapped until a
    plain value remains.
    """
    target = method
    while hasattr(target, 'method'):
        target = target.method
    return target in ('GET', 'HEAD')
def read_request_body_to_post(request) -> None:
    """
    Parse the raw request body into ``request.POST``. By default only the GET
    QueryDict exists for such requests.

    :param request: Request object
    """
    request.POST = QueryDict(request.body)
def read_request_body_to(request, method: str='POST') -> None:
    """
    Parse the raw request body into a QueryDict stored on the request under the
    upper-cased method name.

    read_request_body_to(req)            -> req.POST now exists
    read_request_body_to(req, "HEAD")    -> req.HEAD now exists
    read_request_body_to(req, "delete")  -> req.DELETE now exists

    :param request: Request object to annotate
    :param method: attribute (request method) name to store the QueryDict under
    """
    attr_name = method.upper()
    setattr(request, attr_name, QueryDict(request.body))
| {
"repo_name": "FRC-RS/FRS",
"path": "util/viewutils.py",
"copies": "1",
"size": "5464",
"license": "mit",
"hash": 9061427298883531000,
"line_mean": 41.6875,
"line_max": 125,
"alpha_frac": 0.6740483163,
"autogenerated": false,
"ratio": 4.367705835331734,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5541754151631735,
"avg_score": null,
"num_lines": null
} |
from functools import wraps
from typing import List, TypeVar, Callable, Tuple, Dict
from typing import Optional
from genes.lib.logging import log_warn, log_error
from genes.lib.traits import ErrorLevel
from genes.linux.traits import get_distro, get_version, get_codename
T = TypeVar('T')
def is_alpine(versions: Optional[List[str]] = None) -> bool:
    """Return True iff the current distro is alpine (optionally at a matching version).

    A version matches when either the numeric version or the codename appears in
    *versions*; with no (or an empty) versions list, any alpine release matches.
    """
    version_ok = True
    if versions:
        version_ok = get_version() in versions or get_codename() in versions
    return get_distro() == 'alpine' and version_ok
def only_alpine(error_level: ErrorLevel = ErrorLevel.warn, versions: Optional[List[str]] = None):
    """Decorator factory: only run the wrapped function on Alpine Linux.

    :param error_level: behavior off-alpine — ``warn`` logs and returns None,
        ``error`` logs and raises OSError; anything else silently returns None.
    :param versions: optional list of acceptable versions/codenames.
    """
    msg = "This function can only be run in alpine"

    def wrapper(func: Callable[[Tuple, Dict], T]) -> Callable:
        # BUG FIX: the original used bare ``@wraps`` (not ``@wraps(func)``),
        # which replaced the wrapper with a functools.partial of
        # update_wrapper, breaking every decorated function.
        @wraps(func)
        def run_if_alpine(*args: Tuple, **kwargs: Dict) -> Optional[T]:
            if is_alpine(versions=versions):
                return func(*args, **kwargs)
            elif error_level == ErrorLevel.warn:
                log_warn(msg, func.__name__)
                return None
            elif error_level == ErrorLevel.error:
                log_error(msg, func.__name__)
                raise OSError(msg, func.__name__)
            else:
                return None
        return run_if_alpine
    return wrapper
| {
"repo_name": "hatchery/Genepool2",
"path": "genes/alpine/traits.py",
"copies": "2",
"size": "1277",
"license": "mit",
"hash": -3853007189620068400,
"line_mean": 32.6052631579,
"line_max": 97,
"alpha_frac": 0.6217697729,
"autogenerated": false,
"ratio": 3.8119402985074626,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5433710071407463,
"avg_score": null,
"num_lines": null
} |
from functools import wraps
from typing import *
from uuid import uuid4 as uuid
import observable
from pros.common import logger
_uuid_table = dict() # type: Dict[str, Observable]
class Observable(observable.Observable):
    """
    Wrapper class for the observable package for use in interactive UI. It registers itself with a global registry
    to facilitate updates from any context (e.g. from a renderer).
    """

    @classmethod
    def notify(cls, uuid, event, *args, **kwargs):
        """
        Triggers an Observable given its UUID. See arguments for Observable.trigger
        """
        # Accept either an Observable instance or its UUID string.
        if isinstance(uuid, Observable):
            uuid = uuid.uuid
        if uuid in _uuid_table:
            _uuid_table[uuid].trigger(event, *args, **kwargs)
        else:
            # Unknown UUID: log (with sentry reporting) rather than raise.
            logger(__name__).warning(f'Could not find an Observable to notify with UUID: {uuid}', sentry=True)

    def on(self, event, *handlers,
           bound_args: Tuple[Any, ...] = None, bound_kwargs: Dict[str, Any] = None,
           asynchronous: bool = False) -> Callable:
        """
        Sets up a callable to be called whenever "event" is triggered

        :param event: Event to bind to. Most classes expose an e.g. "on_changed" wrapper which provides the correct
                      event string
        :param handlers: A list of Callables to call when event is fired
        :param bound_args: Bind ordered arguments to the Callable. These are supplied before the event's supplied
                           arguments
        :param bound_kwargs: Bind keyword arguments to the Callable. These are supplied before the event's supplied
                             kwargs. They should not conflict with the supplied event kwargs
        :param asynchronous: If true, the Callable will be called in a new thread. Useful if the work to be done from
                             an event takes a long time to process
        :return:
        """
        if bound_args is None:
            bound_args = []
        if bound_kwargs is None:
            bound_kwargs = {}
        if asynchronous:
            def bind(h):
                # Run the handler on a fresh thread; with_click_context carries
                # the current click context onto that thread.
                def bound(*args, **kw):
                    from threading import Thread
                    from pros.common.utils import with_click_context
                    t = Thread(target=with_click_context(h), args=(*bound_args, *args), kwargs={**bound_kwargs, **kw})
                    t.start()
                    return t
                return bound
        else:
            def bind(h):
                # Synchronous: call inline, bound arguments before event arguments.
                @wraps(h)
                def bound(*args, **kw):
                    return h(*bound_args, *args, **bound_kwargs, **kw)
                return bound
        return super(Observable, self).on(event, *[bind(h) for h in handlers])

    def trigger(self, event, *args, **kw):
        # Log every trigger to aid debugging of interactive UI event flow.
        logger(__name__).debug(f'Triggered {self.uuid} ({type(self).__name__}) "{event}" event: {args} {kw}')
        return super().trigger(event, *args, **kw)

    def __init__(self):
        # Register this instance in the module-level table so
        # Observable.notify can reach it from any context.
        self.uuid = str(uuid())
        _uuid_table[self.uuid] = self
        super(Observable, self).__init__()
| {
"repo_name": "purduesigbots/pros-cli",
"path": "pros/common/ui/interactive/observable.py",
"copies": "1",
"size": "3113",
"license": "mpl-2.0",
"hash": -3671372661175734000,
"line_mean": 38.9102564103,
"line_max": 118,
"alpha_frac": 0.5772566656,
"autogenerated": false,
"ratio": 4.359943977591036,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5437200643191036,
"avg_score": null,
"num_lines": null
} |
from functools import wraps
from typing import *
from couchbase_core.supportability import internal
from .options import Cardinal, OptionBlock, OptionBlockTimeOut
from couchbase_core.durability import Durability
from datetime import timedelta
try:
from typing import TypedDict
except:
from typing_extensions import TypedDict
# Replication/persistence factors are expressed as Cardinal values.
ReplicateTo = Cardinal
PersistTo = Cardinal

# Generic option-block type variable used in method-level type comments below.
T = TypeVar('T', bound=OptionBlock)
class DurabilityTypeBase(dict):
    """Dict-backed base for durability settings; expands *content* into keyword pairs."""
    def __init__(self, content):
        super().__init__(**content)
class DurabilityType(dict):
    @internal
    def __init__(self,  # type: DurabilityType
                 content  # type: Dict[str, Any]
                 ):
        # type: (...) -> None
        """
        Durability configuration options

        :param content: dictionary passed up from subclasses
        """
        super().__init__(content)
class ClientDurability(DurabilityType):
    Storage = TypedDict('Storage', {'replicate_to': ReplicateTo, 'persist_to': PersistTo}, total=True)

    def __init__(self,  # type: T
                 replicate_to=ReplicateTo.NONE,  # type: ReplicateTo
                 persist_to=PersistTo.NONE  # type: PersistTo
                 ):
        # type: (...) -> None
        """
        Client Durability

        :param persist_to: If set, wait for the item to be removed
            from the storage of at least these many nodes

        :param replicate_to: If set, wait for the item to be removed
            from the cache of at least these many nodes
            (excluding the master)
        """
        payload = ClientDurability.Storage(replicate_to=replicate_to, persist_to=persist_to)
        super().__init__(payload)
class ServerDurability(DurabilityType):
    Storage = TypedDict('Storage', {'level': Durability}, total=True)

    def __init__(self,  # type: ServerDurability
                 level,  # type: Durability
                 ):
        # type: (...) -> None
        """
        Server-based Durability (Synchronous Replication)

        :param Durability level: durability level
        """
        payload = ServerDurability.Storage(level=level)
        super().__init__(payload)
class ClientDurableOptionBlock(OptionBlockTimeOut):
    def __init__(self,  # type: ClientDurableOptionBlock
                 timeout=None,  # type: timedelta
                 durability=None  # type: ClientDurability
                 ):
        # type: (...) -> None
        """
        Options for operations carrying client-type durability requirements.

        :param durability: Client durability settings
        :param timeout: Timeout for operation
        """
        super().__init__(durability=durability, timeout=timeout)
class ServerDurableOptionBlock(OptionBlockTimeOut):
    def __init__(self,  # type: ServerDurableOptionBlock
                 timeout=None,  # type: timedelta
                 durability=None  # type: ServerDurability
                 ):
        # type: (...) -> None
        """
        Options for operations carrying server-type durability requirements.

        :param durability: Server durability settings
        :param timeout: Timeout for operation
        """
        super().__init__(durability=durability, timeout=timeout)
class DurabilityOptionBlock(OptionBlockTimeOut):
    def __init__(self,  # type: DurabilityOptionBlock
                 timeout=None,  # type: timedelta
                 durability=None,  # type: DurabilityType
                 expiry=None,  # type: timedelta
                 **kwargs):
        # type: (...) -> None
        """
        Options for operations accepting any type of durability.

        :param durability: Durability settings
        :param expiry: When any mutation should expire
        :param timeout: Timeout for operation
        """
        super().__init__(durability=durability, expiry=expiry, timeout=timeout, **kwargs)

    @property
    def expiry(self):
        """Expiry stored on this option block, or None when not set."""
        return self.get('expiry')
| {
"repo_name": "couchbase/couchbase-python-client",
"path": "couchbase/durability.py",
"copies": "1",
"size": "4102",
"license": "apache-2.0",
"hash": 8544462539466757000,
"line_mean": 31.816,
"line_max": 122,
"alpha_frac": 0.6133593369,
"autogenerated": false,
"ratio": 4.259605399792315,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.001608124150479709,
"num_lines": 125
} |
from functools import wraps
from unittest import mock
from django.contrib.auth.models import User
from django.test.testcases import TestCase
from django.urls.base import reverse
from builds.factories import ProjectFactory
def authenticated_test_case(f):
    """Test-method decorator: creates 'test_user' and logs the test client in
    before running the wrapped test."""
    @wraps(f)
    def inner(self, *args, **kwargs):
        user = User(username='test_user')
        user.set_password('test_pass')
        user.save()
        self.user = user
        self.assertTrue(self.client.login(username='test_user', password='test_pass'))
        return f(self, *args, **kwargs)
    return inner
class APIBuildsTests(TestCase):
    @mock.patch('django.db.transaction.on_commit')
    @authenticated_test_case
    def test_create(self, mock_on_commit):
        # NOTE(review): transaction.on_commit is patched so post-commit
        # callbacks are captured rather than executed.
        project = ProjectFactory()
        r = self.client.post(
            reverse('builds:api_builds', args=[project.id]),
            {
                'commit_hash': 'asdf',
            }
        )
        self.assertEqual(r.status_code, 201)
        build = project.builds.get()
        # Exactly one on-commit callback should have been scheduled.
        self.assertEqual(mock_on_commit.call_count, 1)
        self.assertEqual(build.commit_hash, 'asdf')
class APIScreenshots(TestCase):
    """Placeholder: screenshot API tests not yet implemented."""
    pass
| {
"repo_name": "karamanolev/persephone",
"path": "persephone/builds/tests/test_views.py",
"copies": "1",
"size": "1163",
"license": "mit",
"hash": -9179468938144267000,
"line_mean": 27.3658536585,
"line_max": 86,
"alpha_frac": 0.6474634566,
"autogenerated": false,
"ratio": 3.8131147540983608,
"config_test": true,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4960578210698361,
"avg_score": null,
"num_lines": null
} |
#from functools import wraps
from unittest import TestCase
from IPython import embed
import numpy as np
from peewee import *
from playhouse.postgres_ext import *
# Shared Postgres test database; extension/schema/role are provisioned at import time.
db = PostgresqlExtDatabase(database='gkdb_test')
db.execute_sql('CREATE EXTENSION IF NOT EXISTS hstore;')
db.execute_sql('CREATE SCHEMA IF NOT EXISTS develop;')
# Create the 'testuser' role only when it does not exist yet.
cur = db.execute_sql("SELECT rolname, rolsuper FROM pg_roles WHERE rolname = 'testuser';")
if len(cur.fetchall()) == 0:
    db.execute_sql('CREATE ROLE testuser;')
class DatabaseTestCase(TestCase):
    """TestCase that reopens a fresh connection to the test database per test."""
    database = db

    def setUp(self):
        # Close any lingering connection so each test starts clean.
        if not self.database.is_closed():
            self.database.close()
        self.database.connect()
        super().setUp()

    def tearDown(self):
        super().tearDown()
        self.database.close()

    def execute(self, sql, params=None):
        """Run raw SQL against the test database."""
        return self.database.execute_sql(sql, params)
class ModelDatabaseTestCase(DatabaseTestCase):
    """Points each model in ``requires`` at the test database for the test's duration."""
    database = db
    requires = None

    def setUp(self):
        super().setUp()
        self._db_mapping = {}
        # Remember each model's real database, then swap in the test database.
        for model in (self.requires or ()):
            self._db_mapping[model] = model._meta.database
            model._meta.set_database(self.database)

    def tearDown(self):
        # Restore every model's original database object.
        for model in (self.requires or ()):
            model._meta.set_database(self._db_mapping[model])
        super().tearDown()
class ModelTestCase(ModelDatabaseTestCase):
    """Creates (and drops) the tables for ``requires`` around each test."""
    database = db
    requires = None

    def setUp(self):
        super(ModelTestCase, self).setUp()
        if self.requires:
            # Drop first in case a previous run left tables behind.
            self.database.drop_tables(self.requires, safe=True, cascade=True)
            self.database.create_tables(self.requires)

    def tearDown(self):
        # CONSISTENCY FIX: roll back via self.database (previously the
        # module-level ``db`` was used directly, bypassing the class attribute
        # every other method consults).
        self.database.rollback()
        try:
            if self.requires:
                self.database.drop_tables(self.requires, safe=True, cascade=True)
        finally:
            super(ModelTestCase, self).tearDown()

    def assertNumpyArrayEqual(self, x, y, msg='', verbose=True):
        """Assert element-wise equality of two array-likes."""
        np.testing.assert_array_equal(x, y, err_msg=msg, verbose=verbose)

    def assertNumpyArrayListEqual(self, x, y, msg='', verbose=True):
        """Assert deep equality of (possibly nested) array-likes."""
        np.testing.assert_equal(x, y, err_msg=msg, verbose=verbose)
def requires_models(*models):
    """Decorator for test methods: binds *models* to the test database and
    creates their tables for the duration of the method, restoring the original
    database bindings afterwards.
    """
    # BUG FIX: the file's ``from functools import wraps`` is commented out at
    # the top, so ``wraps`` was an unresolved name; import it locally.
    from functools import wraps

    def decorator(method):
        @wraps(method)
        def inner(self):
            _db_mapping = {}
            for model in models:
                _db_mapping[model] = model._meta.database
                model._meta.set_database(self.database)
            self.database.drop_tables(models, safe=True)
            self.database.create_tables(models)
            try:
                method(self)
            finally:
                try:
                    self.database.drop_tables(models)
                except Exception:  # best-effort cleanup; narrow from bare except
                    pass
                for model in models:
                    model._meta.set_database(_db_mapping[model])
        return inner
    return decorator
| {
"repo_name": "gkdb/gkdb",
"path": "tests/gkdb/core/base.py",
"copies": "1",
"size": "3226",
"license": "mit",
"hash": 4781914000479231000,
"line_mean": 30.9405940594,
"line_max": 90,
"alpha_frac": 0.6171729696,
"autogenerated": false,
"ratio": 4.173350582147477,
"config_test": true,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5290523551747477,
"avg_score": null,
"num_lines": null
} |
from functools import wraps
from unittest import TestCase
import warnings
from django.contrib.admin.views.decorators import staff_member_required
from django.contrib.auth.decorators import login_required, permission_required, user_passes_test
from django.http import HttpResponse, HttpRequest, HttpResponseNotAllowed
from django.middleware.clickjacking import XFrameOptionsMiddleware
from django.utils.decorators import method_decorator
from django.utils.functional import allow_lazy, lazy, memoize
from django.views.decorators.cache import cache_page, never_cache, cache_control
from django.views.decorators.clickjacking import xframe_options_deny, xframe_options_sameorigin, xframe_options_exempt
from django.views.decorators.http import require_http_methods, require_GET, require_POST, require_safe, condition
from django.views.decorators.vary import vary_on_headers, vary_on_cookie
# Simple view whose metadata (__name__, __doc__, __dict__) the tests assert
# survives the full decorator stack applied below.  The docstring and
# attribute values are asserted verbatim in DecoratorsTest.test_attributes.
def fully_decorated(request):
    """Expected __doc__"""
    return HttpResponse('<html><body>dummy</body></html>')

fully_decorated.anything = "Expected __dict__"
def compose(*functions):
    """Compose right-to-left: compose(f, g)(*args, **kwargs) == f(g(*args, **kwargs))."""
    def composed(*args, **kwargs):
        chain = list(reversed(functions))
        value = chain[0](*args, **kwargs)
        for fn in chain[1:]:
            value = fn(value)
        return value
    return composed
# Stack of (nearly) all of Django's view decorators applied to one view, to
# verify the decorator chain preserves function metadata
# (see DecoratorsTest.test_attributes).
full_decorator = compose(
    # django.views.decorators.http
    require_http_methods(["GET"]),
    require_GET,
    require_POST,
    require_safe,
    condition(lambda r: None, lambda r: None),

    # django.views.decorators.vary
    vary_on_headers('Accept-language'),
    vary_on_cookie,

    # django.views.decorators.cache
    cache_page(60 * 15),
    cache_control(private=True),
    never_cache,

    # django.contrib.auth.decorators
    # Apply user_passes_test twice to check #9474
    user_passes_test(lambda u: True),
    login_required,
    permission_required('change_world'),

    # django.contrib.admin.views.decorators
    staff_member_required,

    # django.utils.functional
    allow_lazy,
    lazy,
)

# suppress the deprecation warning of memoize
with warnings.catch_warnings(record=True):
    fully_decorated = memoize(fully_decorated, {}, 1)

fully_decorated = full_decorator(fully_decorated)
class DecoratorsTest(TestCase):

    def test_attributes(self):
        """
        Tests that django decorators set certain attributes of the wrapped
        function.
        """
        self.assertEqual(fully_decorated.__name__, 'fully_decorated')
        self.assertEqual(fully_decorated.__doc__, 'Expected __doc__')
        self.assertEqual(fully_decorated.__dict__['anything'], 'Expected __dict__')

    def test_user_passes_test_composition(self):
        """
        Test that the user_passes_test decorator can be applied multiple times
        (#9474).
        """
        def test1(user):
            user.decorators_applied.append('test1')
            return True

        def test2(user):
            user.decorators_applied.append('test2')
            return True

        def callback(request):
            return request.user.decorators_applied

        callback = user_passes_test(test1)(callback)
        callback = user_passes_test(test2)(callback)

        class DummyUser(object):
            pass

        class DummyRequest(object):
            pass

        request = DummyRequest()
        request.user = DummyUser()
        request.user.decorators_applied = []
        response = callback(request)
        # The outermost decorator (test2) runs first, then test1.
        self.assertEqual(response, ['test2', 'test1'])

    def test_cache_page_new_style(self):
        """
        Test that we can call cache_page the new way
        """
        def my_view(request):
            return "response"
        my_view_cached = cache_page(123)(my_view)
        self.assertEqual(my_view_cached(HttpRequest()), "response")
        my_view_cached2 = cache_page(123, key_prefix="test")(my_view)
        self.assertEqual(my_view_cached2(HttpRequest()), "response")

    def test_require_safe_accepts_only_safe_methods(self):
        """
        Test for the require_safe decorator.
        A view returns either a response or an exception.
        Refs #15637.
        """
        def my_view(request):
            return HttpResponse("OK")
        my_safe_view = require_safe(my_view)
        request = HttpRequest()
        # GET and HEAD are "safe" methods and pass through to the view.
        request.method = 'GET'
        self.assertIsInstance(my_safe_view(request), HttpResponse)
        request.method = 'HEAD'
        self.assertIsInstance(my_safe_view(request), HttpResponse)
        # Mutating methods are rejected with a 405 response.
        request.method = 'POST'
        self.assertIsInstance(my_safe_view(request), HttpResponseNotAllowed)
        request.method = 'PUT'
        self.assertIsInstance(my_safe_view(request), HttpResponseNotAllowed)
        request.method = 'DELETE'
        self.assertIsInstance(my_safe_view(request), HttpResponseNotAllowed)
# For testing method_decorator, a decorator that assumes a single argument.
# We will get type arguments if there is a mismatch in the number of arguments.
def simple_dec(func):
    # Single-argument decorator: prefixes the argument with "test:" before
    # delegating, while keeping the wrapped function's metadata.
    @wraps(func)
    def inner(arg):
        return func("test:" + arg)
    return inner
simple_dec_m = method_decorator(simple_dec)
# For testing method_decorator, two decorators that add an attribute to the function
def myattr_dec(func):
    # Pass-through decorator that tags the wrapper with ``myattr = True``;
    # wraps() copies the inner function's metadata (and __dict__ attributes).
    @wraps(func)
    def passthrough(*args, **kwargs):
        return func(*args, **kwargs)
    passthrough.myattr = True
    return passthrough
myattr_dec_m = method_decorator(myattr_dec)
def myattr2_dec(func):
    # Same shape as myattr_dec but sets ``myattr2``; the two together verify
    # that stacked decorators accumulate attributes.
    @wraps(func)
    def passthrough(*args, **kwargs):
        return func(*args, **kwargs)
    passthrough.myattr2 = True
    return passthrough
myattr2_dec_m = method_decorator(myattr2_dec)
class MethodDecoratorTests(TestCase):
    """
    Tests for method_decorator
    """
    def test_preserve_signature(self):
        # method_decorator must forward ``self`` correctly so that a
        # single-argument decorator sees only the real argument.
        class Test(object):
            @simple_dec_m
            def say(self, arg):
                return arg
        self.assertEqual("test:hello", Test().say("hello"))
    def test_preserve_attributes(self):
        # Sanity check myattr_dec and myattr2_dec
        @myattr_dec
        @myattr2_dec
        def func():
            pass
        self.assertEqual(getattr(func, 'myattr', False), True)
        self.assertEqual(getattr(func, 'myattr2', False), True)
        # Now check method_decorator
        class Test(object):
            @myattr_dec_m
            @myattr2_dec_m
            def method(self):
                "A method"
                pass
        # Attributes must survive on both bound and unbound access.
        self.assertEqual(getattr(Test().method, 'myattr', False), True)
        self.assertEqual(getattr(Test().method, 'myattr2', False), True)
        self.assertEqual(getattr(Test.method, 'myattr', False), True)
        self.assertEqual(getattr(Test.method, 'myattr2', False), True)
        # Docstring and name of the original method must be preserved too.
        self.assertEqual(Test.method.__doc__, 'A method')
        self.assertEqual(Test.method.__name__, 'method')
class XFrameOptionsDecoratorsTests(TestCase):
    """
    Tests for the X-Frame-Options decorators.
    """
    def test_deny_decorator(self):
        """
        Ensures @xframe_options_deny properly sets the X-Frame-Options header.
        """
        @xframe_options_deny
        def a_view(request):
            return HttpResponse()
        r = a_view(HttpRequest())
        self.assertEqual(r['X-Frame-Options'], 'DENY')
    def test_sameorigin_decorator(self):
        """
        Ensures @xframe_options_sameorigin properly sets the X-Frame-Options
        header.
        """
        @xframe_options_sameorigin
        def a_view(request):
            return HttpResponse()
        r = a_view(HttpRequest())
        self.assertEqual(r['X-Frame-Options'], 'SAMEORIGIN')
    def test_exempt_decorator(self):
        """
        Ensures @xframe_options_exempt properly instructs the
        XFrameOptionsMiddleware to NOT set the header.
        """
        @xframe_options_exempt
        def a_view(request):
            return HttpResponse()
        req = HttpRequest()
        resp = a_view(req)
        # The decorator itself must not set the header, only flag the response.
        self.assertEqual(resp.get('X-Frame-Options', None), None)
        self.assertTrue(resp.xframe_options_exempt)
        # Since the real purpose of the exempt decorator is to suppress
        # the middleware's functionality, let's make sure it actually works...
        r = XFrameOptionsMiddleware().process_response(req, resp)
        self.assertEqual(r.get('X-Frame-Options', None), None)
| {
"repo_name": "yceruto/django",
"path": "tests/decorators/tests.py",
"copies": "6",
"size": "8387",
"license": "bsd-3-clause",
"hash": -665177422646794600,
"line_mean": 31.3822393822,
"line_max": 118,
"alpha_frac": 0.6457612972,
"autogenerated": false,
"ratio": 4.077297034516286,
"config_test": true,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.7723058331716286,
"avg_score": null,
"num_lines": null
} |
from functools import wraps
from urllib.error import URLError
from django.db import models
from django.urls import reverse
#from amazonproduct import API as AmazonAPI
from manabi.apps.utils.slugs import slugify
from django.conf import settings
#TODO-OLD find different way.
#amazon_api = AmazonAPI(settings.AWS_KEY, settings.AWS_SECRET_KEY, 'us')
class DeckedTextbookManager(models.Manager):
    """Manager limited to textbooks that have at least one active, shared deck."""
    def get_query_set(self):
        base_qs = super(DeckedTextbookManager, self).get_query_set()
        return base_qs.filter(deck__active=True, deck__shared=True).distinct()
def uses_amazon_api(func):
    """Guard decorator for Textbook methods that hit the Amazon product API:
    refuses to run unless the instance has an ISBN set."""
    @wraps(func)
    def checked(self, *args, **kwargs):
        if not self.isbn:
            raise Exception('Textbook has no ISBN.')
        return func(self, *args, **kwargs)
    return checked
class Textbook(models.Model):
    """A textbook identified by ISBN.

    Product details (title, author, images) are meant to come from the Amazon
    product API; that integration is currently stubbed out (``_item_lookup``
    returns None — see the TODO-OLD markers), so the API-backed methods will
    fail until it is restored.
    """
    objects = models.Manager()
    decked_objects = DeckedTextbookManager()
    slug = models.SlugField(blank=True) # Defaults to max_length=50
    isbn = models.CharField(max_length=13)
    custom_title = models.CharField(max_length=200, blank=True,
        help_text='Set this to override the Amazon product name.')
    #TODO-OLD student level field
    class Meta:
        app_label = 'flashcards'
    def __unicode__(self):
        try:
            return self.get_basic_info()['title'] + ' [{0}]'.format(self.isbn)
        except URLError:
            return 'ISBN: {0}'.format(self.isbn)
    def save(self, *args, **kwargs):
        # Slug is derived from the (possibly overridden) product title.
        title = self.get_basic_info()['title']
        self.slug = slugify(title)
        super(Textbook, self).save(*args, **kwargs)
    @property
    def shared_decks(self):
        # Decks for this book that are publicly visible.
        return self.deck_set.filter(
            active=True, shared=True)
    def get_absolute_url(self):
        # Bug fix: ``reverse()`` takes view arguments via the ``args=`` /
        # ``kwargs=`` keywords. The old positional form passed the kwargs
        # dict as ``args`` (and ``()`` as ``urlconf``), which never resolved.
        if self.slug:
            return reverse('book_detail_with_slug', kwargs={
                'object_id': self.id,
                'slug': self.slug,
            })
        else:
            return reverse('book_detail_without_slug', kwargs={
                'object_id': self.id,
            })
    @property
    def cleaned_isbn(self):
        # Normalized ISBN: whitespace and hyphens stripped.
        return self.isbn.strip().replace('-', '')
    def _item_lookup(self, **kwargs):
        # Amazon API disabled: the early return makes the call below
        # unreachable, so this currently always yields None.
        return
        #TODO-OLD fix
        return amazon_api.item_lookup(
            self.cleaned_isbn, IdType='ISBN', SearchIndex='Books', **kwargs)
    @uses_amazon_api
    def get_image_urls(self):
        '''
        Returns a dict with each available image size:
        {'size': 'url'}
        '''
        urls = {}
        root = self._item_lookup(ResponseGroup='Images')
        for size in ('Small', 'Medium', 'Large'):
            urls[size.lower()] = getattr(root.Items.Item, size + 'Image').URL.pyval
        return urls
    @uses_amazon_api
    def get_basic_info(self):
        '''
        Returns the following in a dict:
        author
        title
        purchase_url
        '''
        # NOTE(review): with _item_lookup stubbed to None this raises
        # AttributeError — confirm intended behavior before relying on it.
        root = self._item_lookup(ResponseGroup='Small')
        attribs = root.Items.Item.ItemAttributes
        return {
            'author': attribs.Author.pyval,
            'title': self.custom_title or attribs.Title.pyval,
        }
| {
"repo_name": "aehlke/manabi",
"path": "manabi/apps/books/models.py",
"copies": "1",
"size": "3146",
"license": "mit",
"hash": 1971736059722793700,
"line_mean": 28.1296296296,
"line_max": 83,
"alpha_frac": 0.5890019072,
"autogenerated": false,
"ratio": 3.8087167070217918,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4897718614221792,
"avg_score": null,
"num_lines": null
} |
from functools import wraps
from urllib.request import Request, urlopen, quote
from flask import Blueprint, g, render_template, request, flash
from flask_login import login_required, current_user
from app.models import ROLE_ADMIN
from app.forms import SendSMSForm
from app import app
admin_module = Blueprint('admin', __name__, template_folder='templates')
def required_roles(*roles):
    """Decorator factory: only allow the view to run when the current user's
    role is one of ``roles``; otherwise return a refusal message."""
    def wrapper(f):
        @wraps(f)
        def wrapped(*args, **kwargs):
            if g.user.role in roles:
                return f(*args, **kwargs)
            return "You dont have permission to view this page."
        return wrapped
    return wrapper
'''
sms api doc: http://sms.ru/?panel=api&subpanel=method&show=sms/send
'''
@admin_module.route('/admin', methods=['POST', 'GET'])
@login_required
@required_roles(ROLE_ADMIN)
def admin():
    """Admin page: on a valid POST, sends the submitted text as an SMS and
    flashes the outcome; always re-renders the form."""
    form = SendSMSForm(request.form)
    if request.method == 'POST' and form.validate():
        message = form.sms_text.data
        reply = send_sms_to_me(message)
        # The first whitespace-separated token of the reply is a status code.
        code = int(reply.split()[0]) if reply else None
        if code == 100:
            flash('sms has been send successfully with text: ' + message)
        elif code in send_sms_errors:
            flash('Error: ' + send_sms_errors[code])
        else:
            flash('I cant send sms, something wrong, I don\'t know what exactly :(')
    return render_template('admin.html', form=form)
@admin_module.before_request
def before_request():
    # Expose the flask-login user on ``g`` so required_roles can read g.user.
    g.user = current_user
def send_sms_to_me(text, to='79880116219'):
    """Send ``text`` via the sms.ru HTTP API.

    :param text: message body (transmitted cp1251-encoded, URL-quoted)
    :param to: destination number; defaults to the previously hard-coded one,
        so existing callers are unaffected.
    :return: the API response body as ``str`` (first token is a status code).
    """
    text = quote(text.encode('cp1251'), safe="/;%[]=:$&()+,!?*@'~")
    url_ = ('http://sms.ru/sms/send?api_id=' + app.config['SMS_API_ID']
            + '&to=' + to + '&text=' + text)
    req = Request(url=url_)
    # Bug fix: urlopen().read() returns bytes under Python 3, and the caller
    # does int(result.split()[0]), which rejects bytes. Decode to str.
    response = urlopen(req).read().decode('utf-8', 'replace')
    return response
# Status codes returned by the sms.ru send API, mapped to the human-readable
# descriptions (in Russian) that admin() flashes to the operator. These values
# are runtime strings shown in the UI, so they are intentionally untranslated.
send_sms_errors = {
    200: 'Неправильный api_id',
    201: 'не хватает средств на лицевом счету',
    202: 'Неправильно указан получатель',
    203: 'Нет текста сообщения',
    204: 'Имя отправителя не согласовано с администрацией',
    205: 'Сообщение слишком длинное (превышает 8 СМС)',
    206: 'Будет превышен или уже превышен дневной лимит на отправку сообщений',
    207: 'На этот номер (или один из номеров) нельзя отправлять сообщения, '
         'либо указано более 100 номеров в списке получателей',
    208: 'Параметр time указан неправильно',
    209: 'Вы добавили этот номер (или один из номеров) в стоп-лист',
    210: 'Используется GET, где необходимо использовать POST',
    211: 'Метод не найден',
    212: 'Текст сообщения необходимо передать в кодировке UTF-8 (вы передали в другой кодировке)',
    220: 'Сервис временно недоступен, попробуйте чуть позже.',
    230: 'Сообщение не принято к отправке, так как на один номер в день нельзя отправлять более 60 сообщений.',
    300: 'Неправильный token (возможно истек срок действия, либо ваш IP изменился)',
    301: 'Неправильный пароль, либо пользователь не найден',
    302: 'Пользователь авторизован, но аккаунт не подтвержден (пользователь не ввел код, '
         'присланный в регистрационной смс)'
}
| {
"repo_name": "Kwentar/Dream-Crusher",
"path": "app/admin_views.py",
"copies": "1",
"size": "4030",
"license": "apache-2.0",
"hash": -1045146840305112800,
"line_mean": 37.2857142857,
"line_max": 111,
"alpha_frac": 0.6582711443,
"autogenerated": false,
"ratio": 2.2042494859492803,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.33625206302492805,
"avg_score": null,
"num_lines": null
} |
from functools import wraps
from urlparse import urlparse
from django.conf import settings
from django.contrib.auth import REDIRECT_FIELD_NAME
from django.shortcuts import resolve_url
from django.utils.decorators import available_attrs
from django.utils.encoding import force_str
from django.utils.functional import curry
from django.http import Http404
def get_available_zone_names():
    """Return the names of every zone configured in settings.ZONE_PROFILES."""
    return list(settings.ZONE_PROFILES)
def get_zone_profile(zone_name):
    """Return a copy of the named zone's profile dict with its ``name`` key
    filled in, or None when the zone is unknown (or its profile is empty)."""
    profile = dict(settings.ZONE_PROFILES.get(zone_name, {}))
    if not profile:
        return None
    profile["name"] = zone_name
    return profile
def zone_required(function=None, redirect_field_name=REDIRECT_FIELD_NAME, zone=None, login_url=None):
    """
    Decorator for views that checks that the zone for logged in user matches,
    given zone kwarg redirecting
    to the log-in page if necessary.

    Usable both bare (``@zone_required``) and parameterized
    (``@zone_required(zone=...)``) — see the trailing ``if function`` branch.
    """
    def decorator(view_func):
        @wraps(view_func, assigned=available_attrs(view_func))
        def _wrapped_view(request, *args, **kwargs):
            # request.zone is presumably set by middleware — TODO confirm.
            r_zone = getattr(request, "zone") or {}
            if request.user.is_authenticated():
                if r_zone.get("name") == zone:
                    return view_func(request, *args, **kwargs)
            # Wrong zone or anonymous: redirect to the zone's login page.
            path = request.build_absolute_uri()
            # urlparse chokes on lazy objects in Python 3, force to str
            resolved_login_url = force_str(
                resolve_url(login_url or r_zone.get("LOGIN_REDIRECT_URL") or settings.LOGIN_URL)
            )
            # If the login url is the same scheme and net location then just
            # use the path as the "next" url.
            login_scheme, login_netloc = urlparse(resolved_login_url)[:2]
            current_scheme, current_netloc = urlparse(path)[:2]
            missing_or_matching_login_scheme = not login_scheme or login_scheme == current_scheme
            missing_or_matching_login_netloc = not login_netloc or login_netloc == current_netloc
            if missing_or_matching_login_scheme and missing_or_matching_login_netloc:
                path = request.get_full_path()
            # Imported lazily to avoid an import cycle with django.contrib.auth.
            from django.contrib.auth.views import redirect_to_login
            return redirect_to_login(path, resolved_login_url, redirect_field_name)
        return _wrapped_view
    if function:
        return decorator(function)
    return decorator
# Zone-specific shortcuts: pre-bind the ``zone`` argument so views can apply
# these decorators directly.
call_centre_zone_required = curry(zone_required, zone="call_centre")
cla_provider_zone_required = curry(zone_required, zone="cla_provider")
def manager_member_required(view_func):
    """
    Decorator for views that requires the user to be logged in and flagged
    as a manager; anything else gets a 404.
    """
    @wraps(view_func)
    def _checklogin(request, *args, **kwargs):
        if not (request.user.is_authenticated() and request.user.is_manager):
            raise Http404()
        # Authorized manager — run the wrapped view.
        return view_func(request, *args, **kwargs)
    return _checklogin
| {
"repo_name": "ministryofjustice/cla_frontend",
"path": "cla_frontend/apps/cla_auth/utils.py",
"copies": "1",
"size": "2950",
"license": "mit",
"hash": 4408250030441393700,
"line_mean": 35.875,
"line_max": 101,
"alpha_frac": 0.6610169492,
"autogenerated": false,
"ratio": 4.002713704206242,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.0008006754008244389,
"num_lines": 80
} |
from functools import wraps
from util.commons_util.logger.Timer import Timer
__author__ = 'Danyang'
def timestamp(func):
"""
time the execution time of a function
:param func: the function, whose result you would like to cached based on input arguments
"""
def ret(*args):
timer = Timer()
timer.start()
result = func(*args)
print timer.end()
return result
return ret
def print_func_name(func):
"""
print the current executing function name
possible use:
>>> print sys._getframe().f_code.co_name
:param func: the function, whose result you would like to cached based on input arguments
"""
def wrapper(*args):
print func.func_name
result = func(*args)
return result
return wrapper
def trace(func):
    """
    Log each call of ``func`` as ``name(args, kwargs) -> result``.

    Usage:
        @trace
        def f(a, b):
            ...

    functools.wraps copies the inner function's metadata onto the wrapper,
    keeping the decorator transparent to introspection tools (debuggers,
    help(), etc.).
    :param func: the function to trace
    :return: the tracing wrapper
    """
    @wraps(func)
    def traced(*args, **kwargs):
        outcome = func(*args, **kwargs)
        print('%s(%r, %r) -> %r' %
              (func.__name__, args, kwargs, outcome))
        return outcome
    return traced
| {
"repo_name": "idf/commons-util-py",
"path": "commons_util/decorators/general.py",
"copies": "1",
"size": "1699",
"license": "apache-2.0",
"hash": 500218525816812200,
"line_mean": 24.3582089552,
"line_max": 104,
"alpha_frac": 0.6156562684,
"autogenerated": false,
"ratio": 4.268844221105527,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.005202189895743404,
"num_lines": 67
} |
from functools import wraps
from Utils import *
def do(monad):
    """An implementation of Haskell's do-notation, using generators.
    Do-notation in Haskell looks like this:
    m = do
    x <- a
    y <- b
    pure (x, y)
    This implementation in Python looks like this:
    @do(Monad)
    def m():
    x = yield a
    y = yield b
    Monad.pure((x,y))
    Where `Monad` corresponds to the particular monad being used.
    """
    def do_wrapper(func):
        @wraps(func)
        def do_func(*args, **kwargs):
            def lazy():
                # A fresh generator per evaluation, so the block is re-runnable.
                genr = func(*args, **kwargs)
                @tco  # trampoline (from Utils) — keeps long bind chains flat
                def send(x):
                    # Feed the previous result into the generator; each yielded
                    # monad's .bind continues the computation.
                    try: return (genr.send(x).bind, send)
                    except StopIteration as e:
                        # Generator exhausted: its return value (if any)
                        # replaces x, then gets lifted with the monad's pure.
                        if e.value != None: x = e.value
                        return monad.pure(x)
                return send(None)
            # Called with arguments -> return a thunk; with none -> evaluate now.
            return lazy if args or kwargs else lazy()
        return do_func
    return do_wrapper
| {
"repo_name": "oisdk/PyParse",
"path": "Do.py",
"copies": "1",
"size": "1031",
"license": "mit",
"hash": 555623947036213800,
"line_mean": 29.3235294118,
"line_max": 68,
"alpha_frac": 0.490785645,
"autogenerated": false,
"ratio": 4.260330578512397,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5251116223512396,
"avg_score": null,
"num_lines": null
} |
from functools import wraps
from uuid import uuid1
from codenode.external.jsonrpc._json import loads, dumps
from codenode.external.jsonrpc.exceptions import *
from codenode.external.jsonrpc.types import *
# Identity decorator used as a stand-in when Django's csrf_exempt is missing.
empty_dec = lambda f: f
try:
    from django.views.decorators.csrf import csrf_exempt
except (NameError, ImportError):
    csrf_exempt = empty_dec
NoneType = type(None)
# Coerce all param keys to native str so they can be used as **kwargs names.
encode_kw = lambda p: dict([(str(k), v) for k, v in p.iteritems()])
def encode_kw11(p):
    # JSON-RPC 1.1 keyword extraction: drop params whose key parses as an
    # integer (those are positional — handled by encode_arg11), keeping only
    # the true keyword params. Non-dict input yields no keywords.
    if not type(p) is dict:
        return {}
    ret = p.copy()
    removes = []
    for k, v in ret.iteritems():
        try:
            int(k)
        except ValueError:
            pass
        else:
            removes.append(k)
    # Removal is deferred to avoid mutating the dict while iterating it.
    for k in removes:
        ret.pop(k)
    return ret
def encode_arg11(p):
    # JSON-RPC 1.1 positional extraction: a list is already positional; from
    # a dict, collect the values stored under integer-like keys, ordered by
    # that integer. Anything else contributes no positional args.
    if type(p) is list:
        return p
    elif not type(p) is dict:
        return []
    else:
        pos = []
        d = encode_kw(p)
        for k, v in d.iteritems():
            try:
                pos.append(int(k))
            except ValueError:
                pass
        pos = list(set(pos))
        pos.sort()
        return [d[str(i)] for i in pos]
def validate_params(method, D):
    """Validate the decoded request ``D`` against ``method``'s declared
    JSON-RPC signature (``json_arg_types``), raising InvalidParamsError on
    any arity or type mismatch.

    Fix: corrected typos in the user-facing error messages
    ('Not eough' / 'Not enouh' -> 'Not enough').
    """
    if type(D['params']) == Object:
        # Named params: every declared key must be present and well-typed.
        keys = method.json_arg_types.keys()
        if len(keys) != len(D['params']):
            raise InvalidParamsError('Not enough params provided for %s' % method.json_sig)
        for k in keys:
            if not k in D['params']:
                raise InvalidParamsError('%s is not a valid parameter for %s'
                                         % (k, method.json_sig))
            if not Any.kind(D['params'][k]) == method.json_arg_types[k]:
                raise InvalidParamsError('%s is not the correct type %s for %s'
                                         % (type(D['params'][k]), method.json_arg_types[k], method.json_sig))
    elif type(D['params']) == Array:
        # Positional params: check each against the declared types in order.
        # NOTE(review): indexing ``values()`` assumes a Python 2 list — verify
        # before running under Python 3.
        arg_types = method.json_arg_types.values()
        try:
            for i, arg in enumerate(D['params']):
                if not Any.kind(arg) == arg_types[i]:
                    raise InvalidParamsError('%s is not the correct type %s for %s'
                                             % (type(arg), arg_types[i], method.json_sig))
        except IndexError:
            raise InvalidParamsError('Too many params provided for %s' % method.json_sig)
        else:
            if len(D['params']) != len(arg_types):
                raise InvalidParamsError('Not enough params provided for %s' % method.json_sig)
class JSONRPCSite(object):
    "A JSON-RPC Site"
    def __init__(self):
        # Registry of exposed procedures: unicode name -> callable.
        self.urls = {}
        self.uuid = str(uuid1())
        self.version = '1.0'
        self.name = 'django-json-rpc'
        self.register('system.describe', self.describe)
    def register(self, name, method):
        # Register (or replace) a procedure under the given name.
        self.urls[unicode(name)] = method
    def empty_response(self, version='1.0'):
        # Skeleton response for the requested protocol version.
        resp = {'id': None}
        if version == '1.1':
            resp['version'] = version
            return resp
        if version == '2.0':
            resp['jsonrpc'] = version
        resp.update({'error': None, 'result': None})
        return resp
    def validate_get(self, request, method):
        # GET access is only allowed for procedures flagged json_safe;
        # query params become the call's params (treated as JSON-RPC 1.1).
        encode_get_params = lambda r: dict([(k, v[0] if len(v) == 1 else v)
                                            for k, v in r])
        if request.method == 'GET':
            method = unicode(method)
            if method in self.urls and getattr(self.urls[method], 'json_safe', False):
                D = {
                    'params': encode_get_params(request.GET.lists()),
                    'method': method,
                    'id': 'jsonrpc',
                    'version': '1.1'
                }
                return True, D
        return False, {}
    def response_dict(self, request, D, is_batch=False, version_hint='1.0'):
        # Execute one decoded request dict D, returning (response, status).
        version = version_hint
        response = self.empty_response(version=version)
        # Per-version argument-passing conventions.
        apply_version = {'2.0': lambda f, r, p: f(r, **encode_kw(p)) if type(p) is dict else f(r, *p),
                         '1.1': lambda f, r, p: f(r, *encode_arg11(p), **encode_kw(encode_kw11(p))),
                         '1.0': lambda f, r, p: f(r, *p)}
        try:
            if 'method' not in D or 'params' not in D:
                raise InvalidParamsError('Request requires str:"method" and list:"params"')
            if D['method'] not in self.urls:
                raise MethodNotFoundError('Method not found. Available methods: %s' % (
                    '\n'.join(self.urls.keys())))
            # Protocol version detection: 2.0 uses "jsonrpc", 1.1 uses "version".
            if 'jsonrpc' in D:
                if str(D['jsonrpc']) not in apply_version:
                    raise InvalidRequestError('JSON-RPC version %s not supported.' % D['jsonrpc'])
                version = request.jsonrpc_version = response['jsonrpc'] = str(D['jsonrpc'])
            elif 'version' in D:
                if str(D['version']) not in apply_version:
                    raise InvalidRequestError('JSON-RPC version %s not supported.' % D['version'])
                version = request.jsonrpc_version = response['version'] = str(D['version'])
            else:
                request.jsonrpc_version = '1.0'
            method = self.urls[str(D['method'])]
            # Optional signature validation (enabled per-method by decorator).
            if getattr(method, 'json_validate', False):
                validate_params(method, D)
            R = apply_version[version](method, request, D['params'])
            # Only JSON-serializable return types are allowed through.
            assert sum(map(lambda e: isinstance(R, e),
                           (dict, str, unicode, int, long, list, set, NoneType, bool))), \
                "Return type not supported"
            if 'id' in D and D['id'] is not None: # regular request
                response['result'] = R
                response['id'] = D['id']
                if version == '1.1' and 'error' in response:
                    response.pop('error')
            elif is_batch: # notification, not ok in a batch format, but happened anyway
                raise InvalidRequestError
            else: # notification
                return None, 204
            status = 200
        except Error, e:
            # Known JSON-RPC error: carries its own wire format and status.
            response['error'] = e.json_rpc_format
            if version == '1.1' and 'result' in response:
                response.pop('result')
            status = e.status
        except Exception, e:
            # exception missed by others
            other_error = OtherError(e)
            response['error'] = other_error.json_rpc_format
            status = other_error.status
            if version == '1.1' and 'result' in response:
                response.pop('result')
        return response, status
    @csrf_exempt
    def dispatch(self, request, method=''):
        # Django entry point: decode the HTTP request (GET or POST body),
        # execute (single or batch), and serialize the result as JSON.
        from django.http import HttpResponse
        from django.core.serializers.json import DjangoJSONEncoder
        try:
            # in case we do something json doesn't like, we always get back valid json-rpc response
            response = self.empty_response()
            if request.method.lower() == 'get':
                valid, D = self.validate_get(request, method)
                if not valid:
                    raise InvalidRequestError('The method you are trying to access is '
                                              'not availble by GET requests')
            elif not request.method.lower() == 'post':
                raise RequestPostError
            else:
                try:
                    D = loads(request.raw_post_data)
                except:
                    raise InvalidRequestError
            if type(D) is list:
                # Batch: run each sub-request; statuses are collapsed to 200.
                response = [self.response_dict(request, d, is_batch=True)[0] for d in D]
                status = 200
            else:
                response, status = self.response_dict(request, D)
                if response is None and (not u'id' in D or D[u'id'] is None): # a notification
                    return HttpResponse('', status=status)
            json_rpc = dumps(response, cls=DjangoJSONEncoder)
        except Error, e:
            response['error'] = e.json_rpc_format
            status = e.status
            json_rpc = dumps(response, cls=DjangoJSONEncoder)
        except Exception, e:
            # exception missed by others
            other_error = OtherError(e)
            response['result'] = None
            response['error'] = other_error.json_rpc_format
            status = other_error.status
            json_rpc = dumps(response,cls=DjangoJSONEncoder)
        return HttpResponse(json_rpc, status=status, content_type='application/json-rpc')
    def procedure_desc(self, key):
        # SMD-style description of one registered procedure.
        M = self.urls[key]
        return {
            'name': M.json_method,
            'summary': M.__doc__,
            'idempotent': M.json_safe,
            'params': [{'type': str(Any.kind(t)), 'name': k}
                       for k, t in M.json_arg_types.iteritems()],
            'return': {'type': str(M.json_return_type)}}
    def service_desc(self):
        # Service description for system.describe (excludes describe itself).
        return {
            'sdversion': '1.0',
            'name': self.name,
            'id': 'urn:uuid:%s' % str(self.uuid),
            'summary': self.__doc__,
            'version': self.version,
            'procs': [self.procedure_desc(k)
                      for k in self.urls.iterkeys()
                      if self.urls[k] != self.describe]}
    def describe(self, request):
        return self.service_desc()
jsonrpc_site = JSONRPCSite()
| {
"repo_name": "regmi/codenode-unr",
"path": "codenode/external/jsonrpc/site.py",
"copies": "1",
"size": "8195",
"license": "bsd-3-clause",
"hash": -977998780124975600,
"line_mean": 32.7242798354,
"line_max": 98,
"alpha_frac": 0.5954850519,
"autogenerated": false,
"ratio": 3.6085424922941436,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4704027544194143,
"avg_score": null,
"num_lines": null
} |
from functools import wraps
from uuid import uuid1
from types import NoneType
from django.http import HttpResponse
from jsonrpc._json import loads, dumps
from jsonrpc.exceptions import *
# Identity decorator used as a stand-in when Django's csrf_exempt is missing.
empty_dec = lambda f: f
try:
    from django.views.decorators.csrf import csrf_exempt
except (NameError, ImportError):
    csrf_exempt = empty_dec
# Coerce all param keys to native str so they can be used as **kwargs names.
encode_kw = lambda p: dict([(str(k), v) for k, v in p.iteritems()])
def encode_kw11(p):
    # JSON-RPC 1.1 keyword extraction: drop integer-keyed (positional) params,
    # keeping only true keyword params; non-dict input yields no keywords.
    if not type(p) is dict:
        return {}
    ret = p.copy()
    removes = []
    for k, v in ret.iteritems():
        try:
            int(k)
        except ValueError:
            pass
        else:
            removes.append(k)
    # Deferred removal avoids mutating the dict while iterating it.
    for k in removes:
        ret.pop(k)
    return ret
def encode_arg11(p):
    # JSON-RPC 1.1 positional extraction: a list is already positional; from
    # a dict, take values under integer-like keys ordered by that integer.
    if type(p) is list:
        return p
    elif not type(p) is dict:
        return []
    else:
        pos = []
        d = encode_kw(p)
        for k, v in d.iteritems():
            try:
                pos.append(int(k))
            except ValueError:
                pass
        pos = list(set(pos))
        pos.sort()
        return [d[str(i)] for i in pos]
class JSONRPCSite(object):
    """JSON-RPC dispatch site: a registry of procedures plus the Django view
    (``dispatch``) that decodes, executes and serializes requests."""
    def __init__(self):
        # Registry of exposed procedures: unicode name -> callable.
        self.urls = {}
        self.register('system.describe', self.describe)
    def register(self, name, method):
        self.urls[unicode(name)] = method
        # NOTE(review): uuid/version/name are (re)assigned on *every*
        # register() call rather than once in __init__ — looks unintentional
        # (the sibling implementation sets them in __init__); confirm.
        self.uuid = str(uuid1())
        self.version = '1.0'
        self.name = 'django-json-rpc'
    def empty_response(self, version='1.0'):
        # Skeleton response for the requested protocol version.
        resp = {'id': None}
        if version == '1.1':
            resp['version'] = version
            return resp
        if version == '2.0':
            resp['jsonrpc'] = version
        resp.update({'error': None, 'result': None})
        return resp
    def validate_get(self, request, method):
        # GET access is only allowed for procedures flagged json_safe;
        # query params become the call's params (treated as JSON-RPC 1.1).
        encode_get_params = lambda r: dict([(k, v[0] if len(v) == 1 else v)
                                            for k, v in r])
        if request.method == 'GET':
            method = unicode(method)
            if method in self.urls and getattr(self.urls[method], 'json_safe', False):
                D = {
                    'params': encode_get_params(request.GET.lists()),
                    'method': method,
                    'id': 'jsonrpc',
                    'version': '1.1'
                }
                return True, D
        return False, {}
    def response_dict(self, request, D, is_batch=False, version_hint='1.0'):
        # Execute one decoded request dict D, returning (response, status).
        version = version_hint
        response = self.empty_response(version=version)
        # Per-version argument-passing conventions.
        apply_version = {'2.0': lambda f, r, p: f(r, **encode_kw(p)) if type(p) is dict else f(r, *p),
                         '1.1': lambda f, r, p: f(r, *encode_arg11(p), **encode_kw(encode_kw11(p))),
                         '1.0': lambda f, r, p: f(r, *p)}
        try:
            if 'method' not in D or 'params' not in D:
                raise InvalidParamsError('Request requires str:"method" and list:"params"')
            if D['method'] not in self.urls:
                raise MethodNotFoundError('Method not found. Available methods: %s' % (
                    '\n'.join(self.urls.keys())))
            # Protocol version detection: 2.0 uses "jsonrpc", 1.1 uses "version".
            if 'jsonrpc' in D:
                if str(D['jsonrpc']) not in apply_version:
                    raise InvalidRequestError('JSON-RPC version %s not supported.' % D['jsonrpc'])
                version = request.jsonrpc_version = response['jsonrpc'] = str(D['jsonrpc'])
            elif 'version' in D:
                if str(D['version']) not in apply_version:
                    raise InvalidRequestError('JSON-RPC version %s not supported.' % D['version'])
                version = request.jsonrpc_version = response['version'] = str(D['version'])
            else:
                request.jsonrpc_version = '1.0'
            R = apply_version[version](self.urls[str(D['method'])], request, D['params'])
            # Only JSON-serializable return types are allowed through.
            assert sum(map(lambda e: isinstance(R, e),
                           (dict, str, unicode, int, long, list, set, NoneType, bool))), \
                "Return type not supported"
            if 'id' in D and D['id'] is not None: # regular request
                response['result'] = R
                response['id'] = D['id']
                if version == '1.1' and 'error' in response:
                    response.pop('error')
            elif is_batch: # notification, not ok in a batch format, but happened anyway
                raise InvalidRequestError
            else: # notification
                return None, 204
            status = 200
        except Error, e:
            # Known JSON-RPC error: carries its own wire format and status.
            response['error'] = e.json_rpc_format
            if version == '1.1' and 'result' in response:
                response.pop('result')
            status = e.status
        except Exception, e:
            # exception missed by others
            other_error = OtherError(e)
            response['error'] = other_error.json_rpc_format
            status = other_error.status
            if version == '1.1' and 'result' in response:
                response.pop('result')
        return response, status
    @csrf_exempt
    def dispatch(self, request, method=''):
        # Django entry point: decode the HTTP request (GET or POST body),
        # execute (single or batch), and serialize the result as JSON.
        from django.core.serializers.json import DjangoJSONEncoder
        try:
            # in case we do something json doesn't like, we always get back valid json-rpc response
            response = self.empty_response()
            if request.method.lower() == 'get':
                valid, D = self.validate_get(request, method)
                if not valid:
                    raise InvalidRequestError('The method you are trying to access is '
                                              'not availble by GET requests')
            elif not request.method.lower() == 'post':
                raise RequestPostError
            else:
                try:
                    D = loads(request.raw_post_data)
                except:
                    raise InvalidRequestError
            if type(D) is list:
                # Batch: run each sub-request; statuses are collapsed to 200.
                response = [self.response_dict(request, d, is_batch=True)[0] for d in D]
                status = 200
            else:
                response, status = self.response_dict(request, D)
                if response is None and (not u'id' in D or D[u'id'] is None): # a notification
                    return HttpResponse('', status=status)
            json_rpc = dumps(response, cls=DjangoJSONEncoder)
        except Error, e:
            response['error'] = e.json_rpc_format
            status = e.status
            json_rpc = dumps(response, cls=DjangoJSONEncoder)
        except Exception, e:
            # exception missed by others
            other_error = OtherError(e)
            response['result'] = None
            response['error'] = other_error.json_rpc_format
            status = other_error.status
            json_rpc = dumps(response,cls=DjangoJSONEncoder)
        return HttpResponse(json_rpc, status=status, content_type='application/json-rpc')
    def procedure_desc(self, key):
        # SMD-style description of one registered procedure.
        M = self.urls[key]
        return {
            'name': M.json_method,
            'summary': M.__doc__,
            'idempotent': M.json_safe,
            'params': M.json_args,
            'return': {'type': M.json_return_type}}
    def service_desc(self):
        # Service description for system.describe (excludes describe itself).
        return {
            'sdversion': '1.0',
            'name': self.name,
            'id': 'urn:uuid:%s' % str(self.uuid),
            'summary': self.__doc__,
            'version': self.version,
            'procs': [self.procedure_desc(k)
                      for k in self.urls.iterkeys()
                      if self.urls[k] != self.describe]}
    def describe(self, request):
        return self.service_desc()
jsonrpc_site = JSONRPCSite()
| {
"repo_name": "edisonlz/fruit",
"path": "web_project/base/site-packages/jsonrpc/site.py",
"copies": "1",
"size": "6821",
"license": "apache-2.0",
"hash": -2980159656906120700,
"line_mean": 31.327014218,
"line_max": 98,
"alpha_frac": 0.5837853687,
"autogenerated": false,
"ratio": 3.689021092482423,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9578216703448674,
"avg_score": 0.038917951546749784,
"num_lines": 211
} |
from functools import wraps
from uuid import uuid4
from ascetic import interfaces, utils
class BaseTransaction(interfaces.ITransaction):
    """Common base for transaction nodes; holds an accessor (a zero-arg
    callable) returning the database object to operate on."""
    def __init__(self, db_accessor):
        self._db = db_accessor
    def parent(self):
        # Top-level transactions have no parent (overridden by SavePoint).
        return None
    def can_reconnect(self):
        # Reconnecting mid-transaction would lose state.
        return False
    def set_autocommit(self, autocommit):
        raise Exception("You cannot set autocommit during a managed transaction!")
    def is_null(self):
        return True
class Transaction(BaseTransaction):
    """A real top-level transaction: delegates begin/commit/rollback straight
    to the database object."""
    def begin(self):
        self._db().begin()
    def commit(self):
        self._db().commit()
    def rollback(self):
        self._db().rollback()
class SavePoint(BaseTransaction):
    """A nested transaction implemented as a named database savepoint."""
    def __init__(self, db_accessor, parent, name=None):
        BaseTransaction.__init__(self, db_accessor)
        self._parent = parent
        # Auto-generate a unique savepoint name when none is given.
        self._name = name or 's' + uuid4().hex
    def parent(self):
        return self._parent
    def begin(self):
        self._db().begin_savepoint(self._name)
    def commit(self):
        self._db().commit_savepoint(self._name)
    def rollback(self):
        self._db().rollback_savepoint(self._name)
class DummyTransaction(BaseTransaction):
    """Null object used when no transaction is active: begin/commit/rollback
    are no-ops, and autocommit changes are allowed through."""
    def begin(self):
        pass
    def commit(self):
        pass
    def rollback(self):
        pass
    def can_reconnect(self):
        # Safe to reconnect when nothing is in flight.
        return True
    def set_autocommit(self, autocommit):
        self._db().set_autocommit(autocommit)
    def is_null(self):
        return True
class TransactionManager(interfaces.ITransactionManager):
    """Manages a stack of transactions (a top-level Transaction plus nested
    SavePoints) for one database connection. Usable as a context manager or
    as a decorator.
    """
    def __init__(self, db_accessor, autocommit):
        """
        :type db_accessor: zero-arg callable returning the database object
        :type autocommit: bool -- autocommit state to apply outside transactions
        """
        self._db = db_accessor
        self._current = None
        self._autocommit = autocommit
        # Listen for reconnects so state can be reset (see _on_connect).
        self._disposable = self._subscribe(self._db())
    def __call__(self, func=None):
        # Bare call returns self (context-manager use); with a function it
        # acts as a decorator wrapping the call in a transaction.
        if func is None:
            return self
        @wraps(func)
        def _decorated(*a, **kw):
            with self:
                rv = func(*a, **kw)
            return rv
        return _decorated
    def __enter__(self):
        self.begin()
    def __exit__(self, exc_type, exc_val, exc_tb):
        # Commit on clean exit, roll back on exception (or failed commit).
        # NOTE(review): the bare ``except:`` also catches KeyboardInterrupt.
        try:
            if exc_type:
                self.rollback()
            else:
                try:
                    self.commit()
                except:
                    self.rollback()
                    raise
        finally:
            pass
    def current(self, node=utils.Undef):
        # Combined getter/setter: no argument -> current node (or a
        # DummyTransaction when none); with an argument -> set it.
        if node is utils.Undef:
            return self._current or DummyTransaction(self._db)
        self._current = node
    def begin(self):
        # First begin starts a real transaction (disabling autocommit);
        # nested begins push savepoints.
        if self._current is None:
            self.current().set_autocommit(False)
            self.current(Transaction(self._db))
        else:
            self.current(SavePoint(self._db, self.current()))
        self.current().begin()
        return
    def commit(self):
        # Commit the innermost node and pop back to its parent.
        self.current().commit()
        self.current(self.current().parent())
    def rollback(self):
        # Roll back the innermost node and pop back to its parent.
        self.current().rollback()
        self.current(self.current().parent())
    def can_reconnect(self):
        return self.current().can_reconnect()
    def autocommit(self, autocommit=None):
        # Getter/setter: without argument, report effective autocommit
        # (only true outside any transaction); with one, apply it.
        if autocommit is None:
            return self._autocommit and not self._current
        self._autocommit = autocommit
        self.current().set_autocommit(autocommit)
    def _subscribe(self, subject):
        return subject.observed().attach('connect', self._on_connect)
    def _on_connect(self, subject, aspect):
        # A reconnect discards any in-flight transaction state.
        self._current = None
        self.current().set_autocommit(self._autocommit)
| {
"repo_name": "emacsway/ascetic",
"path": "ascetic/transaction.py",
"copies": "1",
"size": "3687",
"license": "mit",
"hash": 8871807221768578000,
"line_mean": 23.2565789474,
"line_max": 82,
"alpha_frac": 0.5725522105,
"autogenerated": false,
"ratio": 4.332549941245594,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5405102151745593,
"avg_score": null,
"num_lines": null
} |
from functools import wraps
from uuid import uuid4
from app.globals import get_session_store
from app.utilities.schema import load_schema_from_session_data
def with_schema(function):
    """Inject the survey schema as the first argument of the wrapped function.

    Intended for flask request handlers (or functions they call). It may
    error unless there is a `current_user`, so apply it after
    `login_required`, e.g.

    ```python
    @login_required
    @with_schema
    @full_routing_path_required
    def get_block(routing_path, schema, *args):
        ...
    ```
    """
    @wraps(function)
    def wrapped_function(*args, **kwargs):
        # Resolve the schema from the current session on every call.
        schema = load_schema_from_session_data(get_session_store().session_data)
        return function(schema, *args, **kwargs)
    return wrapped_function
def get_group_instance_id(schema, answer_store, location, answer_instance=0):
    """Return a group instance_id if required for this location, else None."""
    if not schema.location_requires_group_instance(location):
        return None

    drivers = schema.get_group_dependencies(location.group_id)
    if drivers:
        return _get_dependent_group_instance(schema, drivers, answer_store, location.group_instance)

    candidates = []
    group_drivers = schema.get_group_dependencies_group_drivers()
    if location.group_id in group_drivers or location.block_id in group_drivers:
        candidates = answer_store.filter(
            answer_ids=schema.get_answer_ids_for_group(location.group_id),
            group_instance=location.group_instance)
    if location.block_id in schema.get_group_dependencies_block_drivers():
        candidates = answer_store.filter(
            answer_ids=schema.get_answer_ids_for_block(location.block_id),
            answer_instance=answer_instance)

    # Reuse the id already stored on an existing answer, if any.
    for answer in candidates:
        if answer.get('group_instance_id'):
            return answer['group_instance_id']
    # Otherwise mint a fresh one.
    return str(uuid4())
def _get_dependent_group_instance(schema, dependent_drivers, answer_store, group_instance):
    # Build the ordered list of group_instance_ids produced by every driver,
    # then select the one at this location's group_instance index.
    group_instance_ids = []
    for driver_id in dependent_drivers:
        if driver_id in schema.get_group_dependencies_group_drivers():
            if schema.get_group(driver_id):
                driver_answer_ids = schema.get_answer_ids_for_group(driver_id)
                group_instance_ids.extend(_get_group_instance_ids_for_group(answer_store, driver_answer_ids))
            else:
                # Driver id names a block rather than a group, so answer ids
                # come from the block.
                # NOTE(review): this branch still aggregates with the
                # *_for_group helper (keyed on group_instance) — confirm it
                # should not be _get_group_instance_ids_for_block instead.
                driver_answer_ids = schema.get_answer_ids_for_block(driver_id)
                group_instance_ids.extend(_get_group_instance_ids_for_group(answer_store, driver_answer_ids))
        if driver_id in schema.get_group_dependencies_block_drivers():
            driver_answer_ids = schema.get_answer_ids_for_block(driver_id)
            group_instance_ids.extend(_get_group_instance_ids_for_block(answer_store, driver_answer_ids))
    # IndexError here means fewer driver instances exist than requested.
    return group_instance_ids[group_instance]
def _get_group_instance_ids_for_group(answer_store, group_answer_ids):
group_instance_ids = []
group_instances = 0
for answer in list(answer_store.filter(answer_ids=group_answer_ids)):
group_instances = max(group_instances, answer['group_instance'])
for i in range(group_instances + 1):
answers = list(answer_store.filter(answer_ids=group_answer_ids, group_instance=i))
if answers:
group_instance_ids.append(answers[0]['group_instance_id'])
return group_instance_ids
def _get_group_instance_ids_for_block(answer_store, block_answer_ids):
group_instance_ids = []
answer_instances = 0
for answer in list(answer_store.filter(answer_ids=block_answer_ids)):
answer_instances = max(answer_instances, answer['answer_instance'])
for i in range(answer_instances + 1):
answers = list(answer_store.filter(answer_ids=block_answer_ids, answer_instance=i))
if answers:
group_instance_ids.append(answers[0]['group_instance_id'])
return group_instance_ids
| {
"repo_name": "ONSdigital/eq-survey-runner",
"path": "app/helpers/schema_helpers.py",
"copies": "1",
"size": "4296",
"license": "mit",
"hash": 7786325956239812000,
"line_mean": 40.7087378641,
"line_max": 120,
"alpha_frac": 0.697858473,
"autogenerated": false,
"ratio": 3.732406602953953,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9924456546342654,
"avg_score": 0.0011617059222599113,
"num_lines": 103
} |
from functools import wraps
from uuid import UUID
from ipware.ip import get_ip
from django.conf import settings
from django.contrib.auth import REDIRECT_FIELD_NAME
from django.utils.decorators import available_attrs
from django.utils.six.moves.urllib.parse import urlparse
from django.shortcuts import resolve_url, redirect
from fir_irma.models import IrmaScan
from fir_irma.utils import process_error, ERROR_NOT_FOUND, ERROR_UNAUTHORIZED
def user_is_owner_or_privileged(login_url=None, redirect_field_name=REDIRECT_FIELD_NAME):
    """
    Decorator for views that checks that the user is the owner of the scan or privileged,
    redirecting to the log-in page if necessary. The request must have a scan_id parameter.
    """
    def decorator(view_func):
        @wraps(view_func, assigned=available_attrs(view_func))
        def _wrapped_view(request, *args, **kwargs):
            if request.user.is_authenticated():
                if 'scan_id' in kwargs:
                    scan_id = UUID(kwargs.get('scan_id'))
                    try:
                        scan = IrmaScan.objects.get(irma_scan=scan_id)
                    except IrmaScan.DoesNotExist:
                        return process_error(request, error=ERROR_NOT_FOUND)
                    # Owner must hold scan_files; anyone with read_all_results
                    # may view any scan.
                    if (request.user == scan.user and request.user.has_perm('fir_irma.scan_files')) or \
                            request.user.has_perm('fir_irma.read_all_results'):
                        kwargs['scan'] = scan
                        return view_func(request, *args, **kwargs)
                    # NOTE(review): an authenticated but unauthorized user
                    # falls through to the login redirect below rather than a
                    # 403 — confirm this is intended.
            elif settings.IRMA_ANONYMOUS_SCAN and settings.IRMA_IS_STANDALONE:
                # Anonymous standalone mode: ownership is tracked by client IP.
                if 'scan_id' in kwargs:
                    scan_id = UUID(kwargs.get('scan_id'))
                    client_ip = get_ip(request)
                    try:
                        scan = IrmaScan.objects.get(irma_scan=scan_id, client_ip=client_ip)
                        kwargs['scan'] = scan
                        return view_func(request, *args, **kwargs)
                    except IrmaScan.DoesNotExist:
                        return process_error(request, error=ERROR_NOT_FOUND)
            path = request.build_absolute_uri()
            resolved_login_url = resolve_url(login_url or settings.LOGIN_URL)
            # If the login url is the same scheme and net location then just
            # use the path as the "next" url.
            login_scheme, login_netloc = urlparse(resolved_login_url)[:2]
            current_scheme, current_netloc = urlparse(path)[:2]
            if ((not login_scheme or login_scheme == current_scheme) and
                    (not login_netloc or login_netloc == current_netloc)):
                path = request.get_full_path()
            from django.contrib.auth.views import redirect_to_login
            return redirect_to_login(
                path, resolved_login_url, redirect_field_name)
        # NOTE(review): CSRF exemption is applied to every wrapped view —
        # confirm this is deliberate.
        _wrapped_view.csrf_exempt = True
        return _wrapped_view
    return decorator
def login_and_perm_required(perm, login_url=None, unprivileged_url=None, redirect_field_name=REDIRECT_FIELD_NAME):
    """
    Decorator for views that checks that the user is authenticated and has
    the given permission(s), redirecting to the log-in page if necessary.
    """
    def decorator(view_func):
        @wraps(view_func, assigned=available_attrs(view_func))
        def _wrapped_view(request, *args, **kwargs):
            if not request.user.is_authenticated():
                # Anonymous standalone deployments skip authentication.
                if settings.IRMA_ANONYMOUS_SCAN and settings.IRMA_IS_STANDALONE:
                    return view_func(request, *args, **kwargs)
                return _redirect_to_login_page(request)
            perms = perm if isinstance(perm, (list, tuple)) else (perm, )
            if request.user.has_perms(perms):
                return view_func(request, *args, **kwargs)
            if unprivileged_url is not None:
                return redirect(unprivileged_url)
            return process_error(request, error=ERROR_UNAUTHORIZED)

        def _redirect_to_login_page(request):
            # Keep the "next" parameter relative when the login URL shares
            # scheme and host with the current request.
            path = request.build_absolute_uri()
            resolved_login_url = resolve_url(login_url or settings.LOGIN_URL)
            login_scheme, login_netloc = urlparse(resolved_login_url)[:2]
            current_scheme, current_netloc = urlparse(path)[:2]
            same_scheme = not login_scheme or login_scheme == current_scheme
            same_host = not login_netloc or login_netloc == current_netloc
            if same_scheme and same_host:
                path = request.get_full_path()
            from django.contrib.auth.views import redirect_to_login
            return redirect_to_login(
                path, resolved_login_url, redirect_field_name)

        _wrapped_view.csrf_exempt = True
        return _wrapped_view
    return decorator
| {
"repo_name": "gcrahay/fir_irma_plugin",
"path": "fir_irma/decorators.py",
"copies": "1",
"size": "4916",
"license": "apache-2.0",
"hash": 1218710751414903000,
"line_mean": 49.1632653061,
"line_max": 114,
"alpha_frac": 0.5921480879,
"autogenerated": false,
"ratio": 4.201709401709402,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.0010009568170383234,
"num_lines": 98
} |
from functools import wraps
from voluptuous import Schema, Required, Any, All, Length, Range
def validate_input(function):
    """Decorator that validates the kwargs of the function passed to it.

    Looks up a module-level validator named "<function name>_validator" and
    applies it to the call's keyword arguments before invoking the function.
    """
    @wraps(function)
    def wrapper(*args, **kwargs):
        name = function.__name__ + '_validator'  # find validator name
        # Fix: only the validator lookup is guarded. Previously the wrapped
        # function call sat inside the same try, so any KeyError raised by
        # the function itself was swallowed and misreported as a missing
        # validation schema.
        try:
            validator = globals()[name]
        except KeyError:
            raise Exception("Could not find validation schema for the"
                            " function " + function.__name__)
        validator(kwargs)  # call validation function
        return function(*args, **kwargs)
    return wrapper
# Voluptuous validation schemas for the MAPI client. validate_input() looks
# these up by naming convention: "<client method name>_validator".
# NOTE(review): create/update user schemas are currently identical; kept as
# separate objects so their requirements can diverge independently.
create_user_validator = Schema({
    Required('user_id'): basestring,
    'roles': [Any('user', 'superuser')],
    'netmask': basestring,
    'secret': All(basestring, Length(min=8, max=64)),
    'pubkey': basestring
})
update_user_validator = Schema({
    Required('user_id'): basestring,
    'roles': [Any('user', 'superuser')],
    'netmask': basestring,
    'secret': All(basestring, Length(min=8, max=64)),
    'pubkey': basestring
})
# Point-of-sale management.
create_pos_validator = Schema({
    Required('name'): basestring,
    Required('pos_type'): basestring,
    Required('pos_id'): basestring,
    'location': {'latitude': float,
                 'longitude': float,
                 'accuracy': float}
})
create_shortlink_validator = Schema({
    'callback_uri': basestring,
    'description': basestring,
    'serial_number': basestring
})
update_pos_validator = Schema({
    Required('pos_id'): basestring,
    Required('name'): basestring,
    Required('pos_type'): basestring,
    'location': {'latitude': float,
                 'longitude': float,
                 'accuracy': float}
})
# Payment requests. Monetary amounts are strings (avoids float rounding);
# expires_in is capped at 30 days (2592000 seconds).
create_payment_request_validator = Schema({
    'ledger': basestring,
    'display_message_uri': basestring,
    'callback_uri': basestring,
    Required('customer'): All(basestring, Length(max=100)),
    Required('currency'): All(basestring, Length(min=3, max=3)),
    Required('amount'): basestring,
    'additional_amount': basestring,
    'required_scope': basestring,
    'required_scope_text': basestring,
    'additional_edit': bool,
    Required('allow_credit'): bool,
    Required('pos_id'): basestring,
    Required('pos_tid'): basestring,
    'text': basestring,
    Required('action'): Any('auth', 'sale', 'AUTH', 'SALE'),
    Required('expires_in'): All(int, Range(min=0, max=2592000)),
    'links': [{'uri': basestring, 'caption': basestring, 'show_on': [Any('pending', 'fail', 'ok')]}],
    'line_items': Any([{
        Required('product_id'): basestring,
        'vat': basestring,
        'metadata': Any([{'key': basestring, 'value': basestring}], None),
        'description': basestring,
        'vat_rate': basestring,
        Required('total'): basestring,
        'tags': [{
            Required('tag_id'): basestring,
            Required('label'): basestring,
        }],
        Required('item_cost'): basestring,
        Required('quantity'): basestring,
    }], None)
})
update_payment_request_validator = Schema({
    'tid': basestring,
    'ledger': basestring,
    'display_message_uri': basestring,
    'callback_uri': basestring,
    'currency': All(basestring, Length(min=3, max=3)),
    'amount': basestring,
    'additional_amount': basestring,
    'required_scope': basestring,
    'required_scope_text': basestring,
    'capture_id': basestring,
    'refund_id': basestring,
    'text': basestring,
    'action': Any('reauth', 'capture', 'abort', 'release', 'refund',
                  'REAUTH', 'CAPTURE', 'ABORT', 'RELEASE', 'REFUND'),
    'line_items': Any([{
        Required('product_id'): basestring,
        'vat': basestring,
        'metadata': Any([{'key': basestring, 'value': basestring}], None),
        'description': basestring,
        'vat_rate': basestring,
        Required('total'): basestring,
        'tags': [{
            Required('tag_id'): basestring,
            Required('label'): basestring,
        }],
        Required('item_cost'): basestring,
        Required('quantity'): basestring,
    }], None)
})
update_ticket_validator = Schema({
    Required('tid'): basestring,
    'tickets': list,
})
update_shortlink_validator = Schema({
    Required('shortlink_id'): basestring,
    'callback_uri': basestring,
    'description': basestring
})
# Ledger and report management.
create_ledger_validator = Schema({
    Required('currency'): basestring,
    'description': basestring
})
update_ledger_validator = Schema({
    Required('ledger_id'): basestring,
    'description': basestring
})
close_report_validator = Schema({
    Required('ledger_id'): basestring,
    Required('report_id'): basestring,
    'callback_uri': basestring,
})
create_permission_request_validator = Schema({
    'ledger': basestring,
    Required('customer'): All(basestring, Length(max=100)),
    Required('pos_id'): basestring,
    Required('pos_tid'): basestring,
    'text': basestring,
    'callback_uri': basestring,
    Required('scope'): basestring,
    'expires_in': All(int, Range(min=0, max=2592000)),
})
| {
"repo_name": "mcash/merchant-api-python-sdk",
"path": "mcash/mapi_client/validation.py",
"copies": "1",
"size": "5048",
"license": "mit",
"hash": 3893830095458989600,
"line_mean": 30.748427673,
"line_max": 101,
"alpha_frac": 0.6109350238,
"autogenerated": false,
"ratio": 4.100731112916328,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.0002522449803248915,
"num_lines": 159
} |
from functools import wraps
from weakref import WeakValueDictionary, ref
from contextlib import contextmanager
from blinker import Signal
from ..exceptions import Cancel
class LockedSignal(Signal):
    # Optional lock serialising connect/disconnect/send; None means unlocked.
    _lock = None
    def _locked_super_method(self, name, *args, **kwargs):
        # Call the named Signal method while holding _lock (if any); annotate
        # any escaping exception with the signal that raised it.
        try:
            if self._lock:
                self._lock.acquire()
            try:
                return getattr(super(LockedSignal, self), name)(*args, **kwargs)
            finally:
                # NOTE(review): if _lock is swapped concurrently between the
                # acquire above and here, this could release a lock it never
                # acquired — confirm _lock is only set before first use.
                if self._lock:
                    self._lock.release()
        except Exception, e:
            # Tag the exception so handlers can identify the source signal.
            e.signal = self
            raise
    def connect(self, *args, **kwargs):
        return self._locked_super_method('connect', *args, **kwargs)
    def disconnect(self, *args, **kwargs):
        return self._locked_super_method('disconnect', *args, **kwargs)
    def send(self, *args, **kwargs):
        return self._locked_super_method('send', *args, **kwargs)
class NamedSignal(LockedSignal):
    """A :class:`LockedSignal` that carries a human-readable name."""

    def __init__(self, name, doc=None):
        LockedSignal.__init__(self, doc)
        #: The name of this signal.
        self.name = name

    def __repr__(self):
        # Splice the name into blinker's default repr, before the closing '>'.
        return "%s; %r>" % (Signal.__repr__(self)[:-1], self.name)
# Module-level strong references to every created signal; Namespace stores
# its values weakly, so without this list idle signals could be collected.
__sigstore__ = []
class Namespace(WeakValueDictionary):
"""A mapping of signal names to signals."""
def _wraps_signal_method(self, name, proxy=None, **kwargs):
method = getattr(self.signal(name, **kwargs), proxy)
@wraps(method)
def proxy(func):
return method(func)
return method
def signal(self, name, doc=None):
"""Return the :class:`NamedSignal` *name*, creating it if required.
Repeated calls to this function will return the same signal object.
"""
try:
return self[name]
except KeyError:
signal = NamedSignal(name, doc)
__sigstore__.append(signal)
return self.setdefault(name, signal)
def connect(self, name, **kwargs):
return self._wraps_signal_method(name, proxy='connect', **kwargs)
def disconnect(self):
for signal in self.itervalues():
for reciever in signal.receivers.values():
signal.disconnect(reciever())
def pprint(self):
for name, signal in self.items():
print name
for _id, receiver_ref in signal.receivers.items():
print '\t', receiver_ref()
# Default namespaces used across the package.
swarm = Namespace()
atom = Namespace()
def disconnect():
    # Detach every receiver in the package namespaces.
    swarm.disconnect()
    atom.disconnect()
    # NOTE(review): `transport` is not defined in this module's visible
    # scope — presumably another Namespace created elsewhere; confirm, or
    # this raises NameError at call time.
    transport.disconnect()
def cancel(results):
    # `results` is a blinker send() result: iterable of (handler, value)
    # pairs. Returns True (after logging) if any handler returned a Cancel.
    for handler, result in results:
        if isinstance(result, Cancel):
            result.log()
            return True
return False | {
"repo_name": "denz/swarm",
"path": "swarm/signals/base.py",
"copies": "1",
"size": "2800",
"license": "bsd-3-clause",
"hash": 2673548018904386600,
"line_mean": 27.01,
"line_max": 80,
"alpha_frac": 0.5864285714,
"autogenerated": false,
"ratio": 4.2682926829268295,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.004271788474879336,
"num_lines": 100
} |
from functools import wraps
from werkzeug.exceptions import BadRequest
from flask import request, jsonify
def body_required(f):
    """Decorator ensuring the request carries a JSON body with a 'code' field.

    On success the code string is passed as the first positional argument to
    the wrapped view; otherwise a 400 JSON error response is returned.
    """
    @wraps(f)
    def decorated_function(*args, **kwargs):
        try:
            # Set Force True to allow integration easily with any frontend.
            payload = request.get_json(force=True)
        except BadRequest:
            error_body = {
                'message': 'Viperid only accept POST method with data json.'
            }
            return jsonify(error=error_body), 400
        source = payload.get('code', '')
        if not source:
            return jsonify(error={'message': 'Code field is empty'}), 400
        return f(source, *args, **kwargs)
    return decorated_function
def get_error(e: Exception):
    """Build a JSON-serialisable error payload from an exception.

    Falls back through the common message attributes (`msg`, `message`,
    `args`) and uses '' when none carries a value.
    """
    if hasattr(e, 'msg'):
        message = e.msg
    elif hasattr(e, 'message'):
        message = e.message
    elif getattr(e, 'args', ()):
        # Fix: guard against an empty args tuple — `hasattr(e, 'args')` is
        # true for every exception, so Exception() previously raised
        # IndexError on e.args[0].
        message = e.args[0]
    else:
        message = ''
    return {
        'message': message,
        'text': e.text if hasattr(e, 'text') else '',
        'line_no': e.lineno if hasattr(e, 'lineno') else '',
        'source_code': e.source_code if hasattr(e, 'source_code') else []
    }
| {
"repo_name": "yograterol/viperid",
"path": "backend/viperid/utils.py",
"copies": "1",
"size": "1246",
"license": "mit",
"hash": 8851705519647142000,
"line_mean": 27.3181818182,
"line_max": 80,
"alpha_frac": 0.5288924559,
"autogenerated": false,
"ratio": 4.223728813559322,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5252621269459321,
"avg_score": null,
"num_lines": null
} |
from functools import wraps
from werkzeug.exceptions import Forbidden
def import_user():
    """Default user getter: Flask-Login's current_user, if importable."""
    try:
        from flask.ext.login import current_user
    except ImportError:
        raise ImportError(
            'User argument not passed and Flask-Login current_user could not be imported.')
    return current_user
def user_has(ability, get_user=import_user):
    """
    Takes an ability (a string name of either a role or an ability) and returns the function if the user has that ability
    """
    def wrapper(func):
        @wraps(func)
        def inner(*args, **kwargs):
            from .models import Ability
            wanted_ability = Ability.query.filter_by(
                name=ability).first()
            current_user = get_user()
            # Aggregate the abilities granted through every role.
            granted = []
            for role in current_user._roles:
                granted += role.abilities
            if wanted_ability not in granted:
                raise Forbidden("You do not have access")
            return func(*args, **kwargs)
        return inner
    return wrapper
def user_is(role, get_user=import_user):
    """
    Takes a role (a string role name) and returns the function if the user
    has that role; otherwise raises Forbidden.
    """
    def wrapper(func):
        @wraps(func)
        def inner(*args, **kwargs):
            # Fix: removed an unused `from .models import Role` import that
            # forced a model import (and potential ImportError) on every call.
            current_user = get_user()
            if role in current_user.roles:
                return func(*args, **kwargs)
            raise Forbidden("You do not have access")
        return inner
    return wrapper
| {
"repo_name": "compiteing/flask-ponypermission",
"path": "decorators.py",
"copies": "1",
"size": "1604",
"license": "mit",
"hash": 8791429048777255000,
"line_mean": 32.4166666667,
"line_max": 121,
"alpha_frac": 0.5947630923,
"autogenerated": false,
"ratio": 4.556818181818182,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.0010397749667971116,
"num_lines": 48
} |
from functools import wraps
from werkzeug import routing
from werkzeug.routing import BaseConverter
from . import utils
def rule_dispatcher(rule, request, response):
    """Invoke `rule`: dispatch protocol first, then 2-arg call, then 1-arg."""
    if getattr(rule, '_steinie_dispatchable', False):
        # Strip the mount prefix so nested routers see relative paths.
        request.original_path = request.path
        request.path = request.original_path.replace(rule.bound_prefix, '')
        return rule.dispatch(request, response)
    try:
        return rule(request, response)
    except TypeError:
        # Single-argument handlers receive only the request.
        return rule(request)
class Rule(routing.Rule):
    """werkzeug Rule that carries its handler and owning router for dispatch."""

    def __init__(self, *args, **kwargs):
        # Flag checked by rule_dispatcher to route through dispatch().
        self._steinie_dispatchable = True
        self.func = kwargs.pop('func', None)
        self.router = kwargs.pop('router', None)
        super(Rule, self).__init__(*args, **kwargs)
        # Populated when the rule is mounted under a Submount prefix.
        self.bound_prefix = None

    def dispatch(self, request, response):
        # Run the handler wrapped in the owning router's middleware chain.
        handler = utils.wrap_middleware_around_route(
            self.router.middleware,
            self.func,
            self.router
        )
        return handler(request, response)

    def empty(self):
        # Preserve our extra attributes when werkzeug copies the rule.
        clone = super(Rule, self).empty()
        clone.func = self.func
        clone.router = self.router
        return clone
class EndpointPrefix(routing.EndpointPrefix):
    # Thin alias over werkzeug's EndpointPrefix so callers can import it
    # from this module.
    pass
class Submount(routing.Submount):
    # Extends werkzeug's Submount to register each mounted rule in the
    # owning router's route table and record the mount prefix on the rule.
    def get_rules(self, map):
        for rule in super(Submount, self).get_rules(map):
            # Remember the prefix so dispatch can strip it from request.path.
            rule.bound_prefix = self.path
            key = rule.rule
            if not key.startswith('/'):
                key = '/' + key
            # NOTE(review): assumes `map` is this module's Map (has .router).
            map.router.routes[key] = rule
            yield rule
class Map(routing.Map):
    # werkzeug Map that keeps a back-reference to the Router owning it.
    def __init__(self, router=None, *args, **kwargs):
        self.router = router
        super(Map, self).__init__(*args, **kwargs)
class Router(object):
    """Express-style router built on werkzeug's Map/Rule machinery.

    Routes are registered through the HTTP-verb decorators (get/post/...),
    custom URL converters through param(), and middleware or sub-routers
    through use().
    """

    def __init__(self):
        self.map = Map(self)
        self.converters = {}
        self.routes = {}
        self.middleware = []

    def handle(self, request, response):
        """Match request.path against the map and dispatch the bound rule."""
        urls = self.map.bind_to_environ(request.environ)
        endpoint, params = urls.match(request.path)
        # All endpoints should start with a slash
        if not endpoint[0].startswith('/'):
            endpoint = '/' + endpoint
        request.params = params
        rule = self.routes[endpoint]
        # Fix: removed a leftover debug print() of the dispatched rule.
        return rule_dispatcher(rule, request, response)

    def method(self, route, methods=None):
        """Decorator factory registering `route` for the given HTTP methods."""
        def outer(fn):
            self.routes[route] = fn
            rule = Rule(route, endpoint=route, methods=methods, func=fn,
                        router=self)
            self.map.add(rule)

            @wraps(fn)
            def inner(*args, **kwargs):
                return fn(*args, **kwargs)
            return inner
        return outer

    def post(self, route):
        return self.method(route, methods=['POST', ])

    def get(self, route):
        return self.method(route, methods=['GET', ])

    def delete(self, route):
        return self.method(route, methods=['DELETE', ])

    def head(self, route):
        return self.method(route, methods=['HEAD', ])

    def info(self, route):
        # NOTE(review): 'INFO' is not a standard HTTP method — confirm intended.
        return self.method(route, methods=['INFO', ])

    def options(self, route):
        return self.method(route, methods=['OPTIONS', ])

    def patch(self, route):
        return self.method(route, methods=['PATCH'])

    def put(self, route):
        return self.method(route, methods=['PUT'])

    def trace(self, route):
        return self.method(route, methods=['TRACE'])

    def param(self, name):
        """Register a URL converter: the function receives the raw path
        segment and returns the converted value."""
        def outer(fn):
            class BasicParameter(BaseConverter):
                def to_python(self, value):
                    return fn(value)

            self.map.converters[name] = BasicParameter
            self.converters[name] = fn

            @wraps(fn)
            def inner(*args, **kwargs):
                return fn(*args, **kwargs)
            return inner
        return outer

    def use(self, *args):
        """use(middleware) appends middleware; use(route, router) mounts a
        sub-router. Other arities are silently ignored."""
        if len(args) == 1:
            self.middleware.append(args[0])
        if len(args) == 2:
            route, router = args
            self.add_router(route, router)

    def add_router(self, route, router):
        """Mount `router` under `route`, inheriting its URL converters."""
        if route.startswith('/'):
            route = route[1:]
        submount = route
        if not submount.startswith('/'):
            submount = '/' + submount
        rules = [a for a in router.map.iter_rules()]
        mount = EndpointPrefix(route, [Submount(submount, rules)])
        for name, fn in router.converters.items():
            self.param(name)(fn)
        self.map.add(mount)
| {
"repo_name": "tswicegood/steinie",
"path": "steinie/routing.py",
"copies": "1",
"size": "4506",
"license": "apache-2.0",
"hash": 2342661665469464600,
"line_mean": 27.7006369427,
"line_max": 75,
"alpha_frac": 0.5690190857,
"autogenerated": false,
"ratio": 4.133944954128441,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.520296403982844,
"avg_score": null,
"num_lines": null
} |
from functools import wraps
from werkzeug import secure_filename, escape
from flask import g, session, redirect, url_for
from forms import LoginForm, SearchForm, DeleteForm, ExportForm, ImportForm
def login_required(f):
    "view decorator that redirects nonauthenticated users to index"
    @wraps(f)
    def decorated_function(*args, **kwargs):
        username = session.get('username', None)
        if username is None:
            return redirect(url_for('index'))
        return f(*args, **kwargs)
    return decorated_function
def create_forms():
    # Instantiate one of each form on flask.g so templates can render them
    # on any page without per-view wiring.
    g.login_form = LoginForm()
    g.search_form = SearchForm()
    g.delete_form = DeleteForm()
    g.export_form = ExportForm()
    g.import_form = ImportForm()
    # sets form default to stored filename
    if not g.export_form.filename.data and session.get('filename', None):
        g.export_form.filename.data = session['filename']
def set_filename(filename):
    "gets a secure version of filename, sets it in the session, and returns it."
    safe = escape(secure_filename(filename))
    # Drop a trailing '.lsc' extension, case-insensitively.
    if safe.lower().endswith('.lsc'):
        safe = safe[:-4]
    session['filename'] = safe
    return safe
| {
"repo_name": "korylprince/lanschool_class_gen",
"path": "util.py",
"copies": "1",
"size": "1156",
"license": "bsd-3-clause",
"hash": -8575836022403600000,
"line_mean": 36.2903225806,
"line_max": 80,
"alpha_frac": 0.6920415225,
"autogenerated": false,
"ratio": 4.013888888888889,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5205930411388889,
"avg_score": null,
"num_lines": null
} |
from functools import wraps
from wraptor import context
class throttle(object):
    """ Throttle a function to execute at most 1 time per <seconds> seconds
    The function is executed on the forward edge.
    """
    def __init__(self, seconds=1, instance_method=False, return_throttle_result=False):
        # wraptor's context.throttle is the gate deciding whether a call runs.
        self.throttler = context.throttle(seconds=seconds)
        self.seconds = seconds
        self.instance_method = instance_method
        self.return_throttle_result = return_throttle_result
    def __call__(self, fn):
        if self.instance_method:
            @wraps(fn)
            def rewrite_instance_method(instance, *args, **kwargs):
                # the first time we are called we overwrite the method
                # on the class instance with a new memoize instance
                if hasattr(instance, fn.__name__):
                    bound_fn = fn.__get__(instance, instance.__class__)
                    # NOTE(review): the per-instance throttle does not forward
                    # return_throttle_result — confirm that is intended.
                    new_throttler = throttle(self.seconds)(bound_fn)
                    setattr(instance, fn.__name__, new_throttler)
                return getattr(instance, fn.__name__)(*args, **kwargs)
            return rewrite_instance_method
        @wraps(fn)
        def wrapped(*args, **kwargs):
            with self.throttler:
                result = fn(*args, **kwargs)
                # NOTE(review): when return_throttle_result is truthy this
                # returns True, not `result` — presumably signalling "the
                # call ran"; confirm against wraptor.context.throttle.
                return self.return_throttle_result or result
            # Reached only when the context manager suppresses control flow
            # out of the block (i.e. the call was throttled) — TODO confirm
            # how context.throttle implements skipping.
            return False if self.return_throttle_result else None
        return wrapped
| {
"repo_name": "carlsverre/wraptor",
"path": "wraptor/decorators/throttle.py",
"copies": "1",
"size": "1460",
"license": "mit",
"hash": -8047561446131984000,
"line_mean": 41.9411764706,
"line_max": 87,
"alpha_frac": 0.597260274,
"autogenerated": false,
"ratio": 4.591194968553459,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.0015597147950089125,
"num_lines": 34
} |
from functools import wraps
from xml.etree.ElementTree import XML
import json
import requests
__version__ = "0.6"
# Wildcard accepted by the Census API to request every variable/geography.
ALL = '*'
# Query endpoint template, filled with (year, dataset).
ENDPOINT_URL = 'http://api.census.gov/data/%s/%s'
# Per-dataset, per-year URLs of the XML variable-definition documents.
DEFINITIONS = {
    'acs5': {
        '2011': 'http://www.census.gov/developers/data/acs_5yr_2011_var.xml',
        '2010': 'http://www.census.gov/developers/data/acs_5yr_2010_var.xml',
    },
    'acs1/profile': {
        '2012': 'http://www.census.gov/developers/data/acs_1yr_profile_2012.xml',
    },
    'sf1': {
        '2010': 'http://www.census.gov/developers/data/sf1.xml',
        '2000': 'http://www.census.gov/developers/data/2000_sf1.xml',
        '1990': 'http://www.census.gov/developers/data/1990_sf1.xml',
    },
    'sf3': {
        '2000': 'http://www.census.gov/developers/data/2000_sf3.xml',
        '1990': 'http://www.census.gov/developers/data/1990_sf3.xml',
    },
}
class APIKeyError(Exception):
    """Raised when the Census API rejects the supplied API key."""

    def __init__(self, value):
        # `value` carries the server's error detail.
        self.value = value

    def __str__(self):
        return repr(self.value)
def list_or_str(v):
    """ Convert a single value into a list.
    """
    # Lists and tuples pass through untouched; anything else is wrapped.
    return v if isinstance(v, (list, tuple)) else [v]
def supported_years(*years):
    """Method decorator restricting a geography call to the given years.

    Reads the requested year from kwargs (falling back to the client's
    default_year) and raises UnsupportedYearException otherwise.
    """
    def inner(func):
        @wraps(func)
        def wrapper(self, *args, **kwargs):
            requested = kwargs.get('year', self.default_year)
            if int(requested) not in years:
                raise UnsupportedYearException('geography is not available in %s' % requested)
            return func(self, *args, **kwargs)
        return wrapper
    return inner
class CensusException(Exception):
    # Base error for all failures reported by this client.
    pass
class UnsupportedYearException(CensusException):
    # Raised by @supported_years when a dataset lacks the requested year.
    pass
class Client(object):
    """Base HTTP client for the Census API datasets (subclasses set
    `dataset` and `default_year`)."""

    def __init__(self, key, year=None, session=None):
        self._key = key
        self.session = session or requests.session()
        if year:
            self.default_year = year

    @property
    def years(self):
        """Years for which this dataset has variable definitions."""
        return [int(y) for y in DEFINITIONS[self.dataset].keys()]

    def fields(self, year, flat=False):
        """Download and parse the variable definitions for `year`.

        Returns {concept: {variable: description}}, or when flat=True,
        {variable: "concept: description"}. Raises CensusException when the
        dataset has no definitions for `year`.
        """
        data = {}
        fields_url = DEFINITIONS[self.dataset].get(str(year))
        if not fields_url:
            raise CensusException('%s is not available for %s' % (self.dataset, year))
        resp = requests.get(fields_url)
        doc = XML(resp.text)
        if flat:
            for elem in doc.iter('variable'):
                data[elem.attrib['name']] = "%s: %s" % (elem.attrib['concept'], elem.text)
        else:
            for concept_elem in doc.iter('concept'):
                concept = concept_elem.attrib['name']
                variables = {}
                for variable_elem in concept_elem.iter('variable'):
                    variables[variable_elem.attrib['name']] = variable_elem.text
                data[concept] = variables
        return data

    def get(self, fields, geo, year=None):
        """Query the API for `fields` within `geo`; returns a list of row dicts."""
        if year is None:
            year = self.default_year
        fields = list_or_str(fields)
        # Fix: enforce the API's 50-column limit *after* normalising
        # `fields`; previously a single field passed as a string longer than
        # 50 characters was rejected because len() counted characters.
        if len(fields) > 50:
            raise CensusException("only 50 columns per call are allowed")
        url = ENDPOINT_URL % (year, self.dataset)
        params = {
            'get': ",".join(fields),
            'for': geo['for'],
            'key': self._key,
        }
        if 'in' in geo:
            params['in'] = geo['in']
        headers = {
            'User-Agent': 'python-census/%s github.com/sunlightlabs/census' % __version__
        }
        resp = self.session.get(url, params=params, headers=headers)
        if resp.status_code == 200:
            try:
                data = json.loads(resp.text)
            except ValueError as ex:
                # The API reports a bad key as an HTML error page, not JSON.
                if '<title>Invalid Key</title>' in resp.text:
                    raise APIKeyError(' '.join(resp.text.splitlines()))
                else:
                    raise ex
            # First row is the column headers; zip the rest into dicts.
            headers = data[0]
            return [dict(zip(headers, d)) for d in data[1:]]
        elif resp.status_code == 204:
            # No content: valid query with an empty result set.
            return []
        else:
            raise CensusException(resp.text)
class ACS5Client(Client):
    """Client for the ACS 5-year estimates dataset."""

    default_year = 2011
    dataset = 'acs5'

    @supported_years(2011, 2010)
    def us(self, fields, **kwargs):
        """National-level query."""
        return self.get(fields, geo={'for': 'us:1'}, **kwargs)

    @supported_years(2011, 2010)
    def state(self, fields, state_fips, **kwargs):
        """Single-state query."""
        geography = {'for': 'state:%s' % state_fips}
        return self.get(fields, geo=geography, **kwargs)

    @supported_years(2011, 2010)
    def state_county(self, fields, state_fips, county_fips, **kwargs):
        """County query within a state."""
        geography = {
            'for': 'county:%s' % county_fips,
            'in': 'state:%s' % state_fips,
        }
        return self.get(fields, geo=geography, **kwargs)

    @supported_years(2011, 2010)
    def state_county_subdivision(self, fields, state_fips, county_fips, subdiv_fips, **kwargs):
        """County-subdivision query within a county."""
        geography = {
            'for': 'county subdivision:%s' % subdiv_fips,
            'in': 'state:%s county:%s' % (state_fips, county_fips),
        }
        return self.get(fields, geo=geography, **kwargs)

    @supported_years(2011, 2010)
    def state_county_tract(self, fields, state_fips, county_fips, tract, **kwargs):
        """Census-tract query within a county."""
        geography = {
            'for': 'tract:%s' % tract,
            'in': 'state:%s county:%s' % (state_fips, county_fips),
        }
        return self.get(fields, geo=geography, **kwargs)

    @supported_years(2011, 2010)
    def state_place(self, fields, state_fips, place, **kwargs):
        """Place query within a state."""
        geography = {
            'for': 'place:%s' % place,
            'in': 'state:%s' % state_fips,
        }
        return self.get(fields, geo=geography, **kwargs)

    @supported_years(2011, 2010)
    def state_district(self, fields, state_fips, district, **kwargs):
        """Congressional-district query within a state."""
        geography = {
            'for': 'congressional district:%s' % district,
            'in': 'state:%s' % state_fips,
        }
        return self.get(fields, geo=geography, **kwargs)

    @supported_years(2011)
    def zipcode(self, fields, zcta, **kwargs):
        """ZIP-code tabulation area query (2011 only)."""
        geography = {'for': 'zip code tabulation area:%s' % zcta}
        return self.get(fields, geo=geography, **kwargs)
class ACS1DpClient(Client):
    """Client for the ACS 1-year data profile dataset."""

    default_year = 2012
    dataset = 'acs1/profile'

    @supported_years(2012)
    def us(self, fields, **kwargs):
        """National-level query."""
        return self.get(fields, geo={'for': 'us:1'}, **kwargs)

    @supported_years(2012)
    def state(self, fields, state_fips, **kwargs):
        """Single-state query."""
        geography = {'for': 'state:%s' % state_fips}
        return self.get(fields, geo=geography, **kwargs)

    @supported_years(2012)
    def state_district(self, fields, state_fips, district, **kwargs):
        """Congressional-district query within a state."""
        geography = {
            'for': 'congressional district:%s' % district,
            'in': 'state:%s' % state_fips,
        }
        return self.get(fields, geo=geography, **kwargs)
class SF1Client(Client):
    """Client for the decennial Census Summary File 1 dataset."""
    default_year = 2010
    dataset = 'sf1'
    @supported_years(2010, 2000, 1990)
    def state(self, fields, state_fips, **kwargs):
        return self.get(fields, geo={
            'for': 'state:%s' % state_fips,
        }, **kwargs)
    @supported_years(2010, 2000, 1990)
    def state_county(self, fields, state_fips, county_fips, **kwargs):
        return self.get(fields, geo={
            'for': 'county:%s' % county_fips,
            'in': 'state:%s' % state_fips,
        }, **kwargs)
    @supported_years(2010)
    def state_county_subdivision(self, fields, state_fips, county_fips, subdiv_fips, **kwargs):
        return self.get(fields, geo={
            'for': 'county subdivision:%s' % subdiv_fips,
            'in': 'state:%s county:%s' % (state_fips, county_fips),
        }, **kwargs)
    @supported_years(2010, 2000, 1990)
    def state_county_tract(self, fields, state_fips, county_fips, tract, **kwargs):
        return self.get(fields, geo={
            'for': 'tract:%s' % tract,
            'in': 'state:%s county:%s' % (state_fips, county_fips),
        }, **kwargs)
    @supported_years(2010, 2000, 1990)
    def state_place(self, fields, state_fips, place, **kwargs):
        return self.get(fields, geo={
            'for': 'place:%s' % place,
            'in': 'state:%s' % state_fips,
        }, **kwargs)
    @supported_years(2010)
    def state_district(self, fields, state_fips, district, **kwargs):
        return self.get(fields, geo={
            'for': 'congressional district:%s' % district,
            'in': 'state:%s' % state_fips,
        }, **kwargs)
    @supported_years(2010)
    def state_msa(self, fields, state_fips, msa, **kwargs):
        return self.get(fields, geo={
            'for': 'metropolitan statistical area/micropolitan statistical area:%s' % msa,
            'in': 'state:%s' % state_fips,
        }, **kwargs)
    @supported_years(2010)
    def state_csa(self, fields, state_fips, csa, **kwargs):
        return self.get(fields, geo={
            'for': 'combined statistical area:%s' % csa,
            'in': 'state:%s' % state_fips,
        }, **kwargs)
    @supported_years(2010)
    def state_district_place(self, fields, state_fips, district, place, **kwargs):
        # BUG FIX: the 'for' format string was 'place:' with no %s placeholder,
        # which raises TypeError ("not all arguments converted") at call time.
        return self.get(fields, geo={
            'for': 'place:%s' % place,
            'in': 'state:%s congressional district:%s' % (state_fips, district),
        }, **kwargs)
    @supported_years(2010)
    def state_zipcode(self, fields, state_fips, zcta, **kwargs):
        return self.get(fields, geo={
            'for': 'zip code tabulation area:%s' % zcta,
            'in': 'state:%s' % state_fips,
        }, **kwargs)
class SF3Client(Client):
    """Client for the decennial Census Summary File 3 dataset."""
    default_year = 2000
    dataset = 'sf3'
    @supported_years(2000, 1990)
    def state(self, fields, state_fips, **kwargs):
        """Single-state query."""
        geo = {'for': 'state:{0}'.format(state_fips)}
        return self.get(fields, geo=geo, **kwargs)
    @supported_years(2000, 1990)
    def state_county(self, fields, state_fips, county_fips, **kwargs):
        """County-within-state query."""
        geo = {
            'for': 'county:{0}'.format(county_fips),
            'in': 'state:{0}'.format(state_fips),
        }
        return self.get(fields, geo=geo, **kwargs)
    @supported_years(2000, 1990)
    def state_county_tract(self, fields, state_fips, county_fips, tract, **kwargs):
        """Census-tract query."""
        geo = {
            'for': 'tract:{0}'.format(tract),
            'in': 'state:{0} county:{1}'.format(state_fips, county_fips),
        }
        return self.get(fields, geo=geo, **kwargs)
    @supported_years(2000, 1990)
    def state_place(self, fields, state_fips, place, **kwargs):
        """Place-within-state query."""
        geo = {
            'for': 'place:{0}'.format(place),
            'in': 'state:{0}'.format(state_fips),
        }
        return self.get(fields, geo=geo, **kwargs)
class Census(object):
    """Facade bundling one pre-built client per dataset, sharing one session."""
    ALL = ALL
    def __init__(self, key, year=None, session=None):
        session = session or requests.session()
        self.acs = ACS5Client(key, year, session)
        self.acs5 = ACS5Client(key, year, session)
        self.acs1dp = ACS1DpClient(key, year, session)
        self.sf1 = SF1Client(key, year, session)
        self.sf3 = SF3Client(key, year, session)
| {
"repo_name": "UDST/census",
"path": "census/core.py",
"copies": "1",
"size": "10773",
"license": "bsd-3-clause",
"hash": 1579940915145653000,
"line_mean": 29.6051136364,
"line_max": 95,
"alpha_frac": 0.5552770816,
"autogenerated": false,
"ratio": 3.3487721479639414,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.44040492295639416,
"avg_score": null,
"num_lines": null
} |
from functools import wraps
from xml.etree.ElementTree import XML
import json
import requests
__version__ = "0.7"
# Wildcard the Census API accepts to request every value of a field.
ALL = '*'
# Data endpoint template, filled with (year, dataset).
ENDPOINT_URL = 'http://api.census.gov/data/%s/%s'
# Variable-definition XML URLs per dataset, keyed by year (string keys).
DEFINITIONS = {
    'acs5': {
        '2013': 'http://api.census.gov/data/2013/acs5/variables.xml',
        '2012': 'http://api.census.gov/data/2012/acs5/variables.xml',
        '2011': 'http://api.census.gov/data/2011/acs5/variables.xml',
        '2010': 'http://api.census.gov/data/2010/acs5/variables.xml',
    },
    'acs1/profile': {
        '2012': 'http://api.census.gov/data/2012/acs1/variables.xml',
    },
    'sf1': {
        '2010': 'http://api.census.gov/data/2010/sf1/variables.xml',
        '2000': 'http://api.census.gov/data/2000/sf1/variables.xml',
        '1990': 'http://api.census.gov/data/1990/sf1/variables.xml',
    },
    'sf3': {
        '2000': 'http://api.census.gov/data/2000/sf3/variables.xml',
        '1990': 'http://api.census.gov/data/1990/sf3/variables.xml',
    },
}
class APIKeyError(Exception):
    '''Invalid API Key'''
    def __init__(self, value):
        # The response text (flattened) describing the rejected key.
        self.value = value
    def __str__(self):
        return repr(self.value)
def list_or_str(v):
    """Wrap a bare value in a list; lists and tuples pass through unchanged."""
    return v if isinstance(v, (list, tuple)) else [v]
def supported_years(*years):
    """Decorator factory restricting a geography method to the given years.

    The wrapped method reads an optional `year=` kwarg, falling back to
    `self.default_year`, and raises UnsupportedYearException otherwise.
    """
    def decorate(method):
        @wraps(method)
        def checked(self, *args, **kwargs):
            year = kwargs.get('year', self.default_year)
            if int(year) not in years:
                raise UnsupportedYearException(
                    'geography is not available in %s' % year)
            return method(self, *args, **kwargs)
        return checked
    return decorate
class CensusException(Exception):
    """Base exception for Census API failures."""
    pass
class UnsupportedYearException(CensusException):
    """Raised when a geography method is called for a year it does not support."""
    pass
class Client(object):
    """Base HTTP client for one Census API dataset.

    Subclasses define `dataset` (the URL path component) and `default_year`.
    """
    def __init__(self, key, year=None, session=None):
        # key: Census API key; year: overrides the class default;
        # session: optional shared requests session.
        self._key = key
        self.session = session or requests.session()
        if year:
            self.default_year = year
    @property
    def years(self):
        # Years for which a variable-definition URL is known for this dataset.
        return [int(y) for y in DEFINITIONS[self.dataset].keys()]
    def fields(self, year=None, flat=False):
        """Download and parse the variable definitions for `year`.

        flat=True returns {variable: "concept: description"}; otherwise the
        result is nested as {concept: {variable: description}}.
        """
        if not year:
            year = self.default_year
        data = {}
        fields_url = DEFINITIONS[self.dataset].get(str(year))
        if not fields_url:
            raise CensusException('%s is not available for %s' % (self.dataset, year))
        # Use the shared session (was a bare requests.get) so the request goes
        # through the same session as get(), consistent with the rest of the class.
        resp = self.session.get(fields_url)
        doc = XML(resp.text)
        if flat:
            for elem in doc.iter('variable'):
                data[elem.attrib['name']] = "%s: %s" % (elem.attrib['concept'], elem.text)
        else:
            for concept_elem in doc.iter('concept'):
                concept = concept_elem.attrib['name']
                variables = {}
                for variable_elem in concept_elem.iter('variable'):
                    variables[variable_elem.attrib['name']] = variable_elem.text
                data[concept] = variables
        return data
    def get(self, fields, geo, year=None):
        """Query the API for `fields` over geography `geo`.

        Returns a list of dicts (one per row), [] on HTTP 204, and raises
        APIKeyError / CensusException on failure.
        """
        if year is None:
            year = self.default_year
        fields = list_or_str(fields)
        # BUG FIX: the limit was checked before list_or_str(), so a single
        # field name longer than 50 characters falsely tripped it; count the
        # normalized field list instead.
        if len(fields) > 50:
            raise CensusException("only 50 columns per call are allowed")
        url = ENDPOINT_URL % (year, self.dataset)
        params = {
            'get': ",".join(fields),
            'for': geo['for'],
            'key': self._key,
        }
        if 'in' in geo:
            params['in'] = geo['in']
        headers = {
            'User-Agent': 'python-census/%s github.com/sunlightlabs/census' % __version__
        }
        resp = self.session.get(url, params=params, headers=headers)
        if resp.status_code == 200:
            try:
                data = json.loads(resp.text)
            except ValueError as ex:
                # A bad key yields an HTML error page rather than JSON.
                if '<title>Invalid Key</title>' in resp.text:
                    raise APIKeyError(' '.join(resp.text.splitlines()))
                else:
                    raise ex
            # First row is the header row; zip the remaining rows into dicts.
            headers = data[0]
            return [dict(zip(headers, d)) for d in data[1:]]
        elif resp.status_code == 204:
            # 204 No Content: valid query, no matching rows.
            return []
        else:
            raise CensusException(resp.text)
class ACS5Client(Client):
    """American Community Survey 5-year estimates (2010-2013 vintages).

    One method per supported geography level; `fields` is a variable name or
    list of names, and an optional `year=` kwarg overrides `default_year`
    (availability enforced by @supported_years).
    """
    default_year = 2013
    dataset = 'acs5'
    @supported_years(2013, 2012, 2011, 2010)
    def us(self, fields, **kwargs):
        return self.get(fields, geo={'for': 'us:1'}, **kwargs)
    @supported_years(2013, 2012, 2011, 2010)
    def state(self, fields, state_fips, **kwargs):
        return self.get(fields, geo={
            'for': 'state:%s' % state_fips,
        }, **kwargs)
    @supported_years(2013, 2012, 2011, 2010)
    def state_county(self, fields, state_fips, county_fips, **kwargs):
        return self.get(fields, geo={
            'for': 'county:%s' % county_fips,
            'in': 'state:%s' % state_fips,
        }, **kwargs)
    @supported_years(2013, 2012, 2011, 2010)
    def state_county_subdivision(self, fields, state_fips, county_fips, subdiv_fips, **kwargs):
        return self.get(fields, geo={
            'for': 'county subdivision:%s' % subdiv_fips,
            'in': 'state:%s county:%s' % (state_fips, county_fips),
        }, **kwargs)
    @supported_years(2013, 2012, 2011, 2010)
    def state_county_tract(self, fields, state_fips, county_fips, tract, **kwargs):
        return self.get(fields, geo={
            'for': 'tract:%s' % tract,
            'in': 'state:%s county:%s' % (state_fips, county_fips),
        }, **kwargs)
    @supported_years(2013, 2012, 2011, 2010)
    def state_county_blockgroup(self, fields, state_fips, county_fips, blockgroup, **kwargs):
        return self.get(fields, geo={
            'for': 'block group:%s' % blockgroup,
            'in': 'state:%s county:%s' % (state_fips, county_fips),
        }, **kwargs)
    @supported_years(2013, 2012, 2011, 2010)
    def state_place(self, fields, state_fips, place, **kwargs):
        return self.get(fields, geo={
            'for': 'place:%s' % place,
            'in': 'state:%s' % state_fips,
        }, **kwargs)
    @supported_years(2013, 2012, 2011, 2010)
    def state_district(self, fields, state_fips, district, **kwargs):
        return self.get(fields, geo={
            'for': 'congressional district:%s' % district,
            'in': 'state:%s' % state_fips,
        }, **kwargs)
    @supported_years(2013, 2012, 2011)
    def zipcode(self, fields, zcta, **kwargs):
        return self.get(fields, geo={
            'for': 'zip code tabulation area:%s' % zcta,
        }, **kwargs)
class ACS1DpClient(Client):
    """ACS 1-year data profile dataset (2012 only)."""
    default_year = 2012
    dataset = 'acs1/profile'
    @supported_years(2012)
    def us(self, fields, **kwargs):
        return self.get(fields, geo={'for': 'us:1'}, **kwargs)
    @supported_years(2012)
    def state(self, fields, state_fips, **kwargs):
        return self.get(fields, geo={
            'for': 'state:%s' % state_fips,
        }, **kwargs)
    @supported_years(2012)
    def state_district(self, fields, state_fips, district, **kwargs):
        return self.get(fields, geo={
            'for': 'congressional district:%s' % district,
            'in': 'state:%s' % state_fips,
        }, **kwargs)
class SF1Client(Client):
    """Client for the decennial Census Summary File 1 dataset."""
    default_year = 2010
    dataset = 'sf1'
    @supported_years(2010, 2000, 1990)
    def state(self, fields, state_fips, **kwargs):
        return self.get(fields, geo={
            'for': 'state:%s' % state_fips,
        }, **kwargs)
    @supported_years(2010, 2000, 1990)
    def state_county(self, fields, state_fips, county_fips, **kwargs):
        return self.get(fields, geo={
            'for': 'county:%s' % county_fips,
            'in': 'state:%s' % state_fips,
        }, **kwargs)
    @supported_years(2010)
    def state_county_subdivision(self, fields, state_fips, county_fips, subdiv_fips, **kwargs):
        return self.get(fields, geo={
            'for': 'county subdivision:%s' % subdiv_fips,
            'in': 'state:%s county:%s' % (state_fips, county_fips),
        }, **kwargs)
    @supported_years(2010, 2000, 1990)
    def state_county_tract(self, fields, state_fips, county_fips, tract, **kwargs):
        return self.get(fields, geo={
            'for': 'tract:%s' % tract,
            'in': 'state:%s county:%s' % (state_fips, county_fips),
        }, **kwargs)
    @supported_years(2010, 2000, 1990)
    def state_county_blockgroup(self, fields, state_fips, county_fips, blockgroup, **kwargs):
        return self.get(fields, geo={
            'for': 'block group:%s' % blockgroup,
            'in': 'state:%s county:%s' % (state_fips, county_fips),
        }, **kwargs)
    @supported_years(2010, 2000, 1990)
    def state_place(self, fields, state_fips, place, **kwargs):
        return self.get(fields, geo={
            'for': 'place:%s' % place,
            'in': 'state:%s' % state_fips,
        }, **kwargs)
    @supported_years(2010)
    def state_district(self, fields, state_fips, district, **kwargs):
        return self.get(fields, geo={
            'for': 'congressional district:%s' % district,
            'in': 'state:%s' % state_fips,
        }, **kwargs)
    @supported_years(2010)
    def state_msa(self, fields, state_fips, msa, **kwargs):
        return self.get(fields, geo={
            'for': 'metropolitan statistical area/micropolitan statistical area:%s' % msa,
            'in': 'state:%s' % state_fips,
        }, **kwargs)
    @supported_years(2010)
    def state_csa(self, fields, state_fips, csa, **kwargs):
        return self.get(fields, geo={
            'for': 'combined statistical area:%s' % csa,
            'in': 'state:%s' % state_fips,
        }, **kwargs)
    @supported_years(2010)
    def state_district_place(self, fields, state_fips, district, place, **kwargs):
        # BUG FIX: the 'for' format string was 'place:' with no %s placeholder,
        # which raises TypeError ("not all arguments converted") at call time.
        return self.get(fields, geo={
            'for': 'place:%s' % place,
            'in': 'state:%s congressional district:%s' % (state_fips, district),
        }, **kwargs)
    @supported_years(2010)
    def state_zipcode(self, fields, state_fips, zcta, **kwargs):
        return self.get(fields, geo={
            'for': 'zip code tabulation area:%s' % zcta,
            'in': 'state:%s' % state_fips,
        }, **kwargs)
class SF3Client(Client):
    """Client for the decennial Census Summary File 3 dataset (2000, 1990)."""
    default_year = 2000
    dataset = 'sf3'
    @supported_years(2000, 1990)
    def state(self, fields, state_fips, **kwargs):
        return self.get(fields, geo={
            'for': 'state:%s' % state_fips,
        }, **kwargs)
    @supported_years(2000, 1990)
    def state_county(self, fields, state_fips, county_fips, **kwargs):
        return self.get(fields, geo={
            'for': 'county:%s' % county_fips,
            'in': 'state:%s' % state_fips,
        }, **kwargs)
    @supported_years(2000, 1990)
    def state_county_tract(self, fields, state_fips, county_fips, tract, **kwargs):
        return self.get(fields, geo={
            'for': 'tract:%s' % tract,
            'in': 'state:%s county:%s' % (state_fips, county_fips),
        }, **kwargs)
    @supported_years(2000, 1990)
    def state_county_blockgroup(self, fields, state_fips, county_fips, blockgroup, **kwargs):
        return self.get(fields, geo={
            'for': 'block group:%s' % blockgroup,
            'in': 'state:%s county:%s' % (state_fips, county_fips),
        }, **kwargs)
    @supported_years(2000, 1990)
    def state_place(self, fields, state_fips, place, **kwargs):
        return self.get(fields, geo={
            'for': 'place:%s' % place,
            'in': 'state:%s' % state_fips,
        }, **kwargs)
class Census(object):
    """Facade bundling one pre-built client per dataset, sharing one session."""
    ALL = ALL
    def __init__(self, key, year=None, session=None):
        # requests is also imported at module level; this local import is
        # redundant but harmless.
        import requests
        if not session:
            session = requests.session()
        self.acs = ACS5Client(key, year, session)
        self.acs5 = ACS5Client(key, year, session)  # alias of `acs`
        self.acs1dp = ACS1DpClient(key, year, session)
        self.sf1 = SF1Client(key, year, session)
        self.sf3 = SF3Client(key, year, session)
| {
"repo_name": "joehand/census",
"path": "census/core.py",
"copies": "1",
"size": "12002",
"license": "bsd-3-clause",
"hash": 496316733574490000,
"line_mean": 30.6675461741,
"line_max": 95,
"alpha_frac": 0.5582402933,
"autogenerated": false,
"ratio": 3.3283416528008876,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9382183233166173,
"avg_score": 0.0008797425869430281,
"num_lines": 379
} |
from functools import wraps
from xml.etree.ElementTree import XML
import json
__version__ = "0.7"
# Wildcard the Census API accepts to request every value of a field.
ALL = '*'
# Data endpoint template, filled with (year, dataset).
ENDPOINT_URL = 'http://api.census.gov/data/%s/%s'
# Variable-definition XML URLs per dataset, keyed by year (string keys).
DEFINITIONS = {
    'acs5': {
        '2013': 'http://api.census.gov/data/2013/acs5/variables.xml',
        '2012': 'http://api.census.gov/data/2012/acs5/variables.xml',
        '2011': 'http://api.census.gov/data/2011/acs5/variables.xml',
        '2010': 'http://api.census.gov/data/2010/acs5/variables.xml',
    },
    'acs1/profile': {
        '2012': 'http://api.census.gov/data/2012/acs1/variables.xml',
    },
    'sf1': {
        '2010': 'http://api.census.gov/data/2010/sf1/variables.xml',
        '2000': 'http://api.census.gov/data/2000/sf1/variables.xml',
        '1990': 'http://api.census.gov/data/1990/sf1/variables.xml',
    },
    'sf3': {
        '2000': 'http://api.census.gov/data/2000/sf3/variables.xml',
        '1990': 'http://api.census.gov/data/1990/sf3/variables.xml',
    },
}
class APIKeyError(Exception):
    '''Invalid API Key'''
    def __init__(self, value):
        # The response text (flattened) describing the rejected key.
        self.value = value
    def __str__(self):
        return repr(self.value)
def list_or_str(v):
    """ Convert a single value into a list.

    Lists and tuples pass through unchanged; any other value is wrapped
    in a one-element list.
    """
    if isinstance(v, (list, tuple)):
        return v
    return [v]
def supported_years(*years):
    """Decorator factory restricting a geography method to the given years."""
    def inner(func):
        @wraps(func)
        def wrapper(self, *args, **kwargs):
            # An explicit `year=` kwarg wins; otherwise the client's default.
            year = kwargs.get('year', self.default_year)
            if int(year) not in years:
                raise UnsupportedYearException('geography is not available in %s' % year)
            return func(self, *args, **kwargs)
        return wrapper
    return inner
class CensusException(Exception):
    """Base exception for Census API failures."""
    pass
class UnsupportedYearException(CensusException):
    """Raised when a geography method is called for a year it does not support."""
    pass
class Client(object):
    """Base HTTP client for one Census API dataset.

    Subclasses define `dataset` (the URL path component) and `default_year`.
    """
    def __init__(self, key, year=None, session=None):
        # Note: this module never imports requests at top level, so the
        # import must stay local here.
        import requests
        self._key = key
        self.session = session or requests.session()
        if year:
            self.default_year = year
    @property
    def years(self):
        # Years for which a variable-definition URL is known for this dataset.
        return [int(y) for y in DEFINITIONS[self.dataset].keys()]
    def fields(self, year, flat=False):
        """Download and parse the variable definitions for `year`.

        flat=True returns {variable: "concept: description"}; otherwise the
        result is nested as {concept: {variable: description}}.
        """
        data = {}
        fields_url = DEFINITIONS[self.dataset].get(str(year))
        if not fields_url:
            raise CensusException('%s is not available for %s' % (self.dataset, year))
        # BUG FIX: `requests` is only imported locally inside __init__, so the
        # previous module-global `requests.get(...)` here raised NameError.
        # Use the session built in __init__ instead.
        resp = self.session.get(fields_url)
        doc = XML(resp.text)
        if flat:
            for elem in doc.iter('variable'):
                data[elem.attrib['name']] = "%s: %s" % (elem.attrib['concept'], elem.text)
        else:
            for concept_elem in doc.iter('concept'):
                concept = concept_elem.attrib['name']
                variables = {}
                for variable_elem in concept_elem.iter('variable'):
                    variables[variable_elem.attrib['name']] = variable_elem.text
                data[concept] = variables
        return data
    def get(self, fields, geo, year=None):
        """Query the API for `fields` over geography `geo`.

        Returns a list of dicts (one per row), [] on HTTP 204, and raises
        APIKeyError / CensusException on failure.
        """
        if year is None:
            year = self.default_year
        fields = list_or_str(fields)
        # BUG FIX: the limit was checked before list_or_str(), so a single
        # field name longer than 50 characters falsely tripped it; count the
        # normalized field list instead.
        if len(fields) > 50:
            raise CensusException("only 50 columns per call are allowed")
        url = ENDPOINT_URL % (year, self.dataset)
        params = {
            'get': ",".join(fields),
            'for': geo['for'],
            'key': self._key,
        }
        if 'in' in geo:
            params['in'] = geo['in']
        headers = {
            'User-Agent': 'python-census/%s github.com/sunlightlabs/census' % __version__
        }
        resp = self.session.get(url, params=params, headers=headers)
        if resp.status_code == 200:
            try:
                data = json.loads(resp.text)
            except ValueError as ex:
                # A bad key yields an HTML error page rather than JSON.
                if '<title>Invalid Key</title>' in resp.text:
                    raise APIKeyError(' '.join(resp.text.splitlines()))
                else:
                    raise ex
            # First row is the header row; zip the remaining rows into dicts.
            headers = data[0]
            return [dict(zip(headers, d)) for d in data[1:]]
        elif resp.status_code == 204:
            # 204 No Content: valid query, no matching rows.
            return []
        else:
            raise CensusException(resp.text)
class ACS5Client(Client):
    """American Community Survey 5-year estimates (2010-2013 vintages).

    One method per supported geography level; `fields` is a variable name or
    list of names, and an optional `year=` kwarg overrides `default_year`
    (availability enforced by @supported_years).
    """
    default_year = 2013
    dataset = 'acs5'
    @supported_years(2013, 2012, 2011, 2010)
    def us(self, fields, **kwargs):
        return self.get(fields, geo={'for': 'us:1'}, **kwargs)
    @supported_years(2013, 2012, 2011, 2010)
    def state(self, fields, state_fips, **kwargs):
        return self.get(fields, geo={
            'for': 'state:%s' % state_fips,
        }, **kwargs)
    @supported_years(2013, 2012, 2011, 2010)
    def state_county(self, fields, state_fips, county_fips, **kwargs):
        return self.get(fields, geo={
            'for': 'county:%s' % county_fips,
            'in': 'state:%s' % state_fips,
        }, **kwargs)
    @supported_years(2013, 2012, 2011, 2010)
    def state_county_subdivision(self, fields, state_fips, county_fips, subdiv_fips, **kwargs):
        return self.get(fields, geo={
            'for': 'county subdivision:%s' % subdiv_fips,
            'in': 'state:%s county:%s' % (state_fips, county_fips),
        }, **kwargs)
    @supported_years(2013, 2012, 2011, 2010)
    def state_county_tract(self, fields, state_fips, county_fips, tract, **kwargs):
        return self.get(fields, geo={
            'for': 'tract:%s' % tract,
            'in': 'state:%s county:%s' % (state_fips, county_fips),
        }, **kwargs)
    @supported_years(2013, 2012, 2011, 2010)
    def state_county_blockgroup(self, fields, state_fips, county_fips, blockgroup, **kwargs):
        return self.get(fields, geo={
            'for': 'block group:%s' % blockgroup,
            'in': 'state:%s county:%s' % (state_fips, county_fips),
        }, **kwargs)
    @supported_years(2013, 2012, 2011, 2010)
    def state_place(self, fields, state_fips, place, **kwargs):
        return self.get(fields, geo={
            'for': 'place:%s' % place,
            'in': 'state:%s' % state_fips,
        }, **kwargs)
    @supported_years(2013, 2012, 2011, 2010)
    def state_district(self, fields, state_fips, district, **kwargs):
        return self.get(fields, geo={
            'for': 'congressional district:%s' % district,
            'in': 'state:%s' % state_fips,
        }, **kwargs)
    @supported_years(2013, 2012, 2011)
    def zipcode(self, fields, zcta, **kwargs):
        return self.get(fields, geo={
            'for': 'zip code tabulation area:%s' % zcta,
        }, **kwargs)
class ACS1DpClient(Client):
    """ACS 1-year data profile dataset (2012 only)."""
    default_year = 2012
    dataset = 'acs1/profile'
    @supported_years(2012)
    def us(self, fields, **kwargs):
        return self.get(fields, geo={'for': 'us:1'}, **kwargs)
    @supported_years(2012)
    def state(self, fields, state_fips, **kwargs):
        return self.get(fields, geo={
            'for': 'state:%s' % state_fips,
        }, **kwargs)
    @supported_years(2012)
    def state_district(self, fields, state_fips, district, **kwargs):
        return self.get(fields, geo={
            'for': 'congressional district:%s' % district,
            'in': 'state:%s' % state_fips,
        }, **kwargs)
class SF1Client(Client):
    """Client for the decennial Census Summary File 1 dataset."""
    default_year = 2010
    dataset = 'sf1'
    @supported_years(2010, 2000, 1990)
    def state(self, fields, state_fips, **kwargs):
        return self.get(fields, geo={
            'for': 'state:%s' % state_fips,
        }, **kwargs)
    @supported_years(2010, 2000, 1990)
    def state_county(self, fields, state_fips, county_fips, **kwargs):
        return self.get(fields, geo={
            'for': 'county:%s' % county_fips,
            'in': 'state:%s' % state_fips,
        }, **kwargs)
    @supported_years(2010)
    def state_county_subdivision(self, fields, state_fips, county_fips, subdiv_fips, **kwargs):
        return self.get(fields, geo={
            'for': 'county subdivision:%s' % subdiv_fips,
            'in': 'state:%s county:%s' % (state_fips, county_fips),
        }, **kwargs)
    @supported_years(2010, 2000, 1990)
    def state_county_tract(self, fields, state_fips, county_fips, tract, **kwargs):
        return self.get(fields, geo={
            'for': 'tract:%s' % tract,
            'in': 'state:%s county:%s' % (state_fips, county_fips),
        }, **kwargs)
    @supported_years(2010, 2000, 1990)
    def state_county_blockgroup(self, fields, state_fips, county_fips, blockgroup, **kwargs):
        return self.get(fields, geo={
            'for': 'block group:%s' % blockgroup,
            'in': 'state:%s county:%s' % (state_fips, county_fips),
        }, **kwargs)
    @supported_years(2010, 2000, 1990)
    def state_place(self, fields, state_fips, place, **kwargs):
        return self.get(fields, geo={
            'for': 'place:%s' % place,
            'in': 'state:%s' % state_fips,
        }, **kwargs)
    @supported_years(2010)
    def state_district(self, fields, state_fips, district, **kwargs):
        return self.get(fields, geo={
            'for': 'congressional district:%s' % district,
            'in': 'state:%s' % state_fips,
        }, **kwargs)
    @supported_years(2010)
    def state_msa(self, fields, state_fips, msa, **kwargs):
        return self.get(fields, geo={
            'for': 'metropolitan statistical area/micropolitan statistical area:%s' % msa,
            'in': 'state:%s' % state_fips,
        }, **kwargs)
    @supported_years(2010)
    def state_csa(self, fields, state_fips, csa, **kwargs):
        return self.get(fields, geo={
            'for': 'combined statistical area:%s' % csa,
            'in': 'state:%s' % state_fips,
        }, **kwargs)
    @supported_years(2010)
    def state_district_place(self, fields, state_fips, district, place, **kwargs):
        # BUG FIX: the 'for' format string was 'place:' with no %s placeholder,
        # which raises TypeError ("not all arguments converted") at call time.
        return self.get(fields, geo={
            'for': 'place:%s' % place,
            'in': 'state:%s congressional district:%s' % (state_fips, district),
        }, **kwargs)
    @supported_years(2010)
    def state_zipcode(self, fields, state_fips, zcta, **kwargs):
        return self.get(fields, geo={
            'for': 'zip code tabulation area:%s' % zcta,
            'in': 'state:%s' % state_fips,
        }, **kwargs)
class SF3Client(Client):
    """Client for the decennial Census Summary File 3 dataset (2000, 1990)."""
    default_year = 2000
    dataset = 'sf3'
    @supported_years(2000, 1990)
    def state(self, fields, state_fips, **kwargs):
        return self.get(fields, geo={
            'for': 'state:%s' % state_fips,
        }, **kwargs)
    @supported_years(2000, 1990)
    def state_county(self, fields, state_fips, county_fips, **kwargs):
        return self.get(fields, geo={
            'for': 'county:%s' % county_fips,
            'in': 'state:%s' % state_fips,
        }, **kwargs)
    @supported_years(2000, 1990)
    def state_county_tract(self, fields, state_fips, county_fips, tract, **kwargs):
        return self.get(fields, geo={
            'for': 'tract:%s' % tract,
            'in': 'state:%s county:%s' % (state_fips, county_fips),
        }, **kwargs)
    @supported_years(2000, 1990)
    def state_county_blockgroup(self, fields, state_fips, county_fips, blockgroup, **kwargs):
        return self.get(fields, geo={
            'for': 'block group:%s' % blockgroup,
            'in': 'state:%s county:%s' % (state_fips, county_fips),
        }, **kwargs)
    @supported_years(2000, 1990)
    def state_place(self, fields, state_fips, place, **kwargs):
        return self.get(fields, geo={
            'for': 'place:%s' % place,
            'in': 'state:%s' % state_fips,
        }, **kwargs)
class Census(object):
    """Facade bundling one pre-built client per dataset, sharing one session."""
    ALL = ALL
    def __init__(self, key, year=None, session=None):
        # This module has no top-level `import requests`; the local import
        # here is load-bearing.
        import requests
        if not session:
            session = requests.session()
        self.acs = ACS5Client(key, year, session)
        self.acs5 = ACS5Client(key, year, session)  # alias of `acs`
        self.acs1dp = ACS1DpClient(key, year, session)
        self.sf1 = SF1Client(key, year, session)
        self.sf3 = SF3Client(key, year, session)
| {
"repo_name": "dmc2015/census",
"path": "census/core.py",
"copies": "1",
"size": "11946",
"license": "bsd-3-clause",
"hash": 1297283924011553000,
"line_mean": 30.7712765957,
"line_max": 95,
"alpha_frac": 0.5581784698,
"autogenerated": false,
"ratio": 3.3275766016713093,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4385755071471309,
"avg_score": null,
"num_lines": null
} |
from functools import wraps
import aiohttp
import logging
logger = logging.getLogger(__name__)
async def get_ticket(account, password):
    """POST account credentials to F-List's getApiTicket endpoint; return parsed JSON.

    You'll receive a ticket and a ton of other things that you probably won't need unless you're
    making an f-chat client. Tickets are valid for 24 hours from issue, and invalidate all previous
    tickets for the account when issued."""
    data = {
        'account': account,
        'password': password,
    }
    logger.info("F-List API call: getApiTicket{arguments}".format(arguments=data))
    async with aiohttp.ClientSession() as session:
        async with session.post("https://www.f-list.net/json/getApiTicket.php", data=data) as response:
            return await response.json()
# API definitions
def flist_api_decorator(func):
    """Turn a stub function into an async F-List JSON API call.

    The stub's name (with '_' replaced by '-') selects the endpoint, and its
    declared parameter names become the POST form field names; the wrapper
    pulls each one out of **kwargs and returns the parsed JSON response.
    """
    # BUG FIX: co_varnames also lists function *locals* after the parameters;
    # slice to co_argcount so only real parameters become form fields.
    api_variables = func.__code__.co_varnames[:func.__code__.co_argcount]
    api_name = func.__name__
    flist_api_url = "https://www.f-list.net/json/api/{function}.php"
    @wraps(func)
    async def wrapper(**kwargs):
        logger.info("F-List API call: {method}{arguments}".format(method=api_name, arguments=kwargs))
        data = {}
        for argument in api_variables:
            data[argument] = kwargs.get(argument)
        async with aiohttp.ClientSession() as session:
            async with session.post(flist_api_url.format(function=api_name.replace('_', '-')), data=data) as response:
                return await response.json()
    return wrapper
# ======================================== Bookmarks
@flist_api_decorator
def bookmark_add(account, ticket, name):
    """Bookmark a profile. Takes one argument, "name"."""
    # Stub: flist_api_decorator replaces this body with the HTTP call.
    pass
@flist_api_decorator
def bookmark_list(account, ticket):
    """List all bookmarked profiles."""
    # Stub: flist_api_decorator replaces this body with the HTTP call.
    pass
@flist_api_decorator
def bookmark_remove(account, ticket, name):
    """Remove a profile bookmark. Takes one argument, "name"."""
    # Stub: flist_api_decorator replaces this body with the HTTP call.
    pass
# ======================================== Character Data
@flist_api_decorator
def character_customkinks(account, ticket, name):
    """Get a character's custom kinks. Requires one parameter, "name"."""
    # Stub: flist_api_decorator replaces this body with the HTTP call.
    pass
@flist_api_decorator
def character_get(name):
    """Get basic character info. Does not require the account and ticket form fields.
    Requires one parameter, "name"."""
    # Stub: flist_api_decorator replaces this body with the HTTP call.
    pass
@flist_api_decorator
def character_images(account, ticket, name):
    """Get a list of all character image urls, and some extra info like the dimensions.
    Requires one parameter, "name"."""
    # Stub: flist_api_decorator replaces this body with the HTTP call.
    pass
@flist_api_decorator
def character_info(account, ticket, name):
    """Get a character's profile info fields. Requires one parameter, "name"."""
    # Stub: flist_api_decorator replaces this body with the HTTP call.
    pass
@flist_api_decorator
def character_kinks(account, ticket, name):
    """Get a character's kinks. Requires one parameter, "name"."""
    # Stub: flist_api_decorator replaces this body with the HTTP call.
    pass
@flist_api_decorator
def character_list(account, ticket):
    """Get a list of all the account's characters."""
    # Stub: flist_api_decorator replaces this body with the HTTP call.
    pass
# ======================================== Miscellaneous Data
@flist_api_decorator
def group_list(account, ticket):
    """Get the global list of all f-list groups."""
    # Stub: flist_api_decorator replaces this body with the HTTP call.
    pass
@flist_api_decorator
def ignore_list(account, ticket):
    """Get a list of all profiles your account has on chat-ignore."""
    # Stub: flist_api_decorator replaces this body with the HTTP call.
    pass
@flist_api_decorator
def info_list():
    """Get the global list of profile info fields, grouped. Dropdown options include a list of the options.
    Does not require the account and ticket form fields."""
    # Stub: flist_api_decorator replaces this body with the HTTP call.
    pass
@flist_api_decorator
def kink_list(account, ticket):
    """Get the global list of kinks, grouped. Does not require the account and ticket form fields."""
    # Stub: flist_api_decorator replaces this body with the HTTP call.
    pass
# ======================================== Friend list data, Friend requests
@flist_api_decorator
def friend_list(account, ticket):
    """List all friends, account-wide, in a "your-character (dest) => the character's friend (source)" format."""
    # Stub: flist_api_decorator replaces this body with the HTTP call.
    pass
@flist_api_decorator
def friend_remove(account, ticket, source_name, dest_name):
    """Remove a profile from your friends. Takes two arguments, "source_name" (your char)
    and "dest_name" (the character's friend you're removing)."""
    # Stub: flist_api_decorator replaces this body with the HTTP call.
    pass
@flist_api_decorator
def request_accept(account, ticket, request_id):
    """Accept an incoming friend request. Takes one argument, "request_id", which you
    can get with the request-list endpoint."""
    # Stub: flist_api_decorator replaces this body with the HTTP call.
    pass
@flist_api_decorator
def request_cancel(account, ticket, request_id):
    """Cancel an outgoing friend request. Takes one argument, "request_id", which you can
    get with the request-pending endpoint."""
    # Stub: flist_api_decorator replaces this body with the HTTP call.
    pass
@flist_api_decorator
def request_deny(account, ticket, request_id):
    """Deny a friend request. Takes one argument, "request_id", which you can get with the request-list endpoint."""
    # Stub: flist_api_decorator replaces this body with the HTTP call.
    pass
@flist_api_decorator
def request_list(account, ticket):
    """Get all incoming friend requests."""
    # Stub: flist_api_decorator replaces this body with the HTTP call.
    pass
@flist_api_decorator
def request_pending(account, ticket):
    """Get all outgoing friend requests."""
    # Stub: flist_api_decorator replaces this body with the HTTP call.
    pass
@flist_api_decorator
def request_send(account, ticket, source_name, dest_name):
    """Send a friend request. Takes two arguments, "source_name" and "dest_name"."""
    # Stub: flist_api_decorator replaces this body with the HTTP call.
    pass
del flist_api_decorator
| {
"repo_name": "StormyDragon/python-flist",
"path": "flist/api.py",
"copies": "1",
"size": "5114",
"license": "bsd-2-clause",
"hash": 6765148392708345000,
"line_mean": 27.8926553672,
"line_max": 118,
"alpha_frac": 0.6691435276,
"autogenerated": false,
"ratio": 3.90381679389313,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.507296032149313,
"avg_score": null,
"num_lines": null
} |
from functools import wraps
import asyncio
from concurrent.futures import TimeoutError
from .log import logger
def _default_retry_for_result(result):
    # Default predicate: no returned value ever triggers a retry.
    return False
def _default_retry_for_exception(exception):
    # Default predicate: no exception ever triggers a retry (it propagates).
    return False
def retry(*dargs, **dkwargs):
    """Decorator adding retry behaviour to a coroutine (or plain) function.

    Usable bare (``@retry``) or with RetryHandler keyword options
    (``@retry(max_attempts=3, ...)``).

    BUG FIX: the wrappers previously did a bare ``yield from`` and discarded
    the value, so decorated calls always produced None; the result of the
    retried call is now returned to the caller.
    """
    if len(dargs) == 1 and callable(dargs[0]):
        # Bare @retry usage: dargs[0] is the function itself.
        def decorator(func):
            @wraps(func)
            def wrapper(*args, **kwargs):
                return (yield from RetryHandler().run(func, *args, **kwargs))
            return wrapper
        return decorator(dargs[0])
    else:
        # @retry(...) usage: forward the options to RetryHandler.
        def decorator(func):
            @wraps(func)
            def wrapper(*args, **kwargs):
                return (yield from RetryHandler(*dargs, **dkwargs).run(func, *args, **kwargs))
            return wrapper
        return decorator
class RetryHandler:
    """Drives retries of a (coroutine) function with configurable backoff.

    NOTE(review): written in the legacy ``@asyncio.coroutine``/``yield from``
    style, which was removed in Python 3.11; porting to ``async def`` would
    change how callers drive these coroutines, so the style is kept.
    """
    def __init__(self, should_retry_for_result=_default_retry_for_result,
                 should_retry_for_exception=_default_retry_for_exception,
                 multiplier=2, timeout=None, max_attempts=None, strategy=None):
        """
        :param should_retry_for_result: A function that is called with argument as result to allow retrial for specific
        set of results. Must return a boolean value
        :param should_retry_for_exception: A function that is called if the function to be retried threw an exception
        allow retrial for specific set of exceptions. Must return a boolean value
        :param multiplier: Must be an integer value, If defined, the retrial would be exponential with this behind the
        multiplier
        :param timeout: If defined, the function will be retried if no result was returned in this time.
        :param max_attempts: Max number of attempts to retry
        :param strategy: Must be a list of integers. If defined, retrial would follow this strategy. For ex. if strategy
        is [1,3,5,8,11], function would be retried at 1, 3, 5, 8, 11, 11, 11, ... seconds
        :return:
        """
        self._should_retry_for_result = should_retry_for_result
        self._should_retry_for_exception = should_retry_for_exception
        self._multiplier = multiplier
        self._timeout = timeout
        self._max_attempts = max_attempts
        self._strategy = strategy
    @asyncio.coroutine
    def run(self, func, *args, **kwargs):
        """Run `func` with retries; returns the final result."""
        # Normalize plain callables into coroutine functions.
        if not asyncio.iscoroutinefunction(func):
            func = asyncio.coroutine(func)
        return (yield from self._run_task(0, func, *args, **kwargs))
    @asyncio.coroutine
    def _run_task(self, attempts_made, func, *args, **kwargs):
        # Give up silently (returning None) once max_attempts is exceeded.
        if self._max_attempts is not None and attempts_made > self._max_attempts:
            return
        wait_time = self._get_wait_time(attempts_made)
        logger.info('Retrying %s after %s seconds', func.__name__, wait_time)
        yield from asyncio.sleep(wait_time)
        try:
            if self._timeout is None:
                result = yield from func(*args, **kwargs)
            else:
                try:
                    result = yield from asyncio.wait_for(func(*args, **kwargs), self._timeout)
                except TimeoutError:
                    # Timed-out attempt counts as a failure; retry.
                    return (yield from self._run_task(attempts_made + 1, func, *args, **kwargs))
            logger.info('Result retrieved from %s is %s', func.__name__, result)
            if self._should_retry_for_result(result):
                return (yield from self._run_task(attempts_made + 1, func, *args, **kwargs))
            else:
                return result
        except Exception as e:
            logger.exception('%s raised exception %s', func.__name__, e.__class__.__name__)
            if self._should_retry_for_exception(e):
                # BUG FIX: the retried call's result was computed and then
                # discarded (bare `yield from`); propagate it to the caller.
                return (yield from self._run_task(attempts_made + 1, func, *args, **kwargs))
            else:
                raise
    def _get_wait_time(self, attempts_made):
        # A strategy list wins, clamped to its last entry; otherwise
        # exponential backoff multiplier**n, with no wait before attempt 0.
        if self._strategy:
            return self._strategy[min(attempts_made, len(self._strategy) - 1)]
        else:
            return self._multiplier ** attempts_made if attempts_made != 0 else 0
| {
"repo_name": "nerandell/async_retrial",
"path": "retrial/retrial/retry.py",
"copies": "1",
"size": "4143",
"license": "bsd-2-clause",
"hash": 6631904108205326000,
"line_mean": 40.8484848485,
"line_max": 120,
"alpha_frac": 0.5942553705,
"autogenerated": false,
"ratio": 4.3427672955974845,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5437022666097484,
"avg_score": null,
"num_lines": null
} |
from functools import wraps
import asyncio
import random
from .connection import Connection
from .exceptions import NoAvailableConnectionsInPoolError
from .protocol import RedisProtocol, Script
from .nodemanager import NodeManager
from .crc import crc16
__all__ = ('Pool',)
class Pool:
    """
    Pool of connections to a Redis cluster: one list of `poolsize`
    connections per discovered node.

    Takes care of setting up the connections and connection pooling.

    When poolsize > 1 and some connections are in use because of transactions
    or blocking requests, the others are preferred.

    ::

        pool = yield from Pool.create(nodes, poolsize=10)
        result = yield from pool.set('key', 'value')
    """
    @classmethod
    @asyncio.coroutine
    def create(cls, nodes, *, password=None, db=0,
               encoder=None, poolsize=1, auto_reconnect=True, loop=None,
               protocol_class=RedisProtocol):
        """
        Create a new connection pool instance.

        :param nodes: Startup nodes used to discover the cluster topology.
        :type nodes: list
        :param password: Redis database password
        :type password: bytes
        :param db: Redis database
        :type db: int
        :param encoder: Encoder to use for encoding to or decoding from redis bytes to a native type.
        :type encoder: :class:`~asyncio_redis.encoders.BaseEncoder` instance.
        :param poolsize: The number of parallel connections per node.
        :type poolsize: int
        :param auto_reconnect: Enable auto reconnect
        :type auto_reconnect: bool
        :param loop: (optional) asyncio event loop.
        :type protocol_class: :class:`~asyncio_redis.RedisProtocol`
        :param protocol_class: (optional) redis protocol implementation
        """
        self = cls()
        self.nodes = NodeManager(nodes)
        yield from self.nodes.initialize()
        self._poolsize = poolsize
        # Map: node id -> list of `poolsize` Connection objects.
        self._connections = {}
        for node in self.nodes.nodes:
            host = self.nodes.nodes[node]['host']
            port = self.nodes.nodes[node]['port']
            # NOTE(review): after the loop these hold the *last* node's
            # address only; they are kept for __repr__.
            self._host = host
            self._port = port
            self._connections[node] = []
            for i in range(poolsize):
                connection = yield from Connection \
                    .create(host=self._host, port=self._port,
                            password=password, db=db, encoder=encoder,
                            auto_reconnect=auto_reconnect, loop=loop,
                            protocol_class=protocol_class)
                self._connections[node].append(connection)
        return self

    def __repr__(self):
        # NOTE(review): host/port shown are the last node from create(),
        # not the whole cluster.
        return 'Pool(host=%r, port=%r, poolsize=%r)' % (self._host, self._port, self._poolsize)

    @property
    def poolsize(self):
        """ Number of parallel connections (per node) in the pool."""
        return self._poolsize

    @property
    def connections_in_use(self):
        """
        Return how many protocols are in use.
        """
        # FIX: previously raised NotImplementedError followed by dead code
        # that assumed a flat connection list; count across all nodes.
        return sum(1 for conns in self._connections.values()
                   for c in conns if c.protocol.in_use)

    @property
    def connections_connected(self):
        """
        The amount of open TCP connections.
        """
        # FIX: same as connections_in_use — implemented over the per-node dict.
        return sum(1 for conns in self._connections.values()
                   for c in conns if c.protocol.is_connected)

    def _get_free_connection(self, host):
        """
        Return the next protocol instance for `host` that's not in use,
        or None if every connection is busy or disconnected.

        (A protocol in pubsub mode or doing a blocking request is considered busy,
        and can't be used for anything else.)
        """
        self._shuffle_connections(host)
        for c in self._connections[host]:
            if c.protocol.is_connected and not c.protocol.in_use:
                return c

    def _get_host_by_key(self, key, is_read=False):
        """
        Map `key` to its cluster hash slot and pick a serving node:
        index 0 (the master) for writes, a random slot member for reads.
        """
        slot = crc16(key) % self.nodes.RedisClusterHashSlots
        if not is_read:
            index = 0
        else:
            index = random.randint(0, len(self.nodes.slots[slot]) - 1)
        # Remembered so get_key can issue READONLY when a replica was chosen.
        self.used_index = index
        return self.nodes.slots[slot][index]

    def _shuffle_connections(self, node):
        """
        'Shuffle' connections: rotate the node's list by one so the load is
        divided equally among the connections.
        """
        self._connections[node] = self._connections[node][1:] + self._connections[node][:1]

    def __getattr__(self, name):
        """
        Proxy to a protocol. (This will choose a protocol instance that's not
        busy in a blocking request or transaction.)
        """
        def get_key(*args, debug=False):
            # Reads may be served by a replica; everything else hits a master.
            is_read = name in ('get', 'hget', 'hmget', 'hgetall')
            if not args:
                raise Exception('You must specify an argument (FIXME)')
            # Multi-key operations are only possible when every key maps to
            # the same target node.
            key = args[0]
            host = None
            if type(key) is list:
                for k in key:
                    tmp = self._get_host_by_key(k, is_read=is_read)
                    if host and tmp != host:
                        raise Exception('Multi-key operation is not possible because of different target hosts.')
                    host = tmp
            else:
                host = self._get_host_by_key(key, is_read=is_read)
            connection = self._get_free_connection(host)
            if debug:
                print('[DEBUG]: Connecting to', connection)
            if connection:
                # FIX: issue READONLY only after the None-check; previously a
                # replica read on an exhausted pool called a method on None.
                if is_read and self.used_index > 0:
                    yield from getattr(connection, 'readonly')()
                result = yield from getattr(connection, name)(*args)
                return result
            else:
                # The in_use/connected properties are implemented above, so
                # this error message now formats without blowing up.
                raise NoAvailableConnectionsInPoolError(
                    'No available connections in the pool: size=%s, in_use=%s, connected=%s' % (
                        self.poolsize, self.connections_in_use, self.connections_connected))
        return get_key

    # Proxy the register_script method, so that the returned object will
    # execute on any available connection in the pool.
    @asyncio.coroutine
    @wraps(RedisProtocol.register_script)
    def register_script(self, script: str) -> Script:
        # Call register_script from the Protocol.
        script = yield from self.__getattr__('register_script')(script)
        assert isinstance(script, Script)

        # Return a new script instead that runs it on any connection of the pool.
        return Script(script.sha, script.code, lambda: self.evalsha)

    def close(self):
        """
        Close all the connections in the pool.
        """
        for node in self._connections:
            for c in self._connections[node]:
                c.close()
        self._connections = {}
| {
"repo_name": "rightbraindev/asyncio-redis-cluster",
"path": "asyncio_redis_cluster/pool.py",
"copies": "1",
"size": "6873",
"license": "bsd-2-clause",
"hash": -3439923496479854000,
"line_mean": 33.5376884422,
"line_max": 113,
"alpha_frac": 0.5866433872,
"autogenerated": false,
"ratio": 4.49803664921466,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.558468003641466,
"avg_score": null,
"num_lines": null
} |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.