text
stringlengths
0
1.05M
meta
dict
from functools import partial


class Maybe():
    """Base type of the Maybe monad: a value that may be present or absent."""
    pass


class Just(Maybe):
    """A Maybe that actually holds a value."""

    def __init__(self, value):
        self.value = value

    def bind(self, f):
        # Feed the wrapped value into the next computation in the chain.
        return f(self.value)

    @staticmethod
    def return_(value):
        """Lift a plain value into the monad."""
        return Just(value)

    def __repr__(self):
        return "Just %s" % self.value


class Nothing(Maybe):
    """A Maybe holding no value; it absorbs every subsequent bind."""

    @staticmethod
    def return_():
        return Nothing()

    def bind(self, f):
        # Once a step has failed, all later steps short-circuit to Nothing.
        return Nothing()

    def __repr__(self):
        return "Nothing"


test_dict = {'existing' : {'bar' : 42}}


def lookup(key, d):
    """Monadic dict lookup: Just the value when *key* exists, else Nothing."""
    return Just.return_(d[key]) if key in d else Nothing.return_()


# wrap into Maybe monad
m = Just.return_(test_dict)
print(m)

# access an existing key, we should get Just result
m1 = m.bind(partial(lookup, 'existing'))
print(m1)

# access a key that does not exist, we should get Nothing
m2 = m1.bind(partial(lookup, 'notexisting'))
print(m2)

# access anything on that Nothing should return Nothing
m3 = m2.bind(partial(lookup, 'whatever'))
print(m3)
{ "repo_name": "Leonidas-from-XIV/7langs7weeks", "path": "haskell/day3.py", "copies": "1", "size": "1047", "license": "apache-2.0", "hash": 6564681888036964000, "line_mean": 19.94, "line_max": 57, "alpha_frac": 0.6227316141, "autogenerated": false, "ratio": 3.4554455445544554, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.4578177158654455, "avg_score": null, "num_lines": null }
from functools import partial


class memoize_base(object):
    """
    Memoize a method.

    Customized from the recipe below to allow different caches.
    Return values are cached on the relevant object.

    http://code.activestate.com/recipes/577452-a-memoize-decorator-for-instance-methods/
    """

    @property
    def cache_name(self):
        # Subclasses choose the attribute name where results are stored.
        raise NotImplementedError

    def __init__(self, func):
        self.func = func

    def __get__(self, obj, objtype = None):
        # Accessed on the class itself: expose the raw function.
        if obj is None:
            return self.func
        # Accessed on an instance: bind the instance as the first argument.
        return partial(self, obj)

    def __call__(self, *args, **kwargs):
        holder = args[0]
        try:
            cache = getattr(holder, self.cache_name)
        except AttributeError:
            cache = {}
            setattr(holder, self.cache_name, cache)
        # Key on the function plus every (hashable) argument.
        key = (self.func, args[1:], frozenset(kwargs.items()))
        if key not in cache:
            cache[key] = self.func(*args, **kwargs)
        return cache[key]


class memoize_method(memoize_base):
    """Memoize an instance method."""
    cache_name = '_memoize_method_cache'


class memoize_class_method(memoize_base):
    """Memoize a class method."""
    cache_name = '_memoize_class_method_cache'
{ "repo_name": "ironweb/lesfeuxverts-backend", "path": "utils/memoize.py", "copies": "1", "size": "1082", "license": "mit", "hash": -1448529162090622500, "line_mean": 22.5434782609, "line_max": 85, "alpha_frac": 0.6885397412, "autogenerated": false, "ratio": 3.1002865329512894, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.8865748988460744, "avg_score": 0.08461545713810895, "num_lines": 46 }
from functools import partial


class memoize(object):
    """cache the return value of a method

    This class is meant to be used as a decorator of methods. The return
    value from a given method invocation will be cached on the instance
    whose method was invoked. All arguments passed to a method decorated
    with memoize must be hashable.

    If a memoized method is invoked directly on its class the result will
    not be cached. Instead the method will be invoked like a static method:

        class Obj(object):
            @memoize
            def add_to(self, arg):
                return self + arg

        Obj.add_to(1)     # not enough arguments
        Obj.add_to(1, 2)  # returns 3, result is not cached
    """

    def __init__(self, func):
        self.func = func

    def __get__(self, obj, objtype=None):
        # Class-level access: hand back the undecorated function.
        if obj is None:
            return self.func
        # Instance access: bind the instance as the first argument.
        return partial(self, obj)

    def __call__(self, *args, **kw):
        obj = args[0]
        try:
            cache = obj.__cache
        except AttributeError:
            cache = obj.__cache = {}
        key = (self.func, args[1:], frozenset(kw.items()))
        try:
            return cache[key]
        except KeyError:
            cache[key] = self.func(*args, **kw)
            return cache[key]


if __name__ == "__main__":
    # example usage
    class Test(object):
        v = 0

        @memoize
        def inc_add(self, arg):
            self.v += 1
            return self.v + arg

    t = Test()
    assert t.inc_add(2) == t.inc_add(2)
    assert Test.inc_add(t, 2) != Test.inc_add(t, 2)
{ "repo_name": "ActiveState/code", "path": "recipes/Python/577452_memoize_decorator_instance/recipe-577452.py", "copies": "1", "size": "1569", "license": "mit", "hash": 4402852986898084000, "line_mean": 29.7647058824, "line_max": 78, "alpha_frac": 0.5685149777, "autogenerated": false, "ratio": 3.9127182044887783, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.4981233182188778, "avg_score": null, "num_lines": null }
from functools import partial


class PeekableIterator(object):
    '''
    Wrapping a generator with this provides a peek() function that
    lets you see whats coming on the next iteration.

    If the wrapped generator is done, calling peek() will return:

        <class 'StopIteration'>

    so you can control flow without needing to write code that relies
    on raised exceptions.
    '''

    def __init__(self, pipe):
        self.pipe = iter(pipe)
        self.preview = None
        self.steps = 0          # number of items handed out so far
        self._started = False   # the pipe is not consumed until first use
        # A next() that returns the StopIteration class instead of raising.
        self._next = partial(next, self.pipe, StopIteration)

    def peek(self):
        '''Return the item the next iteration step will yield.'''
        if not self._started:
            self._prime()
        return self.preview

    def _prime(self):
        # Lazily pull the first item into the preview slot. This is not
        # done in __init__ because iterators are lazy and must not
        # manipulate their input pipes before their first use.
        self._started = True
        self.preview = self._next()

    def _step(self, _input=None):
        # BUG FIX: the original primed itself through an else-branch that
        # never returned a value, so the very first call yielded None
        # instead of the first item of the pipe.
        if not self._started:
            self._prime()
        self.steps += 1
        prev, self.preview = self.preview, self._next()
        return prev

    # next()/send() both advance one step; once the pipe is exhausted the
    # StopIteration sentinel comes back instead of an exception.
    __next__, next, send = _step, _step, _step

    def __iter__(self):
        # Iterate until _step returns the StopIteration sentinel.
        return iter(self._step, StopIteration)

    def __str__(self):
        return '<PeekableIterator steps={} next={}>'.format(*((
            self.steps,
            repr(self.preview)
        ) if self._started else (
            0,
            'NotStarted'
        )))
    __repr__ = __str__


def peekable(pipe):
    return PeekableIterator(pipe)


if __name__ == '__main__':
    g = (i for i in range(1, 10))
    g = peekable(g)
    print(next(g))
    print(next(g))
    print(g.peek())
    print(next(g))
    for i in g:
        print('i -', i)
        print('preview -', g.peek())
    print(g)
    print(peekable(range(1)))
    print('---')
{ "repo_name": "CodyKochmann/generators", "path": "generators/peekable.py", "copies": "1", "size": "1974", "license": "mit", "hash": 8541235865153075000, "line_mean": 24.6363636364, "line_max": 63, "alpha_frac": 0.5466058764, "autogenerated": false, "ratio": 4.061728395061729, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.5108334271461729, "avg_score": null, "num_lines": null }
from functools import partial


def frameCollapseChanged(mainLayout):
    # Deferred resize: sum the heights of every child of the main layout and
    # shrink/grow the window to fit. Runs deferred so Maya has finished the
    # collapse/expand before the heights are measured.
    mc.evalDeferred("mc.window('ToolUI', e=1, h=sum([eval('mc.' + mc.objectTypeUI(child) + '(\\'' + child + '\\', q=1, h=1)') for child in mc.columnLayout('" + mainLayout + "', q=1, ca=1)]))")


def _add_buttons(parent_layout, labels):
    # One fixed-size button per label, all sharing the same demo command.
    for label in labels:
        mc.button(label=label, w=280, h=50, command='print "hello",', parent=parent_layout)


# rebuild the window from scratch if it already exists
if mc.window("ToolUI", exists=True):
    mc.deleteUI("ToolUI")

# create window
ToolWindow = mc.window("ToolUI", title="Tool UI", w=300, h=300,
                       mnb=True, mxb=False, sizeable=False)

# create a main layout
mainLayout = mc.columnLayout(w=300)

### FIRST TAB ###
frameLayout1 = mc.frameLayout(width=300, label="Tab 1", collapse=True,
                              collapsable=True, marginWidth=5, parent=mainLayout,
                              ec=partial(frameCollapseChanged, str(mainLayout)),
                              cc=partial(frameCollapseChanged, str(mainLayout)))
_add_buttons(frameLayout1, ("Button 1", "Button 2", "Button 3"))

### SECOND TAB ###
frameLayout2 = mc.frameLayout(width=300, label="Tab 2", collapse=True,
                              collapsable=True, marginWidth=5, parent=mainLayout,
                              ec=partial(frameCollapseChanged, str(mainLayout)),
                              cc=partial(frameCollapseChanged, str(mainLayout)))
_add_buttons(frameLayout2, ("Button 4", "Button 5", "Button 6"))

# show window
mc.showWindow(ToolWindow)

# I added a function that gets called every time one of the frames is
# collapsed or expanded. It gets all the children of the main layout, adds up
# their height, and then changes the window height.
Here is what it's doing, without the evalDeferred or list comprehension:

winHeight = 0
# iterate through all children of the main layout
for child in mc.columnLayout(mainLayout, q=1, ca=1):
    # for each child, get its type, then use that to run an eval command to get that UI item's height and add it to the height variable
    winHeight += eval('mc.' + mc.objectTypeUI(child) + '("' + child + '", q=1, h=1)')
# set the window height with the gathered height values
mc.window('ToolUI', e=1, h=winHeight)
{ "repo_name": "aaronfang/personal_scripts", "path": "scripts/changeUIWhenFrameCollapes.py", "copies": "2", "size": "2486", "license": "mit", "hash": 8185489675350577000, "line_mean": 53.0652173913, "line_max": 267, "alpha_frac": 0.6979082864, "autogenerated": false, "ratio": 3.2285714285714286, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.9710751306355931, "avg_score": 0.04314568172309949, "num_lines": 46 }
from functools import partial


def my_decorator(func):
    """First take: a plain decorator that logs every call."""
    print("this is executed from my_decorator")

    def wrapper(*args, **kwargs):
        print("this is executed from wrapper")
        print('func name -->', func.__qualname__)
        return func(*args, **kwargs)

    return wrapper


def decorator_with_args(prefix = ''):
    """Classic three-level decorator factory (kept for comparison, unused below)."""
    print(locals())

    def decorator(func):
        msg = prefix + func.__qualname__ + prefix
        print(locals())

        def wrapper(*args, **kwargs):
            print(msg)
            print(locals())
            return func(*args, **kwargs)

        return wrapper

    return decorator


from functools import partial


def my_decorator(func = None, *, prefix = ''):
    """Second take (shadows the first): one flat decorator that works both
    bare (@my_decorator) and with arguments (@my_decorator(prefix=...)),
    using functools.partial instead of an extra nesting level."""
    if func is None:
        # Called with arguments only: return a decorator awaiting the function.
        return partial(my_decorator, prefix = prefix)
    msg = prefix + func.__qualname__ + prefix

    def wrapper(*args, **kwargs):
        print(msg)
        return func(*args, **kwargs)

    return wrapper


@my_decorator
def add(x, y):
    print("function add is called")
    return x + y


@my_decorator(prefix = '###')
def mul(x, y):
    print("function mul is called")
    return x * y


a = my_decorator(add)


def awesome_function(a = 0, b = 0, *, prefix):
    print('a ->', a)
    print('b ->', b)
    print('prefix ->', prefix)
    return prefix + str(a + b)
{ "repo_name": "kmad1729/python_notes", "path": "metaprogramming/practice_code/my_closures.py", "copies": "1", "size": "1293", "license": "unlicense", "hash": 6729089734755528000, "line_mean": 21.6842105263, "line_max": 53, "alpha_frac": 0.5823665893, "autogenerated": false, "ratio": 3.802941176470588, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.9820952363441443, "avg_score": 0.01287108046582898, "num_lines": 57 }
from functools import partial


def next_id(r):
    """Allocate the next node id from the shared Redis counter."""
    return r.incr("nlevel_ids")


def make_key(*parts):
    """Join arbitrary parts into a colon-separated key string."""
    return ":".join(map(str, parts))


# Key families: n:* node hashes, m:* node metadata, i:* sorted-set indexes.
make_node_key = partial(make_key, "n")
make_meta_key = partial(make_key, "m")
make_index_key = partial(make_key, "i")


def list_to_pairs(flat_list):
    """Lazily regroup [a, b, c, d, ...] into (a, b), (c, d), ... pairs."""
    return zip(flat_list[::2], flat_list[1::2])


def render(key, info, meta):
    """Shape a node dict for callers; expose the parent only when present."""
    rendered = {'key': key, 'info': info}
    if 'p' in meta:
        rendered['parent'] = meta['p']
    return rendered


def node(r, info, parent=None):
    """Create a node hash and index it under its parent (or the roots index).

    Returns the new node's key.
    """
    node_id = next_id(r)
    node_key = make_node_key(node_id)
    meta_key = make_meta_key(node_key)
    index_key = make_index_key(parent) if parent else make_index_key("nlevel_roots")
    with r.pipeline() as pipe:
        pipe.hmset(node_key, info)
        # Score by id so zrange returns children in creation order.
        pipe.zadd(index_key, node_key, float(node_id))
        if parent:
            pipe.hset(meta_key, 'p', parent)
        pipe.execute()
    return node_key


def info(r, node_key):
    """Fetch one node's data and metadata in a single round trip."""
    meta_key = make_meta_key(node_key)
    with r.pipeline() as pipe:
        pipe.hgetall(node_key)
        pipe.hgetall(meta_key)
        node_data, node_meta = pipe.execute()
    return render(node_key, node_data, node_meta)


def nodes(r, key):
    """Return the rendered children of *key*, in id order (lazy map)."""
    index_key = make_index_key(key)
    node_keys = r.zrange(index_key, 0, -1)
    with r.pipeline() as pipe:
        # Queue a data + meta fetch per child, then split the flat reply.
        for node_key in node_keys:
            pipe.hgetall(node_key)
            pipe.hgetall(make_meta_key(node_key))
        pairs = list_to_pairs(pipe.execute())
    args = [[node_key] + list(pair) for node_key, pair in zip(node_keys, pairs)]
    return map(lambda item: render(*item), args)


def roots(r):
    """Top-level nodes (those indexed under the synthetic roots key)."""
    return nodes(r, "nlevel_roots")
{ "repo_name": "benwilber/nlevel", "path": "nlevel/core.py", "copies": "1", "size": "1644", "license": "mit", "hash": -5058143327267118000, "line_mean": 22.8260869565, "line_max": 72, "alpha_frac": 0.5900243309, "autogenerated": false, "ratio": 2.9357142857142855, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.8942207865963969, "avg_score": 0.016706150130063175, "num_lines": 69 }
from functools import partial


def resolve(source_str=None):
    """Resolve a wire reference: integer literals pass straight through,
    wire names are looked up in the global circuit dict ``c``.

    A named wire may hold an unevaluated thunk; the first read forces it
    and memoizes the result back into ``c``.
    """
    try:
        return int(source_str)
    except ValueError:
        wire = c[source_str]
        if hasattr(wire, '__call__'):
            # Force the thunk once, then cache the concrete value.
            c[source_str] = c[source_str]()
        return c[source_str]


def lshift(left=None, right=None):
    return left() << right()


def rshift(left=None, right=None):
    return left() >> right()


def orop(left=None, right=None):
    return left() | right()


def andop(left=None, right=None):
    return left() & right()


def notop(left=None):
    return ~ left()


if __name__ == '__main__':
    c = {}
    # Map of the two-operand gate names to their implementations.
    binary_ops = {
        'LSHIFT': lshift,
        'RSHIFT': rshift,
        'OR': orop,
        'AND': andop,
    }
    with open('../input.txt', 'r') as fp:
        for line in [line.strip() for line in fp if line != '']:
            parts = line.split(' ')
            parts_l = len(parts)
            target = parts[-1]
            source = partial(resolve, source_str=parts[-3])
            if parts_l == 3:
                # Direct source assignment: "123 -> x" or "y -> x".
                c[target] = source
            elif parts_l == 4:
                # Single-operand gate: "NOT y -> x".
                if parts[-4] == 'NOT':
                    c[target] = partial(notop, left=source)
            elif parts_l == 5:
                # Two-operand gate: "y OP z -> x".
                op = parts[-4]
                source2 = partial(resolve, source_str=parts[-5])
                if op in binary_ops:
                    c[target] = partial(binary_ops[op], left=source2, right=source)
    print(c['a']())
{ "repo_name": "tosmun/AdventOfCode", "path": "solutions/day7/p1/main.py", "copies": "1", "size": "1435", "license": "apache-2.0", "hash": -7549909831437743000, "line_mean": 26.5961538462, "line_max": 58, "alpha_frac": 0.6271777003, "autogenerated": false, "ratio": 2.791828793774319, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.3919006494074319, "avg_score": null, "num_lines": null }
from functools import partial


def sum_of_squares(v):
    """Return the sum of squared components of vector v."""
    return sum(v_i ** 2 for v_i in v)


def difference_quotient(f, x, h):
    """Approximate f'(x) with the forward difference (f(x+h) - f(x)) / h."""
    return (f(x + h) - f(x)) / h


def square(x):
    return x * x


def derivative(x):
    """Exact derivative of square, for comparison with the estimate."""
    return 2 * x


derivative_estimate = partial(difference_quotient, square, h=0.00001)

# import matplotlib.pyplot as plt
# x = range(-10,10)
# plt.title("Actual derivates vs estimates")
# plt.plot(x,map(derivative,x),'rx',label='Actual')
# plt.plot(x,map(derivative_estimate,x),'b+',label='Estimate')
# plt.plot(x,map(square,x),'g-',label='Square')
# plt.legend(loc=9)
# plt.show()


def partial_difference_quotient(f, v, i, h):
    """Estimate the i-th partial derivative of f at v.

    Keeps the vector untouched except at position i, which is nudged by h.
    """
    w = [v_j + (h if j == i else 0) for j, v_j in enumerate(v)]
    return (f(w) - f(v)) / h


# Equivalent explicit-loop version (no difference in performance, since the
# values of v have to be copied into w either way). This later definition
# shadows the comprehension version above, as in the original.
def partial_difference_quotient(f, v, i, h):
    w = []  # BUG FIX: w was never initialized, so this version raised NameError
    for j, v_j in enumerate(v):
        if j == i:
            w.append(v_j + h)
        else:
            w.append(v_j)
    return (f(w) - f(v)) / h


def estimate_gradient(f, v, h=0.00001):
    """Return a vector of the partial difference quotients of f at v."""
    return [partial_difference_quotient(f, v, i, h)
            for i, _ in enumerate(v)]


def step(v, direction, step_size):
    """Move step_size along direction from v.

    BUG FIX: the original took a spurious `self` parameter (its callers pass
    only three arguments) and the comprehension unpacked into the wrong
    names, referencing an undefined `direction_i`.
    """
    return [v_i + step_size * direction_i
            for v_i, direction_i in zip(v, direction)]


def sum_of_squares_gradient(v):
    """Exact gradient of sum_of_squares."""
    return [2 * v_i for v_i in v]


def solve(tolerance=0.00001):
    """Gradient-descend sum_of_squares from a random 3-d starting point.

    NOTE(review): relies on `distance`, which is not defined in this module
    (in the book it comes from the linear_algebra chapter) -- confirm the
    import before using.
    """
    import random
    v = [random.randint(-10, 10) for i in range(3)]
    while True:
        gradient = sum_of_squares_gradient(v)
        next_v = step(v, gradient, -0.01)
        if distance(next_v, v) < tolerance:
            break
        v = next_v


def safe(f):
    """Return the same function as f except that it yields infinity instead
    of raising an error."""
    def safe_f(*args, **kwargs):
        try:
            return f(*args, **kwargs)
        except:
            return float('inf')
    return safe_f


def minimize_batch(target_fn, gradient_fn, theta_0, tolerance=0.00001):
    """Minimize target_fn by full-batch gradient descent from theta_0.

    Tries several step sizes per iteration and keeps the best candidate;
    stops when the objective improves by less than `tolerance`.
    """
    # BUG FIX: the original literal had a typo, `0.001.0.0001` (period
    # instead of comma), which is a syntax error.
    step_sizes = [100, 10, 1, 0.1, 0.01, 0.001, 0.0001]
    theta = theta_0
    target_fn = safe(target_fn)
    value = target_fn(theta)
    while True:
        gradient = gradient_fn(theta)
        next_thetas = [step(theta, gradient, -step_size)
                       for step_size in step_sizes]
        # BUG FIX: pick the best candidate into `next_theta`; the original
        # overwrote `next_thetas`, evaluated target_fn on the builtin
        # `next`, and then referenced an undefined `next_theta`.
        next_theta = min(next_thetas, key=target_fn)
        next_value = target_fn(next_theta)
        if abs(value - next_value) < tolerance:
            return theta
        theta, value = next_theta, next_value


def negate(f):
    """Return a function computing -f(...)."""
    return lambda *args, **kwargs: -f(*args, **kwargs)


def negate_all(f):
    """Negate every element of the list f returns."""
    return lambda *args, **kwargs: [-y for y in f(*args, **kwargs)]


def maximize_batch(target_fn, gradient_fn, theta_0, tolerance=0.00001):
    """Maximize target_fn by minimizing its negation."""
    return minimize_batch(negate(target_fn),
                          negate_all(gradient_fn),
                          theta_0,
                          tolerance)


def in_random_order(data):
    """Yield the elements of data in random order."""
    import random
    indexes = [i for i, _ in enumerate(data)]  # create a list of indexes
    random.shuffle(indexes)
    for i in indexes:
        yield data[i]


def minimize_stochastic(target_fn, gradient_fn, x, y, theta_0, alpha_0=0.01):
    """Minimize target_fn by stochastic gradient descent over (x, y) pairs.

    NOTE(review): uses `vector_substract` and `scalar_multiply`, which are
    not defined in this module (the book's linear_algebra helpers) --
    confirm the import before using.
    """
    # BUG FIX: materialize the pairs; a bare zip() is a one-shot iterator
    # in Python 3, so re-iterating `data` on every loop pass would see
    # nothing after the first traversal.
    data = list(zip(x, y))
    theta = theta_0  # initial guess
    alpha = alpha_0  # initial step size
    min_theta, min_value = None, float("inf")
    iterations_with_no_improvement = 0
    while iterations_with_no_improvement < 100:
        value = sum(target_fn(x_i, y_i, theta) for x_i, y_i in data)
        if value < min_value:
            # Improvement: remember it and reset to the initial step size.
            min_theta, min_value = theta, value
            iterations_with_no_improvement = 0
            alpha = alpha_0
        else:
            # No improvement: shrink the step size and try again.
            iterations_with_no_improvement += 1
            # BUG FIX: the original set `alpha = 0.9`, discarding the decay;
            # the step size should shrink multiplicatively.
            alpha *= 0.9
        for x_i, y_i in in_random_order(data):
            gradient_i = gradient_fn(x_i, y_i, theta)
            theta = vector_substract(theta, scalar_multiply(alpha, gradient_i))
    return min_theta


def maximize_stochastic(target_fn, gradient_fn, x, y, theta_0, alpha_0=0.01):
    """Maximize target_fn stochastically by minimizing its negation."""
    return minimize_stochastic(negate(target_fn),
                               negate_all(gradient_fn),
                               x, y, theta_0, alpha_0)
{ "repo_name": "buckiracer/data-science-from-scratch", "path": "RefMaterials/Library/gradient.py", "copies": "1", "size": "3873", "license": "unlicense", "hash": -2366918828078589400, "line_mean": 23.9870967742, "line_max": 83, "alpha_frac": 0.6692486445, "autogenerated": false, "ratio": 2.6673553719008263, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.38366040164008264, "avg_score": null, "num_lines": null }
from functools import partial

from aiohttp import web
from aiohttp_middlewares import error_middleware, get_error_response
from aiohttp_middlewares.annotations import Middleware

from ..annotations import DictStrAny, Handler
from .constants import REQUEST_CORE_OPERATION_KEY
from .core_data import find_core_operation
from .validators import validate_request, validate_response


def get_actual_handler(handler: Handler) -> Handler:
    """Unwrap partially applied middlewares to reach the real handler.

    aiohttp wraps handlers into middlewares, so when middlewares are
    declared in the application after ``openapi_middleware`` the handler
    arrives as nested partials::

        functools.partial(
            <function middleware1.<locals>.middleware at 0x111325b80>,
            handler=functools.partial(
                <function middleware2.<locals>.middleware at 0x111325ca0>,
                handler=<function actual_handler at 0x1112aa700>
            )
        )

    Without unwrapping, ``HANDLER_OPENAPI_MAPPING_KEY`` would not be found
    on the partial, so the handler would not be validated against the
    OpenAPI schema.
    """
    # Iterative unwinding of the recursive structure above.
    while isinstance(handler, partial) and "handler" in handler.keywords:
        handler = handler.keywords["handler"]
    return handler


def openapi_middleware(
    *,
    is_validate_response: bool = True,
    use_error_middleware: bool = True,
    error_middleware_kwargs: DictStrAny = None
) -> Middleware:
    """Middleware to handle requests to handlers covered by OpenAPI schema.

    In most cases you don't need to add it to the list of
    ``web.Application`` middlewares yourself, as
    :func:`rororo.openapi.setup_openapi` will set it up for you. But if,
    for some reason, you don't want to call the high order
    ``setup_openapi`` function, add this middleware to your
    :class:`aiohttp.web.Applicaiton` manually.
    """
    error_middleware_instance = (
        error_middleware(**error_middleware_kwargs or {})
        if use_error_middleware
        else None
    )

    async def get_response(
        request: web.Request, handler: Handler
    ) -> web.StreamResponse:
        # Route through the error middleware when one is configured.
        if error_middleware_instance is None:
            return await handler(request)
        return await error_middleware_instance(request, handler)

    @web.middleware
    async def middleware(
        request: web.Request, handler: Handler
    ) -> web.StreamResponse:
        # Check whether the handler (with any partially applied middlewares
        # stripped off) is registered as an OpenAPI operation handler.
        core_operation = find_core_operation(
            request, get_actual_handler(handler)
        )
        if core_operation is None:
            # Not an OpenAPI operation: pass through untouched.
            return await get_response(request, handler)

        try:
            # Run the actual `aiohttp.web` handler for the requested
            # operation against the validated request.
            request[REQUEST_CORE_OPERATION_KEY] = core_operation
            response = await get_response(
                await validate_request(request), handler
            )

            # For performance considerations it can be useful to turn off
            # response validation in production, as it performs extra
            # checks after the response is ready.
            if is_validate_response:
                validate_response(request, response)

            return response
        except Exception as err:  # noqa: PIE786
            return await get_error_response(
                request, err, **error_middleware_kwargs or {}
            )

    return middleware
{ "repo_name": "playpauseandstop/rororo", "path": "src/rororo/openapi/middlewares.py", "copies": "1", "size": "3583", "license": "bsd-3-clause", "hash": 5337248108023173000, "line_mean": 36.3229166667, "line_max": 78, "alpha_frac": 0.6717834217, "autogenerated": false, "ratio": 4.552731893265565, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 1, "avg_score": 0, "num_lines": 96 }
from functools import partial from ....async.clients import Pool from ....utils.string import to_string from ..store import RemoteStore from .client import RedisClient, Pipeline, Consumer, RedisStoreConnection from .pubsub import RedisPubSub, RedisChannels class RedisStore(RemoteStore): '''Redis :class:`.Store` implementation. ''' supported_queries = frozenset(('filter', 'exclude')) def _init(self, namespace=None, pool_size=10, decode_responses=False, **kwargs): self.protocol_factory = partial(RedisStoreConnection, Consumer) self._decode_responses = decode_responses if namespace: self._urlparams['namespace'] = namespace self._pool = Pool(self.connect, pool_size=pool_size, loop=self._loop) if self._database is None: self._database = 0 self._database = int(self._database) self.loaded_scripts = set() @property def pool(self): return self._pool @property def namespace(self): '''The prefix namespace to append to all transaction on keys ''' n = self._urlparams.get('namespace') return '%s:' % n if n else '' def key(self): return (self._dsn, self._encoding) def client(self): '''Get a :class:`.RedisClient` for the Store''' return RedisClient(self) def pipeline(self): '''Get a :class:`.Pipeline` for the Store''' return Pipeline(self) def pubsub(self, protocol=None): return RedisPubSub(self, self.protocol_factory, protocol=protocol) def channels(self, protocol=None, **kw): return RedisChannels(self.pubsub(protocol=protocol), **kw) def ping(self): return self.client().ping() async def execute(self, *args, **options): connection = await self._pool.connect() async with connection: result = await connection.execute(*args, **options) return result async def execute_pipeline(self, commands, raise_on_error=True): conn = await self._pool.connect() async with conn: result = await conn.execute_pipeline(commands, raise_on_error) return result async def connect(self, protocol_factory=None): protocol_factory = protocol_factory or self.create_protocol if isinstance(self._host, tuple): 
host, port = self._host transport, connection = await self._loop.create_connection( protocol_factory, host, port) else: raise NotImplementedError('Could not connect to %s' % str(self._host)) if self._password: await connection.execute('AUTH', self._password) if self._database: await connection.execute('SELECT', self._database) return connection def flush(self): return self.execute('flushdb') def close(self): '''Close all open connections.''' return self._pool.close() def has_query(self, query_type): return query_type in self.supported_queries def basekey(self, meta, *args): key = '%s%s' % (self.namespace, meta.table_name) postfix = ':'.join((to_string(p) for p in args if p is not None)) return '%s:%s' % (key, postfix) if postfix else key def meta(self, meta): '''Extract model metadata for lua script stdnet/lib/lua/odm.lua''' # indices = dict(((idx.attname, idx.unique) for idx in meta.indices)) data = meta.as_dict() data['namespace'] = self.basekey(meta) return data class CompiledQuery: def __init__(self, pipe, query): self.pipe = pipe
{ "repo_name": "quantmind/pulsar", "path": "pulsar/apps/data/redis/store.py", "copies": "1", "size": "3707", "license": "bsd-3-clause", "hash": 6435563707458999000, "line_mean": 32.0982142857, "line_max": 78, "alpha_frac": 0.6134340437, "autogenerated": false, "ratio": 4.217292377701934, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.5330726421401935, "avg_score": null, "num_lines": null }
from functools import partial

from .base import TationDjangoTationManager

__all__ = (
    'annotation',
    'cached_annotation'
)


def wrap_class_as_func(instance, func):
    """Copy function metadata onto *instance* so it masquerades as *func*."""
    for wrapper_attr in ('__module__', '__name__', '__qualname__', '__doc__'):
        try:
            setattr(instance, wrapper_attr, getattr(func, wrapper_attr))
        except AttributeError:
            pass


class AnnotationProperty:
    """Data descriptor that returns an assigned value (e.g. a queryset
    annotation stored on the instance) or falls back to computing it with
    the wrapped function."""

    def __init__(self, annotate, func):
        self.func = func
        self._djangotation = TationDjangoTationManager(annotate, func.__name__)
        wrap_class_as_func(self, func)

    def __get__(self, instance, owner):
        if instance is None:
            return self
        name = self.func.__name__
        if name in instance.__dict__:
            return instance.__dict__[name]
        return self.func(instance)

    def __set__(self, instance, value):
        if instance is not None:
            instance.__dict__[self.func.__name__] = value


class CachedAnnotationProperty(AnnotationProperty):
    """Like :class:`AnnotationProperty`, but writes the computed value back
    onto the instance so the function runs at most once."""

    def __get__(self, instance, owner):
        if instance is None:
            return self
        res = instance.__dict__[self.func.__name__] = super(
            CachedAnnotationProperty, self).__get__(instance, owner)
        return res


def annotation(annotate):
    return partial(AnnotationProperty, annotate)


def cached_annotation(annotate):
    return partial(CachedAnnotationProperty, annotate)
{ "repo_name": "CyboLabs/Djangotation", "path": "djangotation/decorators.py", "copies": "1", "size": "1411", "license": "mit", "hash": -3746651793042019300, "line_mean": 25.6226415094, "line_max": 116, "alpha_frac": 0.6208362863, "autogenerated": false, "ratio": 4.314984709480123, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 1, "avg_score": 0.00016126431220770846, "num_lines": 53 }
from functools import partial

from bearlibterminal import terminal
from clubsandwich.ui import (
    UIScene,
    WindowView,
    LayoutOptions,
    KeyAssignedListView,
    ButtonView,
    LabelView
)


class InventoryWindow(UIScene):
    """Full-screen window listing wielded, worn and carried items; picking
    one opens its detail window."""

    def __init__(self, wielded_items, worn_items, inventory_items):
        self.covers_screen = True

        def item_buttons(items):
            # One button per distinct item, sorted by name, opening details.
            return [
                ButtonView(item.name, callback=partial(self.describe_item, item))
                for item in sorted(set(items), key=lambda item: item.name)
            ]

        window_view = WindowView(
            "Items",
            subviews=[
                KeyAssignedListView(
                    item_buttons(wielded_items),
                    value_column_width=16,
                    layout_options=LayoutOptions(left=0.1, width=0.3, height=0.3,
                                                 top=0, right=None, bottom=None)
                ),
                KeyAssignedListView(
                    item_buttons(worn_items),
                    value_column_width=16,
                    layout_options=LayoutOptions(left=0.1, width=0.3, height=0.3,
                                                 top=0.3, right=None, bottom=None)
                ),
                KeyAssignedListView(
                    item_buttons(inventory_items),
                    value_column_width=16,
                    layout_options=LayoutOptions(left=0.1, width=0.3, height=0.3,
                                                 top=0.6, right=None, bottom=None)
                )
            ]
        )
        super().__init__(window_view)

    def describe_item(self, item):
        self.director.push_scene(ItemDetailWindow(item))

    def terminal_read(self, val):
        super().terminal_read(val)
        if val == terminal.TK_ESCAPE:
            self.director.pop_scene()


class ItemDetailWindow(UIScene):
    """Small overlay window showing a single item's description."""

    def __init__(self, chosen_item):
        self.covers_screen = False
        view = WindowView(
            chosen_item.name,
            layout_options=LayoutOptions(left=None, width=0.3, height=0.7,
                                         top=0.05, right=0.25, bottom=None),
            subviews=[
                LabelView(chosen_item.description)
            ]
        )
        super().__init__(view)

    def terminal_read(self, val):
        # Any key dismisses the detail view.
        self.director.pop_scene()
{ "repo_name": "ChrisLR/Python-Roguelike-Template", "path": "scenes/game/windows/inventory_window.py", "copies": "1", "size": "2551", "license": "mit", "hash": 1680259433623869400, "line_mean": 33.472972973, "line_max": 119, "alpha_frac": 0.5578204626, "autogenerated": false, "ratio": 3.900611620795107, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.4958432083395107, "avg_score": null, "num_lines": null }
from functools import partial

from bearlibterminal import terminal
from clubsandwich.ui import (
    UIScene,
    WindowView,
    LayoutOptions,
    KeyAssignedListView,
    ButtonView
)


class ChoicesResolutionWindow(UIScene):
    """Modal list of choices; picking one forwards it to *callback* together
    with the choice group's name."""

    def __init__(self, callback, choice_name, choices):
        buttons = []
        for choice in choices:
            # Prefer a .name attribute, otherwise fall back to str().
            label = choice.name if hasattr(choice, 'name') else str(choice)
            buttons.append(ButtonView(
                label,
                callback=partial(self.callback_and_pop_scene,
                                 callback, choice, choice_name)))
        window_view = WindowView(
            choice_name,
            subviews=[
                KeyAssignedListView(
                    buttons,
                    value_column_width=16,
                ),
            ],
            layout_options=LayoutOptions(left=0.25, width=0.3, height=0.5,
                                         top=0.25, right=None, bottom=None)
        )
        super().__init__(window_view)

    @staticmethod
    def callback_and_pop_scene(callback, choice, choice_name):
        # NOTE(review): despite the name, this never pops the scene (compare
        # ItemQueryWindow.callback_and_pop_scene) -- presumably the callback
        # or a later event closes the window; confirm before relying on it.
        callback(choice, choice_name)

    def terminal_read(self, val):
        super().terminal_read(val)
        if val == terminal.TK_ESCAPE:
            self.director.pop_scene()
{ "repo_name": "ChrisLR/Python-Roguelike-Template", "path": "scenes/character_creation/choicesresolution.py", "copies": "1", "size": "1181", "license": "mit", "hash": 7554457862587222000, "line_mean": 28.525, "line_max": 109, "alpha_frac": 0.5850973751, "autogenerated": false, "ratio": 4.072413793103448, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.5157511168203448, "avg_score": null, "num_lines": null }
from functools import partial

from bearlibterminal import terminal
from clubsandwich.ui import (
    UIScene,
    WindowView,
    LayoutOptions,
    KeyAssignedListView,
    ButtonView
)


class ItemQueryWindow(UIScene):
    """Full-screen window asking the user to pick one item.

    Wielded, worn and carried items are shown in three key-assigned lists;
    selecting one calls ``callback(item)`` and closes the window. ESC
    cancels without invoking the callback.
    """

    def __init__(self, callback, wielded_items, worn_items, inventory_items):
        self.covers_screen = True

        window_view = WindowView(
            "Items",
            subviews=[
                KeyAssignedListView(
                    self._item_buttons(callback, wielded_items),
                    value_column_width=16,
                    layout_options=LayoutOptions(left=0.1, width=0.3, height=0.3, top=0, right=None, bottom=None)
                ),
                KeyAssignedListView(
                    self._item_buttons(callback, worn_items),
                    value_column_width=16,
                    layout_options=LayoutOptions(left=0.1, width=0.3, height=0.3, top=0.3, right=None, bottom=None)
                ),
                KeyAssignedListView(
                    self._item_buttons(callback, inventory_items),
                    value_column_width=16,
                    layout_options=LayoutOptions(left=0.1, width=0.3, height=0.3, top=0.6, right=None, bottom=None)
                )
            ]
        )
        super().__init__(window_view)

    def _item_buttons(self, callback, items):
        """Build one selection button per unique item, ordered by item name.

        Deduplicates via set() exactly like the three inline lists this
        helper replaces.
        """
        return [
            ButtonView(item.name,
                       callback=partial(self.callback_and_pop_scene, callback, item))
            for item in sorted(set(items), key=lambda item: item.name)
        ]

    def callback_and_pop_scene(self, callback, item):
        """Report the chosen item to the caller and close this window."""
        callback(item)
        self.director.pop_scene()

    def terminal_read(self, val):
        """ESC cancels the query without selecting an item."""
        super().terminal_read(val)
        if val == terminal.TK_ESCAPE:
            self.director.pop_scene()
{ "repo_name": "ChrisLR/Python-Roguelike-Template", "path": "scenes/game/windows/item_query_window.py", "copies": "1", "size": "2149", "license": "mit", "hash": 4234347509565331000, "line_mean": 36.0517241379, "line_max": 119, "alpha_frac": 0.5625872499, "autogenerated": false, "ratio": 3.9287020109689212, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.49912892608689213, "avg_score": null, "num_lines": null }
from functools import partial

from celery.schedules import crontab
from celery.task import periodic_task

from corehq.apps.users.models import CommCareUser
from custom.ilsgateway.models import SupplyPointStatus, SupplyPointStatusTypes, SupplyPointStatusValues, \
    DeliveryGroups
from custom.ilsgateway.tanzania.reminders import REMINDER_DELIVERY_FACILITY, REMINDER_DELIVERY_DISTRICT, \
    update_statuses
from custom.ilsgateway.utils import send_for_day, send_translated_message
import settings


def send_delivery_reminder(domain, date, loc_type='FACILITY', test_list=None):
    """Send a delivery-reminder SMS to active users at locations of
    ``loc_type`` that belong to the currently-delivering group and have no
    delivery status recorded on/after ``date``, then bulk-mark those
    locations REMINDER_SENT.

    :param domain: CommCare domain whose users are messaged.
    :param date: cutoff; an existing status on/after it suppresses the SMS.
    :param loc_type: 'FACILITY' or 'DISTRICT'; anything else is a no-op.
    :param test_list: optional explicit user list used instead of all
        users in the domain (test hook).
    """
    if loc_type == 'FACILITY':
        status_type = SupplyPointStatusTypes.DELIVERY_FACILITY
        sms_text = REMINDER_DELIVERY_FACILITY
    elif loc_type == 'DISTRICT':
        status_type = SupplyPointStatusTypes.DELIVERY_DISTRICT
        sms_text = REMINDER_DELIVERY_DISTRICT
    else:
        return
    current_group = DeliveryGroups().current_delivering_group(date.month)
    sp_ids = set()

    users = CommCareUser.by_domain(domain) if not test_list else test_list
    for user in users:
        location = user.location
        if user.is_active and location and location.location_type == loc_type:
            status_exists = SupplyPointStatus.objects.filter(
                location_id=location.get_id,
                status_type=status_type,
                status_date__gte=date
            ).exists()
            groups = location.metadata.get('group', None)
            if groups and current_group in groups and not status_exists:
                send_translated_message(user, sms_text)
                # Bug fix: sp_ids was previously never populated, so the
                # update_statuses() call below was a guaranteed no-op and
                # reminded locations were never marked REMINDER_SENT.
                sp_ids.add(location.get_id)

    update_statuses(sp_ids, status_type, SupplyPointStatusValues.REMINDER_SENT)


# Partials that pre-bind the cutoff day; send_for_day decides whether the
# reminder actually fires for the given day within the crontab window.
facility_partial = partial(send_for_day, cutoff=15, f=send_delivery_reminder)
district_partial = partial(send_for_day, cutoff=13, f=send_delivery_reminder, loc_type='DISTRICT')


@periodic_task(run_every=crontab(day_of_month="13-15", hour=14, minute=0),
               queue=getattr(settings, 'CELERY_PERIODIC_QUEUE', 'celery'))
def first_facility_delivery_task():
    facility_partial(15)


@periodic_task(run_every=crontab(day_of_month="20-22", hour=14, minute=0),
               queue=getattr(settings, 'CELERY_PERIODIC_QUEUE', 'celery'))
def second_facility_delivery_task():
    facility_partial(22)


@periodic_task(run_every=crontab(day_of_month="26-30", hour=14, minute=0),
               queue=getattr(settings, 'CELERY_PERIODIC_QUEUE', 'celery'))
def third_facility_delivery_task():
    facility_partial(30)


@periodic_task(run_every=crontab(day_of_month="11-13", hour=8, minute=0),
               queue=getattr(settings, 'CELERY_PERIODIC_QUEUE', 'celery'))
def first_district_delivery_task():
    district_partial(13)


@periodic_task(run_every=crontab(day_of_month="18-20", hour=14, minute=0),
               queue=getattr(settings, 'CELERY_PERIODIC_QUEUE', 'celery'))
def second_district_delivery_task():
    district_partial(20)


@periodic_task(run_every=crontab(day_of_month="26-28", hour=14, minute=0),
               queue=getattr(settings, 'CELERY_PERIODIC_QUEUE', 'celery'))
def third_district_delivery_task():
    district_partial(28)
{ "repo_name": "puttarajubr/commcare-hq", "path": "custom/ilsgateway/tanzania/reminders/delivery.py", "copies": "1", "size": "3034", "license": "bsd-3-clause", "hash": 3755592696485389300, "line_mean": 41.7323943662, "line_max": 134, "alpha_frac": 0.7201713909, "autogenerated": false, "ratio": 3.269396551724138, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.9484406511462853, "avg_score": 0.0010322862322571554, "num_lines": 71 }
from functools import partial

from .common import perform_regression_check, check_text_files


class FileRegressionFixture:
    """Implementation of `file_regression` fixture.

    Compares arbitrary text or binary contents against a previously
    recorded file; records a new expected file when none exists (or when
    ``force_regen`` is set).
    """

    def __init__(self, datadir, original_datadir, request):
        """
        :type datadir: Path
        :type original_datadir: Path
        :type request: FixtureRequest
        """
        self.request = request
        self.datadir = datadir
        self.original_datadir = original_datadir
        # When True, expected files are regenerated instead of compared.
        self.force_regen = False

    def check(
        self,
        contents,
        encoding=None,
        extension=".txt",
        newline=None,
        basename=None,
        fullpath=None,
        binary=False,
        obtained_filename=None,
        check_fn=None,
    ):
        """
        Checks the contents against a previously recorded version, or generate a new file.

        :param str contents: content to be verified.
        :param str|None encoding: Encoding used to write file, if any.
        :param str extension: Extension of file.
        :param str|None newline: See `io.open` docs.
        :param bool binary: If the file is binary or text.
        :param obtained_filename: ..see:: FileRegressionCheck
        :param check_fn: a function with signature ``(obtained_filename, expected_filename)`` that
            should raise AssertionError if both files differ.
            If not given, use internal function which compares text using :py:mod:`difflib`.
        """
        __tracebackhide__ = True

        # binary contents have no encoding; the two options are exclusive.
        if binary and encoding:
            raise ValueError(
                "Only binary ({!r}) or encoding ({!r}) parameters must be passed at the same time.".format(
                    binary, encoding
                )
            )

        if binary:
            assert isinstance(
                contents, bytes
            ), "Expected bytes contents but received type {}".format(
                type(contents).__name__
            )
        else:
            assert isinstance(
                contents, str
            ), "Expected text/unicode contents but received type {}".format(
                type(contents).__name__
            )

        # (removed a dead `import io`; nothing below used the module)

        if check_fn is None:

            if binary:

                def check_fn(obtained_filename, expected_filename):
                    # Byte-for-byte comparison for binary payloads.
                    if obtained_filename.read_bytes() != expected_filename.read_bytes():
                        raise AssertionError(
                            "Binary files {} and {} differ.".format(
                                obtained_filename, expected_filename
                            )
                        )

            else:
                check_fn = partial(check_text_files, encoding=encoding)

        def dump_fn(filename):
            # Writer used to (re)generate the expected file.
            mode = "wb" if binary else "w"
            with open(str(filename), mode, encoding=encoding, newline=newline) as f:
                f.write(contents)

        perform_regression_check(
            datadir=self.datadir,
            original_datadir=self.original_datadir,
            request=self.request,
            check_fn=check_fn,
            dump_fn=dump_fn,
            extension=extension,
            basename=basename,
            fullpath=fullpath,
            force_regen=self.force_regen,
            obtained_filename=obtained_filename,
        )

    # non-PEP 8 alias used internally at ESSS
    Check = check
{ "repo_name": "ESSS/pytest-regressions", "path": "src/pytest_regressions/file_regression.py", "copies": "1", "size": "3406", "license": "mit", "hash": 190639397225493500, "line_mean": 31.4380952381, "line_max": 111, "alpha_frac": 0.5475631239, "autogenerated": false, "ratio": 4.8796561604584525, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.5927219284358453, "avg_score": null, "num_lines": null }
from functools import partial

from .._compat.enum import EnumMeta, Enum, Flag
from .._compat.typing import Any, Callable, Optional


__all__ = ['ExtensibleConstructorMeta', 'construct_with_alternatives', 'construct_union']


class ExtensibleConstructorMeta(EnumMeta):
    """Metaclass that routes enum calls through ``cls.__new__``.

    ``EnumMeta.__call__`` normally performs member lookup directly;
    delegating to ``__new__`` instead lets the decorators below chain
    custom constructors in front of the standard lookup.
    """

    def __call__(cls, *args: Any, **kwargs: Any) -> Any:
        # Delegate so that extend_constructor()-installed wrappers run.
        return cls.__new__(cls, *args, **kwargs)  # type: ignore


def extend_constructor(
    constructor: Callable[..., Enum]
) -> Callable[[EnumMeta], EnumMeta]:
    """Return a class decorator that chains ``constructor`` in front of the
    enum class's current ``__new__``.

    ``constructor`` receives the previous constructor (already bound to the
    class) followed by the class and the original call arguments. Multiple
    decorators stack because each captures the ``__new__`` in effect at
    decoration time.
    """
    def decorator(cls: EnumMeta) -> EnumMeta:
        # Bind the *current* __new__ so later wrappers can fall back to it.
        next_constructor = partial(cls.__new__, cls)

        def __new__(cls: EnumMeta, *args: Any, **kwargs: Any) -> Enum:
            return constructor(next_constructor, cls, *args, **kwargs)
        cls.__new__ = __new__  # type: ignore

        return cls
    return decorator


def construct_with_alternatives(
    provider: Callable[..., Optional[Enum]]
) -> Callable[[EnumMeta], EnumMeta]:
    """Class decorator factory: when normal enum lookup raises
    ``ValueError``, fall back to ``provider(cls, *args, **kwargs)``.

    ``provider`` may return ``None`` to signal "no alternative", in which
    case the original ``ValueError`` propagates.
    """
    def constructor(next_constructor: Callable[..., Enum], cls: EnumMeta, *args: Any, **kwargs: Any) -> Enum:
        try:
            return next_constructor(*args, **kwargs)
        except ValueError:
            result = provider(cls, *args, **kwargs)
            if result is None:
                raise
            else:
                return result

    return extend_constructor(constructor)


def _construct_union(
    next_constructor: Callable[[Any], Flag],
    cls: ExtensibleConstructorMeta,
    *args: Any
) -> Any:
    """Constructor allowing ``SomeFlag(a, b, c)`` to mean ``a | b | c``.

    With no arguments, returns the empty flag (value 0).
    """
    if args:
        # Construct each argument, then OR them together left-to-right.
        ret, *rest = iter(next_constructor(arg) for arg in args)
        for value in rest:
            ret |= value
        return ret
    else:
        return next_constructor(0)


construct_union = extend_constructor(_construct_union)
{ "repo_name": "dmilith/SublimeText3-dmilith", "path": "Packages/sublime_lib/st3/sublime_lib/_util/enum.py", "copies": "1", "size": "1739", "license": "mit", "hash": 3272378176828795400, "line_mean": 27.0483870968, "line_max": 89, "alpha_frac": 0.6083956297, "autogenerated": false, "ratio": 4.063084112149533, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.5171479741849533, "avg_score": null, "num_lines": null }
from functools import partial

from .compat import PY2, ustr, bstr, urlparse


def merge_dicts(*dicts, **kwargs):
    """Merges dicts and kwargs into one dict"""
    result = {}
    for d in dicts:
        result.update(d)
    result.update(kwargs)
    return result


def parse_qs(qs):
    """Helper func to parse query string with py2/py3 compatibility

    Ensures that dict keys are native strings.
    """
    result = {}
    # Work on bytes throughout; latin1 maps each byte 1:1 to a code point.
    qs = bstr(qs, 'latin1')
    # Both '&' and ';' act as pair separators.
    pairs = [s2 for s1 in qs.split(b'&') for s2 in s1.split(b';')]
    uq = urlparse.unquote if PY2 else urlparse.unquote_to_bytes
    for name_value in pairs:
        if not name_value:
            continue
        nv = name_value.split(b'=', 1)
        if len(nv) != 2:
            # A bare name with no '=' gets an empty value.
            nv.append(b'')

        name = nv[0].replace(b'+', b' ')
        name = uq(name)
        if not PY2:  # pragma: no py2 cover
            # Keys become native str on py3; values stay bytes.
            name = ustr(name, 'latin1')

        value = nv[1].replace(b'+', b' ')
        value = uq(value)

        # Repeated parameters accumulate into a list.
        result.setdefault(name, []).append(value)

    return result


def clone(src, **kwargs):
    """Clones object with optionally overridden fields"""
    # __new__ bypasses __init__; state is copied via __dict__.
    obj = object.__new__(type(src))
    obj.__dict__.update(src.__dict__)
    obj.__dict__.update(kwargs)
    return obj


def wrap_in(key):
    """Wraps value in dict ``{key: value}``"""
    return lambda val: {key: val}


class ContexAware(object):
    """Marker base for callables that require a context argument."""

    def __call__(self, ctx, data):
        return data  # pragma: no cover

    def check(self, ctx):
        # Hook for subclasses to validate the supplied context.
        pass  # pragma: no cover


def ensure_context(ctx, func, right=True):
    """Bind ``ctx`` to ``func`` if it is ContexAware; otherwise return as-is."""
    if isinstance(func, ContexAware):  # pragma: no cover
        assert right, 'Context-aware function must be attached to right side'
        assert ctx, 'Context needed'
        func.check(ctx)
        return partial(func, ctx)
    return func


def dpass(value):
    """Allows complex inline expressions in decorator

    For example::

        @dpass(params(arg=int) | (lambda r: {'arg': r['arg'] + 10}))
        def boo(request, arg):
            pass

    Is equivalent of::

        d = params(arg=int) | (lambda r: {'arg': r['arg'] + 10})

        @d
        def boo(request, arg):
            pass
    """
    return value
{ "repo_name": "baverman/covador", "path": "covador/utils.py", "copies": "1", "size": "2190", "license": "mit", "hash": 1240223321821180200, "line_mean": 23.8863636364, "line_max": 77, "alpha_frac": 0.5739726027, "autogenerated": false, "ratio": 3.566775244299674, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.9640747846999674, "avg_score": 0, "num_lines": 88 }
from functools import partial

from .config import BOROUGHS
from .error import GeosupportError
from .function_info import (
    FUNCTIONS, AUXILIARY_SEGMENT_LENGTH, WORK_AREA_LAYOUTS
)


def list_of(length, callback, v):
    """Split ``v`` into fixed-width entries of ``length`` chars, applying
    ``callback`` to each, and stop at the first blank entry."""
    output = []
    i = 0
    # While the next entry isn't blank
    while v[i:i+length].strip() != '':
        output.append(callback(v[i:i+length]))
        i += length
    return output


def list_of_items(length):
    """Formatter: fixed-width list of stripped strings."""
    return partial(list_of, length, lambda v: v.strip())


def list_of_workareas(name, length):
    """Formatter: fixed-width list of sub-work-areas parsed with the named
    output layout."""
    return partial(
        list_of, length,
        lambda v: parse_workarea(WORK_AREA_LAYOUTS['output'][name], v)
    )


def list_of_nodes(v):
    """Formatter: 160-char node groups, each holding 32-char sublists of
    8-char node IDs."""
    return list_of(
        160,
        lambda w: list_of(32, list_of_items(8), w),
        v
    )


def borough(v):
    """Normalize a borough input to its 1-digit code.

    Accepts a digit or a name/abbreviation found in BOROUGHS; blank input
    maps to ''. Raises GeosupportError for anything unrecognized.
    """
    if v:
        v2 = str(v).strip().upper()
        if v2.isdigit():
            return str(v2)
        if v2 in BOROUGHS:
            return str(BOROUGHS[v2])
        raise GeosupportError("%s is not a valid borough" % v)
    else:
        return ''


def function(v):
    """Normalize a function code/alias to Geosupport's canonical code."""
    v = str(v).upper().strip()
    if v in FUNCTIONS:
        v = FUNCTIONS[v]['function']
    return v


def flag(true, false):
    """Build a single-character flag formatter.

    Booleans map to the given true/false characters; truthy strings are
    reduced to their first uppercased character; falsy values map to the
    false character.
    """
    def f(v):
        if type(v) == bool:
            return true if v else false
        if v:
            return str(v).strip().upper()[:1]
        else:
            return false
    return f


# Registry of named formatters used by the work-area layouts. Numeric
# formatter names (handled in get_formatter) mean "fixed-width list".
FORMATTERS = {
    # Format input
    'function': function,
    'borough': borough,

    # Flags
    'auxseg': flag('Y', 'N'),
    'cross_street_names': flag('E', ''),
    'long_work_area_2': flag('L', ''),
    'mode_switch': flag('X', ''),
    'real_streets_only': flag('R', ''),
    'roadbed_request_switch': flag('R', ''),
    'street_name_normalization': flag('C', ''),
    'tpad': flag('Y', 'N'),

    # Parse certain output differently
    'LGI': list_of_workareas('LGI', 53),
    'LGI-extended': list_of_workareas('LGI-extended', 116),
    'BINs': list_of_workareas('BINs', 7),
    'BINs-tpad': list_of_workareas('BINs-tpad', 8),
    'intersections': list_of_workareas('INTERSECTION', 55),
    'node_list': list_of_nodes,

    # Census Tract formatter
    'CT': lambda v: '' if v is None else v.replace(' ', '0'),

    # Default formatter
    '': lambda v: '' if v is None else str(v).strip().upper()
}


def get_formatter(name):
    """Look up a formatter by name; a purely numeric name means a
    fixed-width item list of that length.

    NOTE(review): returns None for a non-numeric unknown name — callers
    appear to rely on layouts only referencing registered names.
    """
    if name in FORMATTERS:
        return FORMATTERS[name]
    elif name.isdigit():
        return list_of_items(int(name))


def set_mode(mode):
    """Translate a mode string ('extended', 'long', 'tpad', combinations)
    into the corresponding flag dict entries."""
    flags = {}
    if mode:
        if mode == 'extended':
            flags['mode_switch'] = True
        if 'long' in mode:
            flags['long_work_area_2'] = True
        if 'tpad' in mode:
            flags['tpad'] = True
    return flags


def get_mode(flags):
    """Inverse of set_mode: derive the mode name from parsed flags."""
    if flags['mode_switch']:
        return 'extended'
    elif flags['long_work_area_2'] and flags['tpad']:
        return 'long+tpad'
    elif flags['long_work_area_2']:
        return 'long'
    else:
        return 'regular'


def get_flags(wa1):
    """Read function code and mode flags back out of a formatted WA1."""
    layout = WORK_AREA_LAYOUTS['input']['WA1']

    flags = {
        'function': parse_field(layout['function'], wa1),
        'mode_switch': parse_field(layout['mode_switch'], wa1) == 'X',
        'long_work_area_2': parse_field(layout['long_work_area_2'], wa1) == 'L',
        'tpad': parse_field(layout['tpad'], wa1) == 'Y',
        'auxseg': parse_field(layout['auxseg'], wa1) == 'Y'
    }

    flags['mode'] = get_mode(flags)

    return flags


def create_wa1(kwargs):
    """Build the 1200-char Work Area 1 string from keyword inputs.

    Each input is run through its layout formatter and written at the
    layout's fixed byte offsets; unfilled regions stay blank.
    """
    kwargs['work_area_format'] = 'C'

    b = bytearray(b' '*1200)
    mv = memoryview(b)
    layout = WORK_AREA_LAYOUTS['input']['WA1']

    for key, value in kwargs.items():
        formatter = get_formatter(layout[key]['formatter'])
        value = '' if value is None else str(formatter(value))
        i = layout[key]['i']
        length = i[1]-i[0]
        # Pad/truncate to the field's exact width before writing in place.
        mv[i[0]:i[1]] = value.ljust(length)[:length].encode()

    return str(b.decode())


def create_wa2(flags):
    """Allocate a blank Work Area 2 sized for the function and mode.

    Returns None when the function has no WA2 in this mode.
    """
    length = FUNCTIONS[flags['function']][flags['mode']]

    if length is None:
        return None

    if flags['auxseg']:
        length += AUXILIARY_SEGMENT_LENGTH

    return ' ' * length


def format_input(kwargs):
    """Produce (flags, wa1, wa2) ready to pass to the Geosupport call.

    Raises GeosupportError for an unrecognized function code.
    """
    wa1 = create_wa1(kwargs)
    flags = get_flags(wa1)

    if flags['function'] not in FUNCTIONS:
        raise GeosupportError('INVALID FUNCTION CODE', {})

    wa2 = create_wa2(flags)

    return flags, wa1, wa2


def parse_field(field, wa):
    """Extract one layout field from a work area and apply its formatter."""
    i = field['i']
    formatter = get_formatter(field['formatter'])
    return formatter(wa[i[0]:i[1]])


def parse_workarea(layout, wa):
    """Parse a whole work area according to ``layout``.

    Entries with an 'i' key are leaf fields; others are nested groups of
    fields parsed into sub-dicts.
    """
    output = {}

    for key in layout:
        if 'i' in layout[key]:
            output[key] = parse_field(layout[key], wa)
        else:
            output[key] = {}
            for subkey in layout[key]:
                output[key][subkey] = parse_field(layout[key][subkey], wa)

    return output


def parse_output(flags, wa1, wa2):
    """Merge parsed WA1 output with the function- and mode-specific WA2
    layouts (and the trailing auxiliary segment when requested)."""
    output = {}
    output.update(parse_workarea(WORK_AREA_LAYOUTS['output']['WA1'], wa1))

    function_name = flags['function']
    if function_name in WORK_AREA_LAYOUTS['output']:
        output.update(parse_workarea(
            WORK_AREA_LAYOUTS['output'][function_name], wa2
        ))

    function_mode = function_name + '-' + flags['mode']
    if function_mode in WORK_AREA_LAYOUTS['output']:
        output.update(parse_workarea(
            WORK_AREA_LAYOUTS['output'][function_mode], wa2
        ))

    if flags['auxseg']:
        # The auxiliary segment occupies the tail of WA2.
        output.update(parse_workarea(
            WORK_AREA_LAYOUTS['output']['AUXSEG'],
            wa2[-AUXILIARY_SEGMENT_LENGTH:]
        ))

    return output
{ "repo_name": "ishiland/python-geosupport", "path": "geosupport/io.py", "copies": "1", "size": "5589", "license": "mit", "hash": 1956421603428189400, "line_mean": 24.2895927602, "line_max": 80, "alpha_frac": 0.5702272321, "autogenerated": false, "ratio": 3.2646028037383177, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.9312443028189282, "avg_score": 0.004477401529807088, "num_lines": 221 }
# from ..config_new import BTE_FILTERS
BTE_FILTERS = ["nodeDegree", "ngd", "drugPhase", "survivalProbability"]


def filter_response(res, criteria):
    """
    Filter API response based on filtering criteria
    :param res: API Response (list of record dicts)
    :param criteria: filtering criteria, e.g. {"field": {"=": 3}} or
        {"field": {">": 2}}; supported operators are "=", ">" and "<".
    """

    def matches(record, key, target, op):
        # Falsy/missing values never match any operator.
        if not record.get(key):
            return False
        # List values are collapsed (in place) to their first element.
        if isinstance(record.get(key), list):
            record[key] = record[key][0]
        try:
            observed = type(target)(record[key])
            if op == "=":
                return observed == target
            if op == ">":
                return observed > target
            if op == "<":
                return observed < target
            return False
        except (ValueError, TypeError):
            # Unconvertible/incomparable values are treated as non-matches.
            return False

    if not res or not isinstance(res, list) or len(res) == 0:
        return res
    if not isinstance(criteria, dict):
        return res

    for field, ops in criteria.items():
        if not isinstance(ops, dict):
            continue
        # "=" applies only to fields outside the reserved BTE filter names,
        # and short-circuits the range operators for that field.
        if field not in BTE_FILTERS and "=" in ops:
            res = [record for record in res if matches(record, field, ops["="], "=")]
            continue
        if ">" in ops:
            res = [record for record in res if matches(record, field, ops[">"], ">")]
        elif "<" in ops:
            res = [record for record in res if matches(record, field, ops["<"], "<")]
    return res
{ "repo_name": "biothings/biothings_explorer", "path": "biothings_explorer/call_apis/filter.py", "copies": "1", "size": "1945", "license": "apache-2.0", "hash": -2302402724256495400, "line_mean": 31.4166666667, "line_max": 87, "alpha_frac": 0.4370179949, "autogenerated": false, "ratio": 4.664268585131895, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.5601286580031895, "avg_score": null, "num_lines": null }
from functools import partial

from corpustools.corpus.classes import Word
from corpustools.symbolsim.edit_distance import edit_distance
from corpustools.symbolsim.khorsi import khorsi
from corpustools.symbolsim.phono_edit_distance import phono_edit_distance
from corpustools.symbolsim.phono_align import Aligner

from corpustools.multiproc import filter_mp, score_mp


def _is_edit_distance_neighbor(w, query, sequence_type, max_distance):
    # Cheap length pre-filter: words whose lengths differ from the query by
    # more than max_distance can never be within max_distance edits.
    w_len = len(getattr(w, sequence_type))
    query_len = len(getattr(query, sequence_type))
    if w_len > query_len+max_distance:
        return False
    if w_len < query_len-max_distance:
        return False
    return edit_distance(w, query, sequence_type, max_distance) <= max_distance

def _is_phono_edit_distance_neighbor(w, query, sequence_type, specifier, max_distance):
    # Feature-weighted edit distance; lower means more similar.
    return phono_edit_distance(w, query, sequence_type, specifier) <= max_distance

def _is_khorsi_neighbor(w, query, freq_base, sequence_type, max_distance):
    # Khorsi similarity is a score (higher = more similar), hence >=.
    return khorsi(w, query, freq_base, sequence_type, max_distance) >= max_distance

def neighborhood_density_all_words(corpus_context, tierdict, tier_type = None, sequence_type = None,
            algorithm = 'edit_distance', max_distance = 1, output_format = 'spelling',
            num_cores = -1, settable_attr = None, collapse_homophones = False,
            stop_check = None, call_back = None):
    """Calculate the neighborhood density of all words in the corpus and
    adds them as attributes of the words.

    Parameters
    ----------
    corpus_context : CorpusContext
        Context manager for a corpus
    algorithm : str
        The algorithm used to determine distance
    max_distance : float, optional
        Maximum edit distance from the queried word to consider a word a neighbor.
    stop_check : callable, optional
        Optional function to check whether to gracefully terminate early
    call_back : callable, optional
        Optional function to supply progress information during the function
    settable_attr: string
        Name of attribute that neighbourhood density results will be assigned to
    """
    function = partial(neighborhood_density,
                       corpus_context,
                       tierdict = tierdict,
                       tier_type = tier_type,
                       sequence_type = sequence_type,
                       algorithm = algorithm,
                       max_distance = max_distance,
                       collapse_homophones = collapse_homophones)
    if call_back is not None:
        call_back('Calculating neighborhood densities...')
        call_back(0,len(corpus_context))
        cur = 0
    results = dict()
    last_value_removed = None
    last_key_removed = None
    if num_cores == -1 or num_cores == 1:
        for w in corpus_context:
            if stop_check is not None and stop_check():
                return
            # Temporarily remove w from tierdict so it is not counted as its
            # own neighbor; the previous word's entry is restored first.
            # NOTE(review): the entry popped for the final word is never
            # restored after the loop — confirm callers rebuild tierdict.
            if last_value_removed:
                tierdict[last_key_removed].append(last_value_removed)
            w_sequence = getattr(w, corpus_context.sequence_type)
            last_key_removed = str(w_sequence)
            for i, item in enumerate(tierdict[last_key_removed]):
                if str(item) == str(w):
                    last_value_removed = tierdict[last_key_removed].pop(i)
                    break
            res = neighborhood_density(corpus_context, w, tierdict,
                                       tier_type = tier_type,
                                       sequence_type = sequence_type,
                                       algorithm = algorithm,
                                       max_distance = max_distance,
                                       collapse_homophones = collapse_homophones)
            results[str(w)] = [getattr(r, output_format) for r in res[1]]
            setattr(w.original, settable_attr.name, res[0])
        # for w in corpus_context:
        #     if stop_check is not None and stop_check():
        #         return
        #     cur += 1
        #     call_back(cur)
        #     res = function(w)
        #     results[str(w)] = [getattr(r, output_format) for r in res[1]]
        #     setattr(w.original, settable_attr.name, res[0]-1)
        #     #the -1 is to account for the fact that words are counted as their own neighbour, and this is incorrect
        #     #subtracting 1 here is easier than fixing the neighbourhood density algorithm
    else:
        # Multiprocessing path: score_mp fans words out to worker processes.
        iterable = ((w,) for w in corpus_context)
        neighbors = score_mp(iterable, function, num_cores, call_back, stop_check, chunk_size = 1)
        for n in neighbors:
            #Have to look up the key, then look up the object due to how
            #multiprocessing pickles objects
            setattr(corpus_context.corpus.find(corpus_context.corpus.key(n[0])),
                    #corpus_context.attribute.name, n[1][0])
                    settable_attr.name, n[1][0])
    return results

def neighborhood_density(corpus_context, query, tierdict,
            algorithm = 'edit_distance', max_distance = 1, collapse_homophones = False,
            force_quadratic = False, file_type = None, tier_type=None, sequence_type = None,
            stop_check = None, call_back = None):
    """Calculate the neighborhood density of a particular word in the corpus.

    Parameters
    ----------
    corpus_context : CorpusContext
        Context manager for a corpus
    query : Word
        The word whose neighborhood density to calculate.
    algorithm : str
        The algorithm used to determine distance
    max_distance : float, optional
        Maximum edit distance from the queried word to consider a word a neighbor
    force_quadratic : bool
        Force use of the less efficient quadratic algorithm even when finding
        edit distance of 1 neighborhoods
    stop_check : callable, optional
        Optional function to check whether to gracefully terminate early
    call_back : callable, optional
        Optional function to supply progress information during the function

    Returns
    -------
    tuple(int, set)
        Tuple of the number of neighbors and the set of neighbor Words.
    """
    matches = []
    query = ensure_query_is_word(query, corpus_context, corpus_context.sequence_type, tier_type)
    if call_back is not None:
        call_back('Finding neighbors for {}...'.format(query))
        call_back(0,len(corpus_context))
        cur = 0
    # Fast path: for edit distance 1 we can enumerate candidate strings and
    # hash into tierdict instead of scanning the whole corpus.
    if algorithm == 'edit_distance' and max_distance == 1 and not force_quadratic:
        return fast_neighborhood_density(corpus_context, query, corpus_context.sequence_type,
                                         tier_type, tierdict, file_type=file_type,
                                         collapse_homophones=collapse_homophones)

    if algorithm == 'edit_distance':
        is_neighbor = partial(_is_edit_distance_neighbor,
                              sequence_type = corpus_context.sequence_type,
                              max_distance = max_distance)
    elif algorithm == 'phono_edit_distance':
        is_neighbor = partial(_is_phono_edit_distance_neighbor,
                              specifier = corpus_context.specifier,
                              sequence_type = corpus_context.sequence_type,
                              max_distance = max_distance)
    elif algorithm == 'khorsi':
        freq_base = corpus_context.get_frequency_base()
        is_neighbor = partial(_is_khorsi_neighbor,
                              freq_base = freq_base,
                              sequence_type = corpus_context.sequence_type,
                              max_distance = max_distance)
    # Quadratic path: test every word in the corpus against the query.
    for w in corpus_context:
        if stop_check is not None and stop_check():
            return
        if call_back is not None:
            cur += 1
            if cur % 10 == 0:
                call_back(cur)
        if not is_neighbor(w, query):
            continue
        matches.append(w)
    # The query matches itself, so remove it from its own neighborhood.
    neighbors = set(matches)-set([query])
    return (len(neighbors), neighbors)

def fast_neighborhood_density(corpus_context, query, sequence_type, tier_type,
                              tierdict, file_type=None, trans_delimiter='.',
                              collapse_homophones = False):
    """Generates all neighbors of edit distance <= 1 and searches
    for them in corpus_context.

    Will be faster than neighborhood_density when:
    n > m * (1 + s), where
    n: number of words in corpus
    m: length of query
    s: size of segment inventory
    """
    neighbors = list()
    query = ensure_query_is_word(query, corpus_context, sequence_type, tier_type, file_type=file_type)
    for candidate in generate_neighbor_candidates(corpus_context, query, sequence_type):
        # Candidates are segment lists; join them into the string form used
        # as tierdict keys (delimited for tiers, concatenated for spelling).
        if tier_type.att_type == 'tier':
            cand_str = trans_delimiter.join(candidate)
        else:
            cand_str = ''.join(candidate)

        if cand_str in tierdict:
            for w in tierdict[cand_str]:
                w_sequence = getattr(w, sequence_type)
                if collapse_homophones and any(getattr(word, sequence_type) == w_sequence for word in neighbors):
                    continue
                else:
                    neighbors.append(w)
    return (len(neighbors), neighbors)

def generate_neighbor_candidates(corpus_context, query, sequence_type):
    """Yield every sequence within edit distance 1 of the query: the query
    itself, plus all single-segment deletions, insertions and substitutions
    over the corpus inventory ('#' boundary symbol excluded)."""
    sequence = getattr(query, sequence_type)
    yield [str(c) for c in sequence]
    for i in range(len(sequence)):
        yield [str(c) for c in sequence[:i]] + [str(c) for c in sequence[i+1:]] # deletion
        for char in corpus_context.inventory:
            if str(char) not in ['#', sequence[i]]:
                yield [str(c) for c in sequence[:i]] + [str(char)] + [str(c) for c in sequence[i:]] # insertion
                yield [str(c) for c in sequence[:i]] + [str(char)] + [str(c) for c in sequence[i+1:]] # substitution
    # NOTE(review): this final pass reuses the leaked loop variable `i`
    # (raises NameError on an empty sequence) — confirm queries are never empty.
    for char in corpus_context.inventory: # final pass to get insertion at len+1
        if str(char) not in ['#', sequence[i]]:
            yield [str(c) for c in sequence[:]] + [str(char)] # insertion

def find_mutation_minpairs_all_words(corpus_context, tierdict, tier_type = None, num_cores = -1,
                    collapse_homophones = False, stop_check = None, call_back = None):
    """Find mutation minimal pairs for every word in the corpus, storing the
    count on each word's attribute and returning a dict of pair lists."""
    function = partial(find_mutation_minpairs, corpus_context, tier_type=tier_type, collapse_homophones = collapse_homophones)
    if call_back is not None:
        call_back('Calculating neighborhood densities...')
        call_back(0,len(corpus_context))
        cur = 0
    results = dict()
    last_value_removed = None
    last_key_removed = None
    if num_cores == -1 or num_cores == 1:
        for w in corpus_context:
            if stop_check is not None and stop_check():
                return
            # Same temporary-removal bookkeeping as in
            # neighborhood_density_all_words (word must not match itself).
            if last_value_removed:
                tierdict[last_key_removed].append(last_value_removed)
            w_sequence = getattr(w, corpus_context.sequence_type)
            last_key_removed = str(w_sequence)
            for i, item in enumerate(tierdict[last_key_removed]):
                if str(item) == str(w):
                    last_value_removed = tierdict[last_key_removed].pop(i)
                    break
            res = find_mutation_minpairs(corpus_context, w, tier_type=tier_type,
                                         collapse_homophones = collapse_homophones)
            results[str(w)] = res[1]
            setattr(w.original, corpus_context.attribute.name, res[0])
        # for w in corpus_context:
        #     if stop_check is not None and stop_check():
        #         return
        #     cur += 1
        #     call_back(cur)
        #     res = function(w)
        #     results[str(w)] = res[1]#[str(r) for r in res[1]]
        #     setattr(w.original, corpus_context.attribute.name, res[0])
    else:
        iterable = ((w,) for w in corpus_context)
        neighbors = score_mp(iterable, function, num_cores, call_back, stop_check, chunk_size= 1)
        for n in neighbors:
            #Have to look up the key, then look up the object due to how
            #multiprocessing pickles objects
            setattr(corpus_context.corpus.find(corpus_context.corpus.key(n[0])),
                    corpus_context.attribute.name, n[1][0])
    return results

def find_mutation_minpairs(corpus_context, query, tier_type = None, collapse_homophones = False,
            stop_check = None, call_back = None):
    """Find all minimal pairs of the query word based only on segment
    mutations (not deletions/insertions)

    Parameters
    ----------
    corpus_context : CorpusContext
        Context manager for a corpus
    query : Word
        The word whose minimal pairs to find
    stop_check : callable or None
        Optional function to check whether to gracefully terminate early
    call_back : callable or None
        Optional function to supply progress information during the function

    Returns
    -------
    list
        The found minimal pairs for the queried word
    """
    matches = []
    sequence_type = corpus_context.sequence_type
    query = ensure_query_is_word(query, corpus_context, corpus_context.sequence_type, tier_type)
    if call_back is not None:
        call_back('Finding neighbors...')
        call_back(0,len(corpus_context))
        cur = 0
    # Infinite insertion/deletion penalties restrict the aligner to pure
    # substitutions; a total cost of exactly 1 means a single mutation.
    al = Aligner(features_tf=False, ins_penalty=float('inf'), del_penalty=float('inf'), sub_penalty=1)
    for w in corpus_context:
        w_sequence = getattr(w, sequence_type)
        query_sequence = getattr(query, sequence_type)
        if stop_check is not None and stop_check():
            return
        if call_back is not None:
            cur += 1
            if cur % 10 == 0:
                call_back(cur)
        if (len(w_sequence) > len(query_sequence)+1 or
            len(w_sequence) < len(query_sequence)-1):
            continue
        m = al.make_similarity_matrix(query_sequence, w_sequence)
        if m[-1][-1]['f'] != 1:
            continue
        w_sequence = getattr(w, sequence_type)
        if collapse_homophones and any(getattr(m, sequence_type) == w_sequence for m in matches):
            continue
        else:
            #matches.append(str(w_sequence))
            matches.append(w)
    matches = [m.spelling for m in matches]
    # NOTE(review): query_sequence is assigned inside the loop, so an empty
    # corpus would raise NameError here; also the set difference compares
    # spellings against str(query_sequence) — confirm that is intentional.
    neighbors = list(set(matches)-set([str(query_sequence)]))
    return (len(neighbors), neighbors)

def ensure_query_is_word(query, corpus, sequence_type, tier_type, trans_delimiter='.', file_type=None):
    """Coerce ``query`` into a Word: pass Words through; otherwise build one
    from the string, respecting whether the tier holds spelling or
    transcription, and falling back to corpus lookup for tier queries."""
    if isinstance(query, Word):
        query_word = query
    else:
        if tier_type.att_type == 'spelling':

            if file_type == sequence_type:
                query_word = Word(**{sequence_type: list(query)})
            else:
                query_word = query.replace(trans_delimiter, '')
                query_word = Word(**{sequence_type: list(query_word)})

        elif tier_type.att_type == 'tier':
            if file_type == sequence_type:
                new_query = parse(query, trans_delimiter)
                query_word = Word(**{sequence_type: new_query})
            else:
                try:
                    query_word = corpus.corpus.find(query)
                except KeyError:
                    new_query = parse(query, trans_delimiter)
                    query_word = Word(**{sequence_type: list(new_query)})

    return query_word

def parse(word, delimiter):
    # Split on the delimiter when present; otherwise treat each character
    # as one segment.
    return word.split(delimiter) if delimiter in word else list(word)
{ "repo_name": "PhonologicalCorpusTools/CorpusTools", "path": "corpustools/neighdens/neighborhood_density.py", "copies": "1", "size": "15290", "license": "bsd-3-clause", "hash": 8117071112777940000, "line_mean": 42.4403409091, "line_max": 126, "alpha_frac": 0.6026160889, "autogenerated": false, "ratio": 4.058932837801964, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.5161548926701964, "avg_score": null, "num_lines": null }
from functools import partial

from crontabber.mixins import (
    with_transactional_resource,
    with_resource_connection_as_argument,
    with_single_transaction
)


#==============================================================================
# dedicated hbase mixins
#------------------------------------------------------------------------------
# this class decorator adds attributes to the class in the form:
#     self.long_term_storage_connection
#     self.long_term_storage_transaction
# when using this definition as a class decorator, it is necessary to use
# parenthesis as it is a function call:
#     @with_hbase_transactions()
#     class MyClass ...
with_hbase_transactions = partial(
    with_transactional_resource,
    'socorro.external.hb.connection_context.ConnectionContext',
    'long_term_storage'
)

#------------------------------------------------------------------------------
# this class decorator adds a _run_proxy method to the class that will
# acquire a database connection and then pass it to the invocation of the
# class' "run" method.  Since the connection is in the form of a
# context manager, the connection will automatically be closed when "run"
# completes.
# when using this definition as a class decorator, it is necessary to use
# parenthesis as it is a function call:
#     @with_hbase_connection_as_argument()
#     class MyClass ...
with_hbase_connection_as_argument = partial(
    with_resource_connection_as_argument,
    'long_term_storage'
)

#------------------------------------------------------------------------------
# this class decorator adds a _run_proxy method to the class that will
# call the class' run method in the context of a database transaction.  It
# passes the connection to the "run" function.  When "run" completes without
# raising an exception, the transaction will be committed if the connection
# context class understands transactions.  The default HBase connection does
# not do transactions.
# when using this definition as a class decorator, it is necessary to use
# parenthesis as it is a function call:
#     @with_single_hb_transaction()
#     class MyClass ...
with_single_hb_transaction = partial(
    with_single_transaction,
    'long_term_storage'
)


#==============================================================================
# dedicated rabbitmq mixins
#------------------------------------------------------------------------------
# this class decorator adds attributes to the class in the form:
#     self.queuing_connection
#     self.queuing_transaction
# when using this definition as a class decorator, it is necessary to use
# parenthesis as it is a function call:
#     @with_rabbitmq_transactions()
#     class MyClass ...
with_rabbitmq_transactions = partial(
    with_transactional_resource,
    'socorro.external.rabbitmq.connection_context.ConnectionContext',
    'queuing'
)

#------------------------------------------------------------------------------
# this class decorator adds a _run_proxy method to the class that will
# acquire a database connection and then pass it to the invocation of the
# class' "run" method.  Since the connection is in the form of a
# context manager, the connection will automatically be closed when "run"
# completes.
# when using this definition as a class decorator, it is necessary to use
# parenthesis as it is a function call:
#     @with_rabbitmq_connection_as_argument()
#     class MyClass ...
with_rabbitmq_connection_as_argument = partial(
    with_resource_connection_as_argument,
    'queuing'
)

#------------------------------------------------------------------------------
# this class decorator adds a _run_proxy method to the class that will
# call the class' run method in the context of a database transaction.  It
# passes the connection to the "run" function.  When "run" completes without
# raising an exception, the transaction will be committed if the connection
# context class understands transactions.  The default RabbitMQ connection
# does not do transactions.
# when using this definition as a class decorator, it is necessary to use
# parenthesis as it is a function call:
#     @with_single_rabbitmq_transaction()
#     class MyClass ...
with_single_rabbitmq_transaction = partial(
    with_single_transaction,
    'queuing'
)
{ "repo_name": "Tchanders/socorro", "path": "socorro/cron/mixins.py", "copies": "13", "size": "4272", "license": "mpl-2.0", "hash": -7786449680920796000, "line_mean": 43.0412371134, "line_max": 79, "alpha_frac": 0.6399812734, "autogenerated": false, "ratio": 4.598493003229279, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 1, "avg_score": null, "num_lines": null }
"""PyMEL control adapters that save and restore Maya UI control state.

Each PMCtrl* class wraps one PyMEL control type and knows how to copy the
control's state into stored preference data (ctrl2DataProcedure) and back
(data2CtrlProcedure).  `getController` is the factory entry point.
"""
from functools import partial

from ctrl_base import CtrlBase
from pstypes import UIType, Attr
from com import message


class PMCtrlBase(CtrlBase):
    """Common base for all PyMEL control adapters."""

    def __init__(self, control, defaultValue):
        super(PMCtrlBase, self).__init__(control, defaultValue)

    def retrieveControlName(self):
        # PyMEL controls expose their UI name via shortName()
        return self.control.shortName()


class PMCtrlSimple(PMCtrlBase):
    """Adapter for controls with plain getValue/setValue accessors."""

    def __init__(self, globalDefault, *args, **kwargs):
        super(PMCtrlSimple, self).__init__(*args, **kwargs)
        self.defaultValueGlobal = globalDefault
        self.setupGetSetVars(Attr.Value, self.control.getValue, self.control.setValue)


class PMCtrlColorSliderGrp(PMCtrlBase):
    """Adapter for colorSliderGrp; persists the RGB triple."""

    def __init__(self, *args, **kwargs):
        super(PMCtrlColorSliderGrp, self).__init__(*args, **kwargs)
        self.defaultValueGlobal = (0, 0, 0)
        self.setupGetSetVars(Attr.Value, self.control.getRgbValue, self.control.setRgbValue)


class PMCtrlFrameLayout(PMCtrlBase):
    """Adapter for frameLayout; persists the collapsed state."""

    def __init__(self, *args, **kwargs):
        super(PMCtrlFrameLayout, self).__init__(*args, **kwargs)
        self.defaultValueGlobal = False
        self.setupGetSetVars(Attr.Value, self.control.getCollapse, self.control.setCollapse)


class PMCtrlRadioButton(PMCtrlBase):
    """Adapter for a single radio button; persists its selected flag."""

    def __init__(self, *args, **kwargs):
        super(PMCtrlRadioButton, self).__init__(*args, **kwargs)
        self.defaultValueGlobal = False
        self.setupGetSetVars(Attr.Value, self.control.getSelect, self.control.setSelect)


class PMCtrlRadioButtonGrp(PMCtrlBase):
    """Adapter for radioButtonGrp; persists the 1-based selected index."""

    def __init__(self, *args, **kwargs):
        super(PMCtrlRadioButtonGrp, self).__init__(*args, **kwargs)
        self.defaultValueGlobal = 1
        self.setupGetSetVars(Attr.Value, self.control.getSelect, self.control.setSelect)


class PMCtrlOptionMenu(PMCtrlBase):
    """Adapter for optionMenu(Grp); persists the selected item index."""

    def __init__(self, *args, **kwargs):
        super(PMCtrlOptionMenu, self).__init__(*args, **kwargs)
        self.defaultValueGlobal = 1

    def ctrl2DataProcedure(self):
        self.setAttr(Attr.CurrentIndex, self.control.getSelect())

    def data2CtrlProcedure(self):
        prefValue = self.getAttr(Attr.CurrentIndex)
        # only restore if the stored index is still valid for this menu
        if 0 < prefValue <= self.control.getNumberOfItems():
            self.control.setSelect(prefValue)


class PMCtrlTabLayout(PMCtrlBase):
    """Adapter for tabLayout/shelfTabLayout; persists the active tab index."""

    def __init__(self, *args, **kwargs):
        super(PMCtrlTabLayout, self).__init__(*args, **kwargs)
        self.defaultValueGlobal = 1

    def ctrl2DataProcedure(self):
        self.setAttr(Attr.Value, self.control.getSelectTabIndex())

    def data2CtrlProcedure(self):
        prefValue = self.getAttr(Attr.Value)
        # only restore if the stored tab index still exists
        if 0 < prefValue <= self.control.getNumberOfChildren():
            self.control.setSelectTabIndex(prefValue)


class PMCtrlTextField(PMCtrlBase):
    """Adapter for text fields and scroll fields; persists the text."""

    def __init__(self, *args, **kwargs):
        super(PMCtrlTextField, self).__init__(*args, **kwargs)
        self.defaultValueGlobal = ''
        self.setupGetSetVars(Attr.Value, self.control.getText, self.control.setText)


class PMCtrlGrp4Simple(PMCtrlBase):
    """Adapter for *Grp controls with up to 4 sub-values.

    `grpSize` selects how many of the getValue1..4/setValue1..4 accessor
    pairs are used; the values are stored as a list.
    """

    def __init__(self, globalDefault, grpSize, *args, **kwargs):
        super(PMCtrlGrp4Simple, self).__init__(*args, **kwargs)
        self.defaultValueGlobal = globalDefault
        self.grpSize = grpSize
        self.getters = {
            0: self.control.getValue1,
            1: self.control.getValue2,
            2: self.control.getValue3,
            3: self.control.getValue4
        }
        self.setters = {
            0: self.control.setValue1,
            1: self.control.setValue2,
            2: self.control.setValue3,
            3: self.control.setValue4
        }

    # noinspection PyCallingNonCallable
    def ctrl2DataProcedure(self):
        self.setAttr(Attr.Value, [self.getters[i]() for i in range(self.grpSize)])

    # noinspection PyCallingNonCallable
    def data2CtrlProcedure(self):
        prefValue = self.getAttr(Attr.Value)
        for i in range(self.grpSize):
            self.setters[i](prefValue[i])


class PMCtrlScrollLayout(PMCtrlBase):
    """Adapter for scrollLayout; persists the (down, right) scroll offsets."""

    def __init__(self, *args, **kwargs):
        super(PMCtrlScrollLayout, self).__init__(*args, **kwargs)
        self.defaultValueGlobal = (0, 0)

    def ctrl2DataProcedure(self):
        self.setAttr(Attr.ScrollValues, self.control.getScrollAreaValue())

    def data2CtrlProcedure(self):
        currentScrollDown, currentScrollRight = self.control.getScrollAreaValue()
        prefValue = self.getAttr(Attr.ScrollValues)
        # scrollByPixel is relative, so first rewind to the origin, then
        # scroll forward to the stored offsets
        self.control.scrollByPixel(['up', currentScrollDown])
        self.control.scrollByPixel(['left', currentScrollRight])
        self.control.scrollByPixel(['down', prefValue[0]])
        self.control.scrollByPixel(['right', prefValue[1]])


class PMCtrlTextScrollList(PMCtrlBase):
    """Adapter for (icon)textScrollList; persists the selected indexes."""

    def __init__(self, *args, **kwargs):
        super(PMCtrlTextScrollList, self).__init__(*args, **kwargs)
        self.defaultValueGlobal = ()

    def ctrl2DataProcedure(self):
        # getSelectIndexedItem() returns a falsy value when nothing is
        # selected; normalize that to an empty list
        self.setAttr(Attr.SelectedIndexes, self.control.getSelectIndexedItem() or [])

    def data2CtrlProcedure(self):
        self.control.deselectAll()
        # in case of invalid index. IconTextScrollList does not have a method
        # for getting the number of items, so the validity of the stored
        # index cannot be checked up front.
        try:
            self.control.setSelectIndexedItem(self.getAttr(Attr.SelectedIndexes))
        except RuntimeError:
            pass


class PMCtrlScriptTable(PMCtrlBase):
    """Adapter for scriptTable; persists the selected cell coordinates."""

    def __init__(self, *args, **kwargs):
        super(PMCtrlScriptTable, self).__init__(*args, **kwargs)
        self.defaultValueGlobal = (0, 0)

    def ctrl2DataProcedure(self):
        controlData = self.control.getSelectedCells()
        # no selection is reported as None; store a harmless (0, 0) instead
        self.setAttr(Attr.SelectedIndexes, [0, 0] if controlData is None else controlData)

    def data2CtrlProcedure(self):
        self.control.setSelectedCells(self.getAttr(Attr.SelectedIndexes))


# UI type -> adapter factory.  partial() pre-binds the global default value
# (and group size for *Grp controls) expected by each adapter's __init__.
constructors = {
    UIType.PMCheckBox: partial(PMCtrlSimple, False),
    UIType.PMCheckBoxGrp1: partial(PMCtrlGrp4Simple, [False], 1),
    UIType.PMCheckBoxGrp2: partial(PMCtrlGrp4Simple, [False, False], 2),
    UIType.PMCheckBoxGrp3: partial(PMCtrlGrp4Simple, [False, False, False], 3),
    UIType.PMCheckBoxGrp4: partial(PMCtrlGrp4Simple, [False, False, False, False], 4),
    UIType.PMColorSliderGrp: PMCtrlColorSliderGrp,
    UIType.PMFloatField: partial(PMCtrlSimple, 0),
    UIType.PMFloatFieldGrp1: partial(PMCtrlGrp4Simple, [0], 1),
    UIType.PMFloatFieldGrp2: partial(PMCtrlGrp4Simple, [0, 0], 2),
    UIType.PMFloatFieldGrp3: partial(PMCtrlGrp4Simple, [0, 0, 0], 3),
    UIType.PMFloatFieldGrp4: partial(PMCtrlGrp4Simple, [0, 0, 0, 0], 4),
    UIType.PMFloatScrollBar: partial(PMCtrlSimple, 0),
    UIType.PMFloatSlider: partial(PMCtrlSimple, 0),
    UIType.PMFloatSliderGrp: partial(PMCtrlSimple, 0),
    UIType.PMFrameLayout: PMCtrlFrameLayout,
    UIType.PMIconTextCheckBox: partial(PMCtrlSimple, False),
    UIType.PMIconTextRadioButton: PMCtrlRadioButton,
    UIType.PMIconTextScrollList: PMCtrlTextScrollList,
    UIType.PMIntField: partial(PMCtrlSimple, 0),
    UIType.PMIntFieldGrp1: partial(PMCtrlGrp4Simple, [0], 1),
    UIType.PMIntFieldGrp2: partial(PMCtrlGrp4Simple, [0, 0], 2),
    UIType.PMIntFieldGrp3: partial(PMCtrlGrp4Simple, [0, 0, 0], 3),
    UIType.PMIntFieldGrp4: partial(PMCtrlGrp4Simple, [0, 0, 0, 0], 4),
    UIType.PMIntScrollBar: partial(PMCtrlSimple, 0),
    UIType.PMIntSlider: partial(PMCtrlSimple, 0),
    UIType.PMIntSliderGrp: partial(PMCtrlSimple, 0),
    UIType.PMOptionMenu: PMCtrlOptionMenu,
    UIType.PMOptionMenuGrp: PMCtrlOptionMenu,
    UIType.PMRadioButton: PMCtrlRadioButton,
    UIType.PMRadioButtonGrp1: PMCtrlRadioButtonGrp,
    UIType.PMRadioButtonGrp2: PMCtrlRadioButtonGrp,
    UIType.PMRadioButtonGrp3: PMCtrlRadioButtonGrp,
    UIType.PMRadioButtonGrp4: PMCtrlRadioButtonGrp,
    UIType.PMSymbolCheckBox: partial(PMCtrlSimple, False),
    UIType.PMScriptTable: PMCtrlScriptTable,
    UIType.PMScrollField: PMCtrlTextField,
    UIType.PMScrollLayout: PMCtrlScrollLayout,
    UIType.PMShelfTabLayout: PMCtrlTabLayout,
    UIType.PMTabLayout: PMCtrlTabLayout,
    UIType.PMTextField: PMCtrlTextField,
    UIType.PMTextFieldButtonGrp: PMCtrlTextField,
    UIType.PMTextFieldGrp: PMCtrlTextField,
    UIType.PMTextScrollList: PMCtrlTextScrollList
}


# noinspection PyCallingNonCallable
def getController(uiType, control, defaultValue):
    """Create the adapter for *uiType*, or report an error for unknown types.

    NOTE(review): returns None for unknown types (only logs via message()) —
    callers must be prepared for a None result.
    """
    if uiType in constructors:
        return constructors[uiType](control, defaultValue)
    else:
        message('Cannot create controller: Unknown controller type: {0}.'.format(str(uiType)))
{ "repo_name": "theetcher/fxpt", "path": "fxpt/fx_prefsaver/ctrl_pymel.py", "copies": "1", "size": "8506", "license": "mit", "hash": -7104016940069467000, "line_mean": 36.3070175439, "line_max": 106, "alpha_frac": 0.6971549494, "autogenerated": false, "ratio": 3.330462020360219, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.9524159558292956, "avg_score": 0.000691482293452634, "num_lines": 228 }
from functools import partial

from ..db import db


def ForeignKey(parent_field):
    """Return a non-nullable integer FK column that cascades on delete."""
    return db.Column(
        db.Integer,
        db.ForeignKey(parent_field, ondelete='CASCADE'),
        nullable=False
    )


class Model(db.Model):
    """Abstract base model providing an id, timestamps and query helpers."""

    __abstract__ = True

    id = db.Column(db.Integer, primary_key=True)
    # set once when the row is inserted
    created_on = db.Column(db.DateTime, default=db.func.now())
    # refreshed by the database on every UPDATE
    updated_on = db.Column(db.DateTime, default=db.func.now(),
                           onupdate=db.func.now())

    @classmethod
    def _create_query(cls, dictionary):
        """Build a query ANDing an equality filter for each key/value pair."""
        query = cls.query
        for k, v in dictionary.items():
            query = query.filter(getattr(cls, k) == v)
        return query

    @classmethod
    def find_one(cls, **kw):
        """Return the first row matching **kw, or None."""
        return cls._create_query(kw).first()

    @classmethod
    def find_all(cls, **kw):
        """Return all rows matching **kw."""
        return cls._create_query(kw).all()

    @classmethod
    def find_or_create(cls, **kw):
        """Return an existing row matching **kw, creating one if none exists."""
        return cls.find_one(**kw) or cls.create(**kw)

    @classmethod
    def create(cls, *args, **kw):
        """Instantiate, persist and return a new instance.

        Fix: previously this returned None, which made find_or_create()
        return None whenever it had to create the row.
        """
        instance = cls(*args, **kw)
        instance.save()
        return instance

    def save(self):
        """Add this instance to the session and commit immediately."""
        db.session.add(self)
        db.session.commit()
{ "repo_name": "azlyth/tokens", "path": "backend/core/models/base.py", "copies": "1", "size": "1142", "license": "unlicense", "hash": -7216359902617003000, "line_mean": 21.84, "line_max": 86, "alpha_frac": 0.5963222417, "autogenerated": false, "ratio": 3.56875, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.46650722417, "avg_score": null, "num_lines": null }
"""Datapoint post-processing: null filtering and time-bucket summarizing.

Datapoints are dicts with 'x' (timestamp) and 'y' (value) keys.
"""
from functools import partial

from diamondash import utils


def skip_nulls(datapoints):
    """Drop datapoints whose x or y is None."""
    return [
        d for d in datapoints
        if d['y'] is not None and d['x'] is not None]


def zeroize_nulls(datapoints):
    """Replace None y-values with 0; datapoints without an x are dropped."""
    return [
        d if d['y'] is not None
        else {'x': d['x'], 'y': 0}
        for d in datapoints
        if d['x'] is not None]


def agg_max(vals):
    """Max of vals, or 0 for an empty sequence."""
    return max(vals) if vals else 0


def agg_min(vals):
    """Min of vals, or 0 for an empty sequence."""
    return min(vals) if vals else 0


def agg_avg(vals):
    """Arithmetic mean of vals, or 0 for an empty sequence."""
    return sum(vals) / len(vals) if vals else 0


class Summarizer(object):
    """Base class: collapses datapoints into fixed-size time buckets.

    Subclasses implement __call__(from_time, datapoints) and decide how the
    datapoints inside one bucket are reduced to a single value.
    """

    def __init__(self, time_aligner, bucket_size, relative=False):
        self.relative = relative
        self.time_aligner = time_aligner
        self.bucket_size = bucket_size

    def align_time(self, t, from_time):
        """Snap t to a bucket boundary (relative to from_time if configured)."""
        relative_to = from_time if self.relative else None
        return self.time_aligner(t, self.bucket_size, relative_to=relative_to)

    def __call__(self, from_time, datapoints):
        raise NotImplementedError()


class LastDatapointSummarizer(Summarizer):
    """Represents each bucket by the last datapoint that fell into it."""

    def __call__(self, from_time, datapoints):
        # NOTE(review): assumes datapoints are ordered by 'x' — verify callers.
        step = self.align_time(from_time, from_time)
        results = []

        if not datapoints:
            return results

        it = iter(datapoints)
        prev = next(it)
        for curr in it:
            aligned_x = self.align_time(curr['x'], from_time)
            if aligned_x > step:
                # curr starts a new bucket, so prev was the last datapoint
                # of the bucket at `step`
                results.append({'x': step, 'y': prev['y']})
                step = aligned_x
            prev = curr

        # add the last datapoint
        results.append({
            'x': self.align_time(prev['x'], from_time),
            'y': prev['y']
        })

        return results


class AggregatingSummarizer(Summarizer):
    """Reduces each bucket's y-values with an aggregator function."""

    def __init__(self, time_aligner, bucket_size, aggregator, relative=False):
        super(AggregatingSummarizer, self).__init__(
            time_aligner, bucket_size, relative)
        self.aggregator = aggregator

    def __call__(self, from_time, datapoints):
        # NOTE(review): assumes datapoints are ordered by 'x' — verify callers.
        step = self.align_time(from_time, from_time)
        results = []

        if not datapoints:
            return results

        bucket = []
        for datapoint in datapoints:
            aligned_x = self.align_time(datapoint['x'], from_time)
            if aligned_x > step:
                # flush the bucket that just ended before starting the new one
                if bucket:
                    results.append({'x': step, 'y': self.aggregator(bucket)})
                    bucket = []
                step = aligned_x
            bucket.append(datapoint['y'])

        # add the aggregation result of the last bucket
        results.append({'x': step, 'y': self.aggregator(bucket)})

        return results


class Summarizers(object):
    """Registry mapping summarizer names to Summarizer subclasses."""

    def __init__(self, summarizers):
        self.summarizers = summarizers

    def get(self, name, time_alignment, bucket_size, relative=False):
        """Instantiate the named summarizer; raises KeyError if unknown."""
        time_aligner = utils.time_aligners.get(time_alignment)

        if name not in self.summarizers:
            raise KeyError("No summarizer called '%s' exists" % name)

        summarizer_cls = self.summarizers[name]
        return summarizer_cls(time_aligner, bucket_size, relative=relative)


# name -> null-filter function
null_filters = {
    'skip': skip_nulls,
    'zeroize': zeroize_nulls,
}

# name -> bucket-aggregation function
aggregators = {
    'sum': sum,
    'max': agg_max,
    'min': agg_min,
    'avg': agg_avg,
}

# default registry used by callers of this module
summarizers = Summarizers({
    'sum': partial(AggregatingSummarizer, aggregator=aggregators['sum']),
    'max': partial(AggregatingSummarizer, aggregator=aggregators['max']),
    'min': partial(AggregatingSummarizer, aggregator=aggregators['min']),
    'avg': partial(AggregatingSummarizer, aggregator=aggregators['avg']),
    'last': LastDatapointSummarizer,
})
{ "repo_name": "praekelt/diamondash", "path": "diamondash/backends/processors.py", "copies": "1", "size": "3648", "license": "bsd-3-clause", "hash": 2024079177708473900, "line_mean": 26.8473282443, "line_max": 78, "alpha_frac": 0.5995065789, "autogenerated": false, "ratio": 3.6663316582914574, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.47658382371914576, "avg_score": null, "num_lines": null }
"""URL routes for the phorum app, including auth password-reset views."""
# NOTE(review): `partial` is imported but unused in this module — candidate
# for removal once confirmed nothing re-imports it from here.
from functools import partial

from django.conf import settings
from django.conf.urls import url
from django.contrib.auth.views import (
    PasswordResetView as DefaultPasswordResetView,
    PasswordResetCompleteView,
    PasswordResetConfirmView,
    PasswordResetDoneView,
)

from . import views as phorum_views


class PasswordResetView(DefaultPasswordResetView):
    # send reset mails from the server address instead of DEFAULT_FROM_EMAIL
    from_email = settings.SERVER_EMAIL


urlpatterns = [
    url(r'^$', phorum_views.room_list, name="home"),
    url(r'^inbox$', phorum_views.inbox, name="inbox"),
    url(r'^inbox/new-message$', phorum_views.inbox_send, name="inbox_send"),
    url(r'^room/new$', phorum_views.room_new, name="room_new"),
    url(r'^room/(?P<room_slug>.+)/$', phorum_views.room_view, name="room_view"),
    url(r'^room/(?P<room_slug>.+)/password$', phorum_views.room_password_prompt, name="room_password_prompt"),
    url(r'^room/(?P<room_slug>.+)/mark-unread$', phorum_views.room_mark_unread, name="room_mark_unread"),
    url(r'^room/(?P<room_slug>.+)/edit$', phorum_views.room_edit, name="room_edit"),
    url(r'^room/(?P<room_slug>.+)/new-message$', phorum_views.message_send, name="message_send"),
    url(r'^login$', phorum_views.login, name="login"),
    url(r'^logout$', phorum_views.logout, name="logout"),
    url(r'^user/new', phorum_views.user_new, name="user_new"),
    url(r'^user/edit', phorum_views.user_edit, name="user_edit"),
    url(r'^user/customization', phorum_views.user_customization, name="user_customization"),
    url(r'^users/', phorum_views.users, name="users"),
    url(r'^message/(?P<message_id>\d+)/delete', phorum_views.message_delete, name="message_delete"),
    # NOTE(review): the `$` sits inside the res_type group (`js$`), so it only
    # anchors the 'js' alternative — confirm whether `css` URLs should also be
    # anchored.
    url(r'^user/(?P<user_id>\d+)/custom\.(?P<res_type>css|js$)', phorum_views.custom_resource, name="custom_resource"),

    # Password reset links
    url(r'^user/password/reset/$', PasswordResetView.as_view(), name='password_reset'),
    url(r'^user/password/reset/done/$', PasswordResetDoneView.as_view(), name='password_reset_done'),
    url(r'^user/password/reset/(?P<uidb64>[0-9A-Za-z_\-]+)/(?P<token>[0-9A-Za-z]{1,13}-[0-9A-Za-z]{1,20})/$',
        PasswordResetConfirmView.as_view(), name='password_reset_confirm'),
    url(r'^user/password/reset/complete/$', PasswordResetCompleteView.as_view(), name='password_reset_complete'),
]
{ "repo_name": "sairon/score-phorum", "path": "src/phorum/urls.py", "copies": "1", "size": "2286", "license": "bsd-3-clause", "hash": -7447633410802601000, "line_mean": 50.9545454545, "line_max": 119, "alpha_frac": 0.6824146982, "autogenerated": false, "ratio": 3.1315068493150684, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.43139215475150683, "avg_score": null, "num_lines": null }
from functools import partial

from django.conf import settings
from django.core.exceptions import ImproperlyConfigured

from cms.utils.compat import DJANGO_1_6
from cms.utils.compat.dj import is_installed as app_is_installed


def validate_dependencies():
    """
    Check for installed apps, their versions and configuration options
    """
    if not app_is_installed('mptt'):
        raise ImproperlyConfigured('django CMS requires django-mptt package.')

    if app_is_installed('reversion'):
        from reversion.admin import VersionAdmin
        if not hasattr(VersionAdmin, 'get_urls'):
            raise ImproperlyConfigured('django CMS requires newer version of reversion (VersionAdmin must contain get_urls method)')


def validate_settings():
    """
    Check project settings file for required options
    """
    if 'django.core.context_processors.request' not in settings.TEMPLATE_CONTEXT_PROCESSORS:
        raise ImproperlyConfigured('django CMS requires django.core.context_processors.request in settings.TEMPLATE_CONTEXT_PROCESSORS to work correctly.')


def setup():
    """
    Gather all checks and validations
    """
    if DJANGO_1_6:
        # While setup is called in all the Django versions, only 1.6- requires
        # patching the AppCache.  1.7 provides a cleaner way to handle this in
        # AppConfig, so the patching is left for older versions only.
        from django.db.models import loading

        def get_models_patched(self, app_mod=None,
                               include_auto_created=False,
                               include_deferred=False,
                               only_installed=True):
            # Delegate to the unpatched implementation, then refresh plugin
            # metadata as a side effect of every model-cache access.
            # Fix: the original discarded the model list and returned None,
            # breaking every caller that expects get_models() to return the
            # list of model classes.
            model_list = loading.AppCache.get_models(
                self, app_mod, include_auto_created, include_deferred,
                only_installed)
            from cms.plugin_pool import plugin_pool
            plugin_pool.set_plugin_meta()
            return model_list

        # NOTE(review): assigning the function as an *instance* attribute
        # means it is not auto-bound; the module-level alias below binds the
        # cache explicitly via partial — confirm callers always go through
        # one of these two entry points.
        loading.cache.get_models = get_models_patched
        loading.get_models = partial(get_models_patched, loading.cache)
    validate_dependencies()
    validate_settings()
{ "repo_name": "360youlun/django-cms", "path": "cms/utils/setup.py", "copies": "2", "size": "2023", "license": "bsd-3-clause", "hash": -8079222190899157000, "line_mean": 38.6666666667, "line_max": 155, "alpha_frac": 0.6915472071, "autogenerated": false, "ratio": 4.4657836644591615, "config_test": true, "has_no_keywords": false, "few_assignments": false, "quality_score": 1, "avg_score": 0.0007260278546495354, "num_lines": 51 }
from functools import partial

from django.conf import settings
from django.core.exceptions import ImproperlyConfigured

from templated_email import send_templated_mail


class TemplatedEmailFormViewMixin(object):
    """FormView mixin that sends a templated email on form success/failure.

    Configure via the `templated_email_*` class attributes or by overriding
    the corresponding hook methods.
    """

    # name of the templated-email template; must be set (or override
    # templated_email_get_template_names)
    templated_email_template_name = None
    # send an email when the form validates
    templated_email_send_on_success = True
    # send an email when the form fails validation
    templated_email_send_on_failure = False
    # Stored as a partial so the setting is read lazily at send time, not at
    # class-definition time.  Subclasses may instead assign a plain string;
    # see templated_email_get_send_email_kwargs for how both cases work.
    templated_email_from_email = partial(getattr, settings, 'TEMPLATED_EMAIL_FROM_EMAIL', None)

    def templated_email_get_template_names(self, valid):
        """Return the list of template names to render.

        Raises ImproperlyConfigured when no template name is set.
        """
        if self.templated_email_template_name is None:
            raise ImproperlyConfigured(
                "TemplatedEmailFormViewMixin requires either a definition of "
                "'templated_email_template_name' or an implementation of 'templated_email_get_template_names()'")

        return [self.templated_email_template_name]

    def templated_email_get_context_data(self, **kwargs):
        """Hook: extra context for the email template; defaults to kwargs."""
        return kwargs

    def templated_email_get_recipients(self, form):
        """Hook: must return the recipient list for the email."""
        raise NotImplementedError('You must implement templated_email_get_recipients method')

    def templated_email_get_send_email_kwargs(self, valid, form):
        """Assemble the kwargs passed to send_templated_mail."""
        if valid:
            context = self.templated_email_get_context_data(form_data=form.data)
        else:
            context = self.templated_email_get_context_data(form_errors=form.errors)

        # templated_email_from_email is either a callable (the lazy partial
        # above) or a plain value supplied by a subclass; calling a plain
        # value raises TypeError, in which case it is used as-is.
        try:
            from_email = self.templated_email_from_email()
        except TypeError:
            from_email = self.templated_email_from_email

        return {
            'template_name': self.templated_email_get_template_names(valid=valid),
            'from_email': from_email,
            'recipient_list': self.templated_email_get_recipients(form),
            'context': context
        }

    def templated_email_send_templated_mail(self, *args, **kwargs):
        """Hook: actually send the mail (override to customize/suppress)."""
        return send_templated_mail(*args, **kwargs)

    def form_valid(self, form):
        # run the normal success handling first, then notify by mail
        response = super(TemplatedEmailFormViewMixin, self).form_valid(form)

        if self.templated_email_send_on_success:
            self.templated_email_send_templated_mail(
                **self.templated_email_get_send_email_kwargs(valid=True, form=form))

        return response

    def form_invalid(self, form):
        # run the normal failure handling first, then (optionally) notify
        response = super(TemplatedEmailFormViewMixin, self).form_invalid(form)

        if self.templated_email_send_on_failure:
            self.templated_email_send_templated_mail(
                **self.templated_email_get_send_email_kwargs(valid=False, form=form))

        return response
{ "repo_name": "BradWhittington/django-templated-email", "path": "templated_email/generic_views.py", "copies": "2", "size": "2506", "license": "mit", "hash": 5337418401547678000, "line_mean": 41.4745762712, "line_max": 113, "alpha_frac": 0.6775738228, "autogenerated": false, "ratio": 3.9464566929133857, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 1, "avg_score": 0.0015148827947194677, "num_lines": 59 }
"""Render an email from a Django template and send it.

The template is expected to define `text`, `html`, `subject` and
`recipients` nodes, extracted via render_node.
"""
from functools import partial

from django.conf import settings
from django.core.mail import EmailMessage, EmailMultiAlternatives
from django.template import Context, RequestContext
from django.template.loader import select_template

from .util import render_node


def send_base(template_name, context_data, send_method, render_method,
        request=None, from_email=None, send_method_args=None, **kwargs):
    """Render the template's email fields and dispatch via send_method.

    send_method_args entries override the rendered fields (including
    from_email).  Returns whatever send_method returns.
    """
    send_method_args = send_method_args or {}

    args = get_message(
        template_name=template_name,
        context_data=context_data,
        request=request,
        render_method=render_method,
    )

    # fall back to the project-wide sender when none is given
    args["from_email"] = from_email or settings.DEFAULT_FROM_EMAIL
    args.update(send_method_args)

    return send_method(**args)


# backwards-compatible alias
send_mail = send_base


def get_message(template_name, context_data, request, render_method):
    """Select the template and render its email fields into a dict.

    Uses a RequestContext when a request is available so context processors
    run; template_name may be a single name or a list of candidates.
    """
    if request:
        c = RequestContext(request, context_data)
    else:
        c = Context(context_data)

    if not isinstance(template_name, (list, tuple)):
        template_name = [template_name]

    template = select_template(template_name)

    return render_method(template, c)


def render_django_fields(template, context):
    """Extract text/html/subject/recipient_list from the template's nodes."""
    message = {}
    message["text"] = render_node(template, "text", context)
    message["html"] = render_node(template, "html", context)
    message["subject"] = render_node(template, "subject", context)

    # the recipients node is a comma-separated list; strip whitespace
    recipients = render_node(template, "recipients", context)
    recipient_list = []
    for recipient in recipients.split(","):
        recipient_list.append(recipient.strip())
    message["recipient_list"] = recipient_list

    return message


def send_django_wrapper(**kwargs):
    """Build and send the Django email message; returns the message object.

    With both text and html parts an EmailMultiAlternatives is sent (text
    body, html alternative); html-only messages are sent as text/html.
    """
    text = kwargs.get("text", "")
    html = kwargs.get("html", "")

    if text and html:
        email_class = EmailMultiAlternatives
    else:
        email_class = EmailMessage

    # html-only: the html becomes the body itself (content_subtype below)
    if html and not text:
        body = html
    else:
        body = text

    msg = email_class(
        subject=kwargs["subject"],
        body=body,
        from_email=kwargs["from_email"],
        to=kwargs["recipient_list"],
        headers=kwargs.get("headers", {}),
        cc=kwargs.get("cc", []),
        connection=kwargs.get("connection", None),
    )

    if text and html:
        msg.attach_alternative(html, "text/html")

    if html and not text:
        msg.content_subtype = "html"

    msg.send()

    return msg


# public entry point: send_base pre-wired for Django templates/mail
send_django = partial(send_base,
    send_method=send_django_wrapper,
    render_method=render_django_fields,
)
{ "repo_name": "prestontimmons/django-email-template", "path": "email_template/email.py", "copies": "2", "size": "2513", "license": "mit", "hash": 3266218043247853000, "line_mean": 24.9072164948, "line_max": 78, "alpha_frac": 0.6538002388, "autogenerated": false, "ratio": 3.884080370942813, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.5537880609742812, "avg_score": null, "num_lines": null }
from functools import partial

from django.conf import settings
from elasticsearch_dsl.field import Keyword
from elasticsearch_dsl.field import Object as DSLObject
from elasticsearch_dsl.field import Text

from kitsune.search.es7_utils import es_analyzer_for_locale

SUPPORTED_LANGUAGES = list(settings.SUMO_LANGUAGES)
# this is a test locale - no need to add it to ES
SUPPORTED_LANGUAGES.remove("xx")


def _build_locale_field(field, locale, params):
    """Instantiate *field* for a single locale.

    Text fields get locale-specific analyzers wired in (the index analyzer
    doubles as the search_quote analyzer); any other field type is built
    with *params* unchanged.
    """
    if field is not Text:
        return field(**params)
    index_analyzer = es_analyzer_for_locale(locale)
    query_analyzer = es_analyzer_for_locale(locale, search_analyzer=True)
    return field(
        analyzer=index_analyzer,
        search_analyzer=query_analyzer,
        search_quote_analyzer=index_analyzer,
        **params,
    )


def _get_fields(field, locales, **params):
    """Construct the sub-fields of a locale-aware multi-field.

    Returns a dict mapping each locale code to its own field instance.
    """
    return {locale: _build_locale_field(field, locale, params) for locale in locales}


def construct_locale_field(field, locales, **params):
    """Construct a locale aware object: one sub-field per locale, wrapped in
    an Object field."""
    return DSLObject(properties=_get_fields(field=field, locales=locales, **params))


SumoTextField = partial(construct_locale_field, field=Text)
SumoKeywordField = partial(construct_locale_field, field=Keyword)

# This is an object in the form of
# {'en-US': Text(analyzer_for_the_specific_locale)}
SumoLocaleAwareTextField = partial(SumoTextField, locales=SUPPORTED_LANGUAGES)
SumoLocaleAwareKeywordField = partial(SumoKeywordField, locales=SUPPORTED_LANGUAGES)
{ "repo_name": "mozilla/kitsune", "path": "kitsune/search/fields.py", "copies": "1", "size": "1617", "license": "bsd-3-clause", "hash": -8235570924825218000, "line_mean": 33.4042553191, "line_max": 84, "alpha_frac": 0.6994434137, "autogenerated": false, "ratio": 3.934306569343066, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.5133749983043066, "avg_score": null, "num_lines": null }
from functools import partial from django.conf import settings from nose.tools import eq_ from pyquery import PyQuery as pq from kitsune.gallery.tests import ImageFactory from kitsune.sumo.parser import ( WikiParser, build_hook_params, _get_wiki_link, get_object_fallback, IMAGE_PARAMS, IMAGE_PARAM_VALUES, ) from kitsune.sumo.tests import TestCase from kitsune.wiki.models import Document from kitsune.wiki.tests import DocumentFactory, ApprovedRevisionFactory def pq_link(p, text): return pq(p.parse(text))("a") def pq_img(p, text, selector="img", locale=settings.WIKI_DEFAULT_LANGUAGE): doc = pq(p.parse(text, locale=locale)) return doc(selector) def doc_rev_parser(content, title="Installing Firefox", parser_cls=WikiParser, **kwargs): p = parser_cls() d = DocumentFactory(title=title, **kwargs) r = ApprovedRevisionFactory(document=d, content=content) return (d, r, p) build_hook_params_default = partial( build_hook_params, locale=settings.WIKI_DEFAULT_LANGUAGE, allowed_params=IMAGE_PARAMS, allowed_param_values=IMAGE_PARAM_VALUES, ) class GetObjectFallbackTests(TestCase): def test_empty(self): """get_object_fallback returns message when no objects.""" # English does not exist obj = get_object_fallback(Document, "A doc", "en-US", "!") eq_("!", obj) def test_english(self): # Create the English document d = DocumentFactory(title="A doc") # Now it exists obj = get_object_fallback(Document, "A doc", "en-US", "!") eq_(d, obj) def test_from_french(self): # Create the English document d = DocumentFactory(title="A doc") d.save() # Returns English document for French obj = get_object_fallback(Document, "A doc", "fr", "!") eq_(d, obj) def test_french(self): # Create English parent document en_d = DocumentFactory() ApprovedRevisionFactory(document=en_d) # Create the French document fr_d = DocumentFactory(parent=en_d, title="A doc", locale="fr") obj = get_object_fallback(Document, "A doc", "fr", "!") eq_(fr_d, obj) # Also works when English exists DocumentFactory(title="A doc") obj = 
get_object_fallback(Document, "A doc", "fr", "!") eq_(fr_d, obj) def test_translated(self): """If a localization of the English fallback exists, use it.""" en_d = DocumentFactory(title="A doc") ApprovedRevisionFactory(document=en_d) fr_d = DocumentFactory(parent=en_d, title="Une doc", locale="fr") # Without an approved revision, the en-US doc should be returned. obj = get_object_fallback(Document, "A doc", "fr") eq_(en_d, obj) # Approve a revision, then fr doc should be returned. ApprovedRevisionFactory(document=fr_d) obj = get_object_fallback(Document, "A doc", "fr") eq_(fr_d, obj) def test_redirect(self): """Assert get_object_fallback follows wiki redirects.""" target_rev = ApprovedRevisionFactory(document__title="target") translated_target_rev = ApprovedRevisionFactory( document__parent=target_rev.document, document__locale="de" ) ApprovedRevisionFactory(document__title="redirect", content="REDIRECT [[target]]") eq_( translated_target_rev.document, get_object_fallback(Document, "redirect", "de"), ) def test_redirect_translations_only(self): """Make sure get_object_fallback doesn't follow redirects when working purely in the default language. That would make it hard to navigate to redirects (to edit them, for example). 
""" ApprovedRevisionFactory(document__title="target", content="O hai.") redirect_rev = ApprovedRevisionFactory( document__title="redirect", content="REDIRECT [[target]]" ) eq_( redirect_rev.document, get_object_fallback(Document, "redirect", redirect_rev.document.locale), ) class TestWikiParser(TestCase): def setUp(self): self.d, self.r, self.p = doc_rev_parser("Test content", "Installing Firefox") def test_image_params_page(self): """build_hook_params handles wiki pages.""" _, params = build_hook_params_default("t|page=Installing Firefox") eq_("/en-US/kb/installing-firefox", params["link"]) assert params["found"] def test_image_params_link(self): """_build_image_params handles external links.""" _, params = build_hook_params_default("t|link=http://example.com") eq_("http://example.com", params["link"]) def test_image_params_page_link(self): """_build_image_params - wiki page overrides link.""" text = "t|page=Installing Firefox|link=http://example.com" _, params = build_hook_params_default(text) eq_("/en-US/kb/installing-firefox", params["link"]) def test_image_params_align(self): """Align valid options.""" align_vals = ("none", "left", "center", "right") for align in align_vals: _, params = build_hook_params_default("test.jpg|align=" + align) eq_(align, params["align"]) def test_image_params_align_invalid(self): """Align invalid options.""" _, params = build_hook_params_default("align=zzz") assert "align" not in params, "Align is present in params" def test_image_params_valign(self): """Vertical align valid options.""" valign_vals = ( "baseline", "sub", "super", "top", "text-top", "middle", "bottom", "text-bottom", ) for valign in valign_vals: _, params = build_hook_params_default("title|valign=" + valign) eq_(valign, params["valign"]) def test_image_params_valign_invalid(self): """Vertical align invalid options.""" _, params = build_hook_params_default("valign=zzz") assert "valign" not in params, "Vertical align is present in params" def 
test_image_params_alt(self): """Image alt override.""" _, params = build_hook_params_default("t|alt=some alternative text") eq_("some alternative text", params["alt"]) def test_image_params_frame(self): """Framed image.""" _, params = build_hook_params_default("title|frame") assert params["frame"] def test_image_params_width_height(self): """Image width.""" _, params = build_hook_params_default("t|width=10|height=20") eq_("10", params["width"]) eq_("20", params["height"]) def test_get_wiki_link(self): """Wiki links are properly built for existing pages.""" eq_( { "found": True, "url": "/en-US/kb/installing-firefox", "text": "Installing Firefox", }, _get_wiki_link("Installing Firefox", locale=settings.WIKI_DEFAULT_LANGUAGE), ) def test_showfor(self): """<showfor> tags should be escaped, not obeyed.""" eq_( "<p>&lt;showfor&gt;smoo&lt;/showfor&gt;</p>", self.p.parse("<showfor>smoo</showfor>").replace("\n", ""), ) def test_youtube_video(self): """Verify youtube embeds.""" urls = [ "http://www.youtube.com/watch?v=oHg5SJYRHA0", "https://youtube.com/watch?v=oHg5SJYRHA0" "http://youtu.be/oHg5SJYRHA0" "https://youtu.be/oHg5SJYRHA0", ] for url in urls: doc = pq(self.p.parse("[[V:%s]]" % url)) assert doc("iframe")[0].attrib["src"].startswith("//www.youtube.com/embed/oHg5SJYRHA0") def test_iframe_in_markup(self): """Verify iframe in wiki markup is escaped.""" doc = pq(self.p.parse('<iframe src="http://example.com"></iframe>')) eq_(0, len(doc("iframe"))) def test_iframe_hell_bug_898769(self): """Verify fix for bug 898769.""" content = r"""<iframe/src \/\/onload = prompt(1) <iframe/onreadystatechange=alert(/@blinkms/) <svg/onload=alert(1)""" eq_( '<p>&lt;iframe src="" \\="" onload="prompt(1)" &lt;="" p=""' "&gt;&lt;p&gt;&lt;iframe/onreadystatechange=" "alert(/@blinkms/)\n&lt;/p&gt;&lt;p&gt;&lt;" "svg/onload=alert(1)\n&lt;/p&gt;&lt;/iframe&gt;</p>", self.p.parse(content), ) def test_injections(self): testdata = ( # Normal image urls ( '<img 
src="https://example.com/nursekitty.jpg">', '<p><img src="https://example.com/nursekitty.jpg">\n</p>', ), ( "<img src=https://example.com/nursekitty.jpg />", '<p><img src="https://example.com/nursekitty.jpg">\n</p>', ), ( '<img src="https://example.com/nursekitty.jpg" />', '<p><img src="https://example.com/nursekitty.jpg">\n</p>', ), ( "<img src=https://example.com/nursekitty.jpg </img>", '<p><img src="https://example.com/nursekitty.jpg"></p>', ), # Script insertions from OWASP site ("<IMG SRC=`javascript:alert(\"'XSS'\")`>", "<p><img>\n</p>"), ('<IMG SRC=javascript:alert("XSS")>', "<p><img>\n</p>"), ("<IMG SRC=JaVaScRiPt:alert('XSS')>", "<p><img>\n</p>"), ("<IMG SRC=javascript:alert('XSS')>", "<p><img>\n</p>"), ("<IMG SRC=\"javascript:alert('XSS');\">", "<p><img>\n</p>"), ) for content, expected in testdata: eq_(expected, self.p.parse(content)) class TestWikiInternalLinks(TestCase): def setUp(self): self.d, self.r, self.p = doc_rev_parser("Test content", "Installing Firefox") def test_simple(self): """Simple internal link markup.""" link = pq_link(self.p, "[[Installing Firefox]]") eq_("/en-US/kb/installing-firefox", link.attr("href")) eq_("Installing Firefox", link.text()) assert not link.hasClass("new") def test_simple_markup(self): text = "[[Installing Firefox]]" eq_( '<p><a href="/en-US/kb/installing-firefox">' + "Installing Firefox</a></p>", self.p.parse(text).replace("\n", ""), ) def test_link_hash(self): """Internal link with hash.""" link = pq_link(self.p, "[[Installing Firefox#section name]]") eq_("/en-US/kb/installing-firefox#section_name", link.attr("href")) eq_("Installing Firefox", link.text()) def test_link_hash_text(self): """Internal link with hash and text.""" link = pq_link(self.p, "[[Installing Firefox#section name|section]]") eq_("/en-US/kb/installing-firefox#section_name", link.attr("href")) eq_("section", link.text()) def test_hash_only(self): """Internal hash only.""" link = pq_link(self.p, "[[#section 3]]") eq_("#section_3", 
link.attr("href")) eq_("#section 3", link.text()) def test_link_name(self): """Internal link with name.""" link = pq_link(self.p, "[[Installing Firefox|this name]]") eq_("/en-US/kb/installing-firefox", link.attr("href")) eq_("this name", link.text()) def test_link_with_extra_pipe(self): link = pq_link(self.p, "[[Installing Firefox|with|pipe]]") eq_("/en-US/kb/installing-firefox", link.attr("href")) eq_("with|pipe", link.text()) def test_hash_name(self): """Internal hash with name.""" link = pq_link(self.p, "[[#section 3|this name]]") eq_("#section_3", link.attr("href")) eq_("this name", link.text()) assert not link.hasClass("new") def test_link_hash_name(self): """Internal link with hash and name.""" link = pq_link(self.p, "[[Installing Firefox#section 3|this name]]") eq_("/en-US/kb/installing-firefox#section_3", link.attr("href")) eq_("this name", link.text()) def test_link_hash_name_markup(self): """Internal link with hash and name.""" text = "[[Installing Firefox#section 3|this name]]" eq_( '<p><a href="/en-US/kb/installing-firefox#section_3">this name</a>\n</p>', self.p.parse(text), ) def test_simple_create(self): """Simple link for inexistent page.""" link = pq_link(self.p, "[[A new page]]") assert link.hasClass("new") eq_("/en-US/kb/new?title=A+new+page", link.attr("href")) eq_("A new page", link.text()) def test_link_edit_hash_name(self): """Internal link for inexistent page with hash and name.""" link = pq_link(self.p, "[[A new page#section 3|this name]]") eq_("/en-US/kb/new?title=A+new+page#section_3", link.attr("href")) eq_("this name", link.text()) def test_link_with_localization(self): """A link to an English doc with a local translation.""" en_d = DocumentFactory(title="A doc") ApprovedRevisionFactory(document=en_d) fr_d = DocumentFactory(parent=en_d, title="Une doc", locale="fr") # Without an approved revision, link should go to en-US doc. # The site should stay in fr locale (/<locale>/<en-US slug>). 
link = pq(self.p.parse("[[A doc]]", locale="fr")) eq_("/fr/kb/a-doc", link.find("a").attr("href")) eq_("A doc", link.find("a").text()) # Approve a revision. Now link should go to fr doc. ApprovedRevisionFactory(document=fr_d) link = pq(self.p.parse("[[A doc]]", locale="fr")) eq_("/fr/kb/une-doc", link.find("a").attr("href")) eq_("Une doc", link.find("a").text()) class TestWikiImageTags(TestCase): def setUp(self): self.d, self.r, self.p = doc_rev_parser("Test content", "Installing Firefox") self.img = ImageFactory(title="test.jpg") def tearDown(self): self.img.delete() def test_empty(self): """Empty image tag markup does not change.""" img = pq_img(self.p, "[[Image:]]", "p") eq_('The image "" does not exist.', img.text()) def test_simple(self): """Simple image tag markup.""" img = pq_img(self.p, "[[Image:test.jpg]]", "img") eq_("test.jpg", img.attr("alt")) eq_(self.img.file.url, img.attr("src")) def test_simple_fallback(self): """Fallback to English if current locale doesn't have the image.""" img = pq_img(self.p, "[[Image:test.jpg]]", selector="img", locale="ja") eq_("test.jpg", img.attr("alt")) eq_(self.img.file.url, img.attr("src")) def test_full_fallback(self): """Find current locale's image, not the English one.""" # first, pretend there is no English version self.img.locale = "ja" self.img.save() img = pq_img(self.p, "[[Image:test.jpg]]", selector="img", locale="ja") eq_("test.jpg", img.attr("alt")) eq_(self.img.file.url, img.attr("src")) # then, create an English version en_img = ImageFactory(title="test.jpg", locale="en-US") # Ensure they're not equal self.assertNotEqual(en_img.file.url, self.img.file.url) # make sure there is no fallback img = pq_img(self.p, "[[Image:test.jpg]]", selector="img", locale="ja") eq_("test.jpg", img.attr("alt")) eq_(self.img.file.url, img.attr("src")) # now delete the English version self.img.delete() self.img = en_img # don't break tearDown img = pq_img(self.p, "[[Image:test.jpg]]", selector="img", locale="ja") eq_("test.jpg", 
img.attr("alt")) eq_(self.img.file.url, img.attr("src")) def test_caption(self): """Give the image a caption.""" self.img.title = "img test.jpg" self.img.save() img_div = pq_img(self.p, "[[Image:img test.jpg|frame|my caption]]", "div.img") img = img_div("img") caption = img_div.text() eq_(self.img.file.url, img.attr("src")) eq_("my caption", img.attr("alt")) eq_("my caption", caption) def test_page_link(self): """Link to a wiki page.""" img_a = pq_img(self.p, "[[Image:test.jpg|page=Installing Firefox]]", "a") img = img_a("img") eq_("test.jpg", img.attr("alt")) eq_(self.img.file.url, img.attr("src")) eq_("/en-US/kb/installing-firefox", img_a.attr("href")) def test_page_link_edit(self): """Link to a nonexistent wiki page.""" img_a = pq_img(self.p, "[[Image:test.jpg|page=Article List]]", "a") img = img_a("img") eq_("test.jpg", img.attr("alt")) eq_(self.img.file.url, img.attr("src")) assert img_a.hasClass("new") eq_("/en-US/kb/new?title=Article+List", img_a.attr("href")) def test_page_link_caption(self): """Link to a wiki page with caption and frame.""" img_div = pq_img(self.p, "[[Image:test.jpg|frame|page=A page|my caption]]", "div.img") img_a = img_div("a") img = img_a("img") caption = img_div.text() eq_("my caption", img.attr("alt")) eq_("my caption", caption) eq_(self.img.file.url, img.attr("src")) assert img_a.hasClass("new") eq_("/en-US/kb/new?title=A+page", img_a.attr("href")) def test_link(self): """Link to an external page.""" img_a = pq_img(self.p, "[[Image:test.jpg|link=http://test.com]]", "a") img = img_a("img") eq_("test.jpg", img.attr("alt")) eq_(self.img.file.url, img.attr("src")) eq_("http://test.com", img_a.attr("href")) def test_link_caption(self): """Link to an external page with caption.""" img_div = pq_img(self.p, "[[Image:test.jpg|link=http://ab.us|frame|caption]]", "div.img") img = img_div("img") img_a = img_div("a") eq_(self.img.file.url, img.attr("src")) eq_("http://ab.us", img_a.attr("href")) def test_link_align(self): """Link with align.""" 
img_div = pq_img(self.p, "[[Image:test.jpg|link=http://site.com|align=left]]", "div.img") eq_("img align-left", img_div.attr("class")) def test_link_align_invalid(self): """Link with invalid align.""" img = pq_img(self.p, "[[Image:test.jpg|link=http://example.ro|align=inv]]") assert "frameless" in img.attr("class") def test_link_valign(self): """Link with valign.""" img = pq_img(self.p, "[[Image:test.jpg|link=http://example.com|valign=top]]") eq_("vertical-align: top;", img.attr("style")) def test_link_valign_invalid(self): """Link with invalid valign.""" img = pq_img(self.p, "[[Image:test.jpg|link=http://example.com|valign=off]]") eq_(None, img.attr("style")) def test_alt(self): """Image alt attribute is overriden but caption is not.""" img_div = pq_img(self.p, "[[Image:test.jpg|alt=my alt|frame|my caption]]", "div.img") img = img_div("img") caption = img_div.text() eq_("my alt", img.attr("alt")) eq_("my caption", caption) def test_alt_empty(self): """Image alt attribute can be empty.""" img = pq_img(self.p, "[[Image:test.jpg|alt=|my caption]]") eq_("", img.attr("alt")) def test_alt_unsafe(self): """Potentially unsafe alt content is escaped.""" unsafe_vals = ( ( 'an"<script>alert()</script>', "an&quot;&amp;amp;lt;script&amp;amp;gt;alert()&amp;amp;lt;/script&amp;amp;gt;", ), ( "an'<script>alert()</script>", "an'&lt;script&gt;alert()&lt;/script&gt;", ), ("single'\"double", "single'&quot;double"), ) for alt_sent, alt_expected in unsafe_vals: img = pq_img(self.p, "[[Image:test.jpg|alt=" + alt_sent + "]]") is_true = str(img).startswith('<img alt="' + alt_expected + '"') assert is_true, 'Expected "%s", sent "%s"' % (alt_expected, alt_sent) def test_width(self): """Image width attribute set.""" img = pq_img(self.p, "[[Image:test.jpg|width=10]]") eq_("10", img.attr("width")) def test_width_invalid(self): """Invalid image width attribute set to auto.""" img = pq_img(self.p, "[[Image:test.jpg|width=invalid]]") eq_(None, img.attr("width")) def test_height(self): """Image 
height attribute set.""" img = pq_img(self.p, "[[Image:test.jpg|height=10]]") eq_("10", img.attr("height")) def test_height_invalid(self): """Invalid image height attribute set to auto.""" img = pq_img(self.p, "[[Image:test.jpg|height=invalid]]") eq_(None, img.attr("height")) def test_frame(self): """Image has frame if specified.""" img_div = pq_img(self.p, "[[Image:test.jpg|frame|caption]]", "div.img") assert not img_div("img").hasClass("frameless") eq_("caption", img_div("img").attr("alt")) eq_("caption", img_div.text()) eq_(self.img.file.url, img_div("img").attr("src")) def test_frameless_link(self): """Image has frameless class and link if specified.""" img_a = pq_img(self.p, "[[Image:test.jpg|page=Installing Firefox]]", "a") img = img_a("img") assert "frameless" in img.attr("class") eq_("/en-US/kb/installing-firefox", img_a.attr("href"))
{ "repo_name": "mozilla/kitsune", "path": "kitsune/sumo/tests/test_parser.py", "copies": "1", "size": "21767", "license": "bsd-3-clause", "hash": -5786947999118487000, "line_mean": 36.018707483, "line_max": 99, "alpha_frac": 0.562824459, "autogenerated": false, "ratio": 3.484950368235671, "config_test": true, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.4547774827235671, "avg_score": null, "num_lines": null }
from functools import partial from django.conf import settings from nose.tools import eq_ from pyquery import PyQuery as pq from kitsune.gallery.tests import image from kitsune.sumo.parser import ( WikiParser, build_hook_params, _get_wiki_link, get_object_fallback, IMAGE_PARAMS, IMAGE_PARAM_VALUES) from kitsune.sumo.tests import TestCase from kitsune.wiki.models import Document from kitsune.wiki.tests import document, revision def pq_link(p, text): return pq(p.parse(text))('a') def pq_img(p, text, selector='img', locale=settings.WIKI_DEFAULT_LANGUAGE): doc = pq(p.parse(text, locale=locale)) return doc(selector) def doc_rev_parser(content, title='Installing Firefox', parser_cls=WikiParser): p = parser_cls() d = document(title=title) d.save() r = revision(document=d, content=content, is_approved=True) r.save() return (d, r, p) build_hook_params_default = partial(build_hook_params, locale=settings.WIKI_DEFAULT_LANGUAGE, allowed_params=IMAGE_PARAMS, allowed_param_values=IMAGE_PARAM_VALUES) class GetObjectFallbackTests(TestCase): def test_empty(self): """get_object_fallback returns message when no objects.""" # English does not exist obj = get_object_fallback(Document, 'A doc', 'en-US', '!') eq_('!', obj) def test_english(self): # Create the English document d = document(title='A doc') d.save() # Now it exists obj = get_object_fallback(Document, 'A doc', 'en-US', '!') eq_(d, obj) def test_from_french(self): # Create the English document d = document(title='A doc') d.save() # Returns English document for French obj = get_object_fallback(Document, 'A doc', 'fr', '!') eq_(d, obj) def test_french(self): # Create English parent document en_d = document() en_d.save() en_r = revision(document=en_d, is_approved=True) en_r.save() # Create the French document fr_d = document(parent=en_d, title='A doc', locale='fr') fr_d.save() obj = get_object_fallback(Document, 'A doc', 'fr', '!') eq_(fr_d, obj) # Also works when English exists d = document(title='A doc') d.save() obj = 
get_object_fallback(Document, 'A doc', 'fr', '!') eq_(fr_d, obj) def test_translated(self): """If a localization of the English fallback exists, use it.""" en_d = document(title='A doc') en_d.save() en_r = revision(document=en_d, is_approved=True) en_r.save() fr_d = document(parent=en_d, title='Une doc', locale='fr') fr_d.save() # Without an approved revision, the en-US doc should be returned. obj = get_object_fallback(Document, 'A doc', 'fr') eq_(en_d, obj) # Approve a revision, then fr doc should be returned. fr_r = revision(document=fr_d, is_approved=True) fr_r.save() obj = get_object_fallback(Document, 'A doc', 'fr') eq_(fr_d, obj) def test_redirect(self): """Assert get_object_fallback follows wiki redirects.""" target_rev = revision( document=document(title='target', save=True), is_approved=True, save=True) translated_target_rev = revision( document=document(parent=target_rev.document, locale='de', save=True), is_approved=True, save=True) revision( document=document(title='redirect', save=True), content='REDIRECT [[target]]', is_approved=True).save() eq_(translated_target_rev.document, get_object_fallback(Document, 'redirect', 'de')) def test_redirect_translations_only(self): """Make sure get_object_fallback doesn't follow redirects when working purely in the default language. That would make it hard to navigate to redirects (to edit them, for example). 
""" revision(document=document(title='target', save=True), content='O hai.', is_approved=True, save=True) redirect_rev = revision(document=document(title='redirect', save=True), content='REDIRECT [[target]]', is_approved=True, save=True) eq_(redirect_rev.document, get_object_fallback(Document, 'redirect', redirect_rev.document.locale)) class TestWikiParser(TestCase): def setUp(self): self.d, self.r, self.p = doc_rev_parser( 'Test content', 'Installing Firefox') def test_image_params_page(self): """build_hook_params handles wiki pages.""" _, params = build_hook_params_default('t|page=Installing Firefox') eq_('/en-US/kb/installing-firefox', params['link']) assert params['found'] def test_image_params_link(self): """_build_image_params handles external links.""" _, params = build_hook_params_default('t|link=http://example.com') eq_('http://example.com', params['link']) def test_image_params_page_link(self): """_build_image_params - wiki page overrides link.""" text = 't|page=Installing Firefox|link=http://example.com' _, params = build_hook_params_default(text) eq_('/en-US/kb/installing-firefox', params['link']) def test_image_params_align(self): """Align valid options.""" align_vals = ('none', 'left', 'center', 'right') for align in align_vals: _, params = build_hook_params_default('test.jpg|align=' + align) eq_(align, params['align']) def test_image_params_align_invalid(self): """Align invalid options.""" _, params = build_hook_params_default('align=zzz') assert not 'align' in params, 'Align is present in params' def test_image_params_valign(self): """Vertical align valid options.""" valign_vals = ('baseline', 'sub', 'super', 'top', 'text-top', 'middle', 'bottom', 'text-bottom') for valign in valign_vals: _, params = build_hook_params_default('title|valign=' + valign) eq_(valign, params['valign']) def test_image_params_valign_invalid(self): """Vertical align invalid options.""" _, params = build_hook_params_default('valign=zzz') assert not 'valign' in params, 
'Vertical align is present in params' def test_image_params_alt(self): """Image alt override.""" _, params = build_hook_params_default('t|alt=some alternative text') eq_('some alternative text', params['alt']) def test_image_params_frame(self): """Framed image.""" _, params = build_hook_params_default('title|frame') assert params['frame'] def test_image_params_width_height(self): """Image width.""" _, params = build_hook_params_default('t|width=10|height=20') eq_('10', params['width']) eq_('20', params['height']) def test_get_wiki_link(self): """Wiki links are properly built for existing pages.""" eq_({'found': True, 'url': '/en-US/kb/installing-firefox', 'text': 'Installing Firefox'}, _get_wiki_link('Installing Firefox', locale=settings.WIKI_DEFAULT_LANGUAGE)) def test_showfor(self): """<showfor> tags should be escaped, not obeyed.""" eq_('<p>&lt;showfor&gt;smoo&lt;/showfor&gt;</p>', self.p.parse('<showfor>smoo</showfor>').replace('\n', '')) def test_youtube_video(self): """Verify youtube embeds.""" urls = ['http://www.youtube.com/watch?v=oHg5SJYRHA0', 'https://youtube.com/watch?v=oHg5SJYRHA0' 'http://youtu.be/oHg5SJYRHA0' 'https://youtu.be/oHg5SJYRHA0'] for url in urls: doc = pq(self.p.parse('[[V:%s]]' % url)) assert doc('iframe')[0].attrib['src'].startswith( '//www.youtube.com/embed/oHg5SJYRHA0') def test_iframe_in_markup(self): """Verify iframe in wiki markup is escaped.""" doc = pq(self.p.parse('<iframe src="http://example.com"></iframe>')) eq_(0, len(doc('iframe'))) def test_iframe_hell_bug_898769(self): """Verify fix for bug 898769.""" content = """<iframe/src \/\/onload = prompt(1) <iframe/onreadystatechange=alert(/@blinkms/) <svg/onload=alert(1)""" eq_('<p>&lt;iframe &lt;="" \\="" onload="prompt(1)" p="" ' 'src=""&gt;</p><p>&lt;iframe onreadystatechange="' 'alert(/@blinkms/)" &lt;="" p=""&gt;</p><p>&lt;svg ' 'onload="alert(1)" &lt;="" p=""&gt;&lt;/iframe&gt;</p>', self.p.parse(content)) def test_injections(self): testdata = ( # Normal image urls ('<img 
src="https://example.com/nursekitty.jpg">', '<p><img src="https://example.com/nursekitty.jpg">\n</p>'), ('<img src=https://example.com/nursekitty.jpg />', '<p><img src="https://example.com/nursekitty.jpg">\n</p>'), ('<img src="https://example.com/nursekitty.jpg" />', '<p><img src="https://example.com/nursekitty.jpg">\n</p>'), ('<img src=https://example.com/nursekitty.jpg </img>', '<p><img src="https://example.com/nursekitty.jpg"></p>'), # Script insertions from OWASP site ('<IMG SRC=`javascript:alert("\'XSS\'")`>', '<p><img>\n</p>'), ('<IMG SRC=javascript:alert("XSS")>', '<p><img>\n</p>'), ('<IMG SRC=JaVaScRiPt:alert(\'XSS\')>', '<p><img>\n</p>'), ('<IMG SRC=javascript:alert(\'XSS\')>', '<p><img>\n</p>'), ('<IMG SRC="javascript:alert(\'XSS\');">', '<p><img>\n</p>'), ) for content, expected in testdata: eq_(expected, self.p.parse(content)) class TestWikiInternalLinks(TestCase): def setUp(self): self.d, self.r, self.p = doc_rev_parser( 'Test content', 'Installing Firefox') def test_simple(self): """Simple internal link markup.""" link = pq_link(self.p, '[[Installing Firefox]]') eq_('/en-US/kb/installing-firefox', link.attr('href')) eq_('Installing Firefox', link.text()) assert not link.hasClass('new') def test_simple_markup(self): text = '[[Installing Firefox]]' eq_('<p><a href="/en-US/kb/installing-firefox">' + 'Installing Firefox</a></p>', self.p.parse(text).replace('\n', '')) def test_link_hash(self): """Internal link with hash.""" link = pq_link(self.p, '[[Installing Firefox#section name]]') eq_('/en-US/kb/installing-firefox#section_name', link.attr('href')) eq_('Installing Firefox', link.text()) def test_link_hash_text(self): """Internal link with hash and text.""" link = pq_link(self.p, '[[Installing Firefox#section name|section]]') eq_('/en-US/kb/installing-firefox#section_name', link.attr('href')) eq_('section', link.text()) def test_hash_only(self): """Internal hash only.""" link = pq_link(self.p, '[[#section 3]]') eq_('#section_3', link.attr('href')) 
eq_('#section 3', link.text()) def test_link_name(self): """Internal link with name.""" link = pq_link(self.p, '[[Installing Firefox|this name]]') eq_('/en-US/kb/installing-firefox', link.attr('href')) eq_('this name', link.text()) def test_link_with_extra_pipe(self): link = pq_link(self.p, '[[Installing Firefox|with|pipe]]') eq_('/en-US/kb/installing-firefox', link.attr('href')) eq_('with|pipe', link.text()) def test_hash_name(self): """Internal hash with name.""" link = pq_link(self.p, '[[#section 3|this name]]') eq_('#section_3', link.attr('href')) eq_('this name', link.text()) assert not link.hasClass('new') def test_link_hash_name(self): """Internal link with hash and name.""" link = pq_link(self.p, '[[Installing Firefox#section 3|this name]]') eq_('/en-US/kb/installing-firefox#section_3', link.attr('href')) eq_('this name', link.text()) def test_link_hash_name_markup(self): """Internal link with hash and name.""" text = '[[Installing Firefox#section 3|this name]]' eq_('<p><a href="/en-US/kb/installing-firefox#section_3"' + '>this name</a>\n</p>', self.p.parse(text)) def test_simple_create(self): """Simple link for inexistent page.""" link = pq_link(self.p, '[[A new page]]') assert link.hasClass('new') eq_('/en-US/kb/new?title=A+new+page', link.attr('href')) eq_('A new page', link.text()) def test_link_edit_hash_name(self): """Internal link for inexistent page with hash and name.""" link = pq_link(self.p, '[[A new page#section 3|this name]]') eq_('/en-US/kb/new?title=A+new+page#section_3', link.attr('href')) eq_('this name', link.text()) def test_link_with_localization(self): """A link to an English doc with a local translation.""" en_d = document(title='A doc') en_d.save() en_r = revision(document=en_d, is_approved=True) en_r.save() fr_d = document(parent=en_d, title='Une doc', locale='fr') fr_d.save() # Without an approved revision, link should go to en-US doc. # The site should stay in fr locale (/<locale>/<en-US slug>). 
link = pq(self.p.parse('[[A doc]]', locale='fr')) eq_('/fr/kb/a-doc', link.find('a').attr('href')) eq_('A doc', link.find('a').text()) # Approve a revision. Now link should go to fr doc. fr_r = revision(document=fr_d, is_approved=True) fr_r.save() link = pq(self.p.parse('[[A doc]]', locale='fr')) eq_('/fr/kb/une-doc', link.find('a').attr('href')) eq_('Une doc', link.find('a').text()) class TestWikiImageTags(TestCase): def setUp(self): self.d, self.r, self.p = doc_rev_parser( 'Test content', 'Installing Firefox') self.img = image(title='test.jpg') def tearDown(self): self.img.delete() def test_empty(self): """Empty image tag markup does not change.""" img = pq_img(self.p, '[[Image:]]', 'p') eq_('The image "" does not exist.', img.text()) def test_simple(self): """Simple image tag markup.""" img = pq_img(self.p, '[[Image:test.jpg]]', 'img') eq_('test.jpg', img.attr('alt')) eq_(self.img.file.url, img.attr('src')) def test_simple_fallback(self): """Fallback to English if current locale doesn't have the image.""" img = pq_img(self.p, '[[Image:test.jpg]]', selector='img', locale='ja') eq_('test.jpg', img.attr('alt')) eq_(self.img.file.url, img.attr('src')) def test_full_fallback(self): """Find current locale's image, not the English one.""" # first, pretend there is no English version self.img.locale = 'ja' self.img.save() img = pq_img(self.p, '[[Image:test.jpg]]', selector='img', locale='ja') eq_('test.jpg', img.attr('alt')) eq_(self.img.file.url, img.attr('src')) # then, create an English version en_img = image(title='test.jpg') # Ensure they're not equal self.assertNotEquals(en_img.file.url, self.img.file.url) # make sure there is no fallback img = pq_img(self.p, '[[Image:test.jpg]]', selector='img', locale='ja') eq_('test.jpg', img.attr('alt')) eq_(self.img.file.url, img.attr('src')) # now delete the English version self.img.delete() self.img = en_img # don't break tearDown img = pq_img(self.p, '[[Image:test.jpg]]', selector='img', locale='ja') eq_('test.jpg', 
img.attr('alt')) eq_(self.img.file.url, img.attr('src')) def test_caption(self): """Give the image a caption.""" self.img.title = 'img test.jpg' self.img.save() img_div = pq_img(self.p, '[[Image:img test.jpg|frame|my caption]]', 'div.img') img = img_div('img') caption = img_div.text() eq_(self.img.file.url, img.attr('src')) eq_('my caption', img.attr('alt')) eq_('my caption', caption) def test_page_link(self): """Link to a wiki page.""" img_a = pq_img(self.p, '[[Image:test.jpg|page=Installing Firefox]]', 'a') img = img_a('img') eq_('test.jpg', img.attr('alt')) eq_(self.img.file.url, img.attr('src')) eq_('/en-US/kb/installing-firefox', img_a.attr('href')) def test_page_link_edit(self): """Link to a nonexistent wiki page.""" img_a = pq_img(self.p, '[[Image:test.jpg|page=Article List]]', 'a') img = img_a('img') eq_('test.jpg', img.attr('alt')) eq_(self.img.file.url, img.attr('src')) assert img_a.hasClass('new') eq_('/en-US/kb/new?title=Article+List', img_a.attr('href')) def test_page_link_caption(self): """Link to a wiki page with caption and frame.""" img_div = pq_img(self.p, '[[Image:test.jpg|frame|page=A page|my caption]]', 'div.img') img_a = img_div('a') img = img_a('img') caption = img_div.text() eq_('my caption', img.attr('alt')) eq_('my caption', caption) eq_(self.img.file.url, img.attr('src')) assert img_a.hasClass('new') eq_('/en-US/kb/new?title=A+page', img_a.attr('href')) def test_link(self): """Link to an external page.""" img_a = pq_img(self.p, '[[Image:test.jpg|link=http://test.com]]', 'a') img = img_a('img') eq_('test.jpg', img.attr('alt')) eq_(self.img.file.url, img.attr('src')) eq_('http://test.com', img_a.attr('href')) def test_link_caption(self): """Link to an external page with caption.""" img_div = pq_img(self.p, '[[Image:test.jpg|link=http://ab.us|frame|caption]]', 'div.img') img = img_div('img') img_a = img_div('a') eq_(self.img.file.url, img.attr('src')) eq_('http://ab.us', img_a.attr('href')) def test_link_align(self): """Link with align.""" 
img_div = pq_img(self.p, '[[Image:test.jpg|link=http://site.com|align=left]]', 'div.img') eq_('img align-left', img_div.attr('class')) def test_link_align_invalid(self): """Link with invalid align.""" img = pq_img(self.p, '[[Image:test.jpg|link=http://example.ro|align=inv]]') assert 'frameless' in img.attr('class') def test_link_valign(self): """Link with valign.""" img = pq_img(self.p, '[[Image:test.jpg|link=http://example.com|valign=top]]') eq_('vertical-align: top;', img.attr('style')) def test_link_valign_invalid(self): """Link with invalid valign.""" img = pq_img(self.p, '[[Image:test.jpg|link=http://example.com|valign=off]]') eq_(None, img.attr('style')) def test_alt(self): """Image alt attribute is overriden but caption is not.""" img_div = pq_img(self.p, '[[Image:test.jpg|alt=my alt|frame|my caption]]', 'div.img') img = img_div('img') caption = img_div.text() eq_('my alt', img.attr('alt')) eq_('my caption', caption) def test_alt_empty(self): """Image alt attribute can be empty.""" img = pq_img(self.p, '[[Image:test.jpg|alt=|my caption]]') eq_('', img.attr('alt')) def test_alt_unsafe(self): """Potentially unsafe alt content is escaped.""" unsafe_vals = ( ('an"<script>alert()</script>', 'an&quot;&amp;lt;script&amp;gt;alert()&amp;lt;/script&amp;gt;'), ("an'<script>alert()</script>", "an'&amp;lt;script&amp;gt;alert()&amp;lt;/script&amp;gt;"), ('single\'"double', "single'&quot;double"), ) for alt_sent, alt_expected in unsafe_vals: img = pq_img(self.p, '[[Image:test.jpg|alt=' + alt_sent + ']]') is_true = str(img).startswith('<img alt="' + alt_expected + '"') assert is_true, ('Expected "%s", sent "%s"' % (alt_expected, alt_sent)) def test_width(self): """Image width attribute set.""" img = pq_img(self.p, '[[Image:test.jpg|width=10]]') eq_('10', img.attr('width')) def test_width_invalid(self): """Invalid image width attribute set to auto.""" img = pq_img(self.p, '[[Image:test.jpg|width=invalid]]') eq_(None, img.attr('width')) def test_height(self): """Image height 
attribute set.""" img = pq_img(self.p, '[[Image:test.jpg|height=10]]') eq_('10', img.attr('height')) def test_height_invalid(self): """Invalid image height attribute set to auto.""" img = pq_img(self.p, '[[Image:test.jpg|height=invalid]]') eq_(None, img.attr('height')) def test_frame(self): """Image has frame if specified.""" img_div = pq_img(self.p, '[[Image:test.jpg|frame|caption]]', 'div.img') assert not img_div('img').hasClass('frameless') eq_('caption', img_div('img').attr('alt')) eq_('caption', img_div.text()) eq_(self.img.file.url, img_div('img').attr('src')) def test_frameless_link(self): """Image has frameless class and link if specified.""" img_a = pq_img(self.p, '[[Image:test.jpg|page=Installing Firefox]]', 'a') img = img_a('img') assert 'frameless' in img.attr('class') eq_('/en-US/kb/installing-firefox', img_a.attr('href'))
{ "repo_name": "dbbhattacharya/kitsune", "path": "kitsune/sumo/tests/test_parser.py", "copies": "1", "size": "22480", "license": "bsd-3-clause", "hash": 2436035140925263000, "line_mean": 36.28026534, "line_max": 79, "alpha_frac": 0.5479537367, "autogenerated": false, "ratio": 3.5190983093299937, "config_test": true, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.9566252596966299, "avg_score": 0.0001598898127388536, "num_lines": 603 }
from functools import partial from django.contrib.admin.checks import InlineModelAdminChecks from django.contrib.admin.options import InlineModelAdmin, flatten_fieldsets from django.contrib.contenttypes.fields import GenericForeignKey from django.contrib.contenttypes.forms import ( BaseGenericInlineFormSet, generic_inlineformset_factory, ) from django.core import checks from django.core.exceptions import FieldDoesNotExist from django.forms import ALL_FIELDS from django.forms.models import modelform_defines_fields class GenericInlineModelAdminChecks(InlineModelAdminChecks): def _check_exclude_of_parent_model(self, obj, parent_model): # There's no FK to exclude, so no exclusion checks are required. return [] def _check_relation(self, obj, parent_model): # There's no FK, but we do need to confirm that the ct_field and ct_fk_field are valid, # and that they are part of a GenericForeignKey. gfks = [ f for f in obj.model._meta.private_fields if isinstance(f, GenericForeignKey) ] if len(gfks) == 0: return [ checks.Error( "'%s.%s' has no GenericForeignKey." % ( obj.model._meta.app_label, obj.model._meta.object_name ), obj=obj.__class__, id='admin.E301' ) ] else: # Check that the ct_field and ct_fk_fields exist try: obj.model._meta.get_field(obj.ct_field) except FieldDoesNotExist: return [ checks.Error( "'ct_field' references '%s', which is not a field on '%s.%s'." % ( obj.ct_field, obj.model._meta.app_label, obj.model._meta.object_name ), obj=obj.__class__, id='admin.E302' ) ] try: obj.model._meta.get_field(obj.ct_fk_field) except FieldDoesNotExist: return [ checks.Error( "'ct_fk_field' references '%s', which is not a field on '%s.%s'." % ( obj.ct_fk_field, obj.model._meta.app_label, obj.model._meta.object_name ), obj=obj.__class__, id='admin.E303' ) ] # There's one or more GenericForeignKeys; make sure that one of them # uses the right ct_field and ct_fk_field. 
for gfk in gfks: if gfk.ct_field == obj.ct_field and gfk.fk_field == obj.ct_fk_field: return [] return [ checks.Error( "'%s.%s' has no GenericForeignKey using content type field '%s' and object ID field '%s'." % ( obj.model._meta.app_label, obj.model._meta.object_name, obj.ct_field, obj.ct_fk_field ), obj=obj.__class__, id='admin.E304' ) ] class GenericInlineModelAdmin(InlineModelAdmin): ct_field = "content_type" ct_fk_field = "object_id" formset = BaseGenericInlineFormSet checks_class = GenericInlineModelAdminChecks def get_formset(self, request, obj=None, **kwargs): if 'fields' in kwargs: fields = kwargs.pop('fields') else: fields = flatten_fieldsets(self.get_fieldsets(request, obj)) if self.exclude is None: exclude = [] else: exclude = list(self.exclude) exclude.extend(self.get_readonly_fields(request, obj)) if self.exclude is None and hasattr(self.form, '_meta') and self.form._meta.exclude: # Take the custom ModelForm's Meta.exclude into account only if the # GenericInlineModelAdmin doesn't define its own. exclude.extend(self.form._meta.exclude) exclude = exclude or None can_delete = self.can_delete and self.has_delete_permission(request, obj) defaults = { "ct_field": self.ct_field, "fk_field": self.ct_fk_field, "form": self.form, "formfield_callback": partial(self.formfield_for_dbfield, request=request), "formset": self.formset, "extra": self.get_extra(request, obj), "can_delete": can_delete, "can_order": False, "fields": fields, "min_num": self.get_min_num(request, obj), "max_num": self.get_max_num(request, obj), "exclude": exclude } defaults.update(kwargs) if defaults['fields'] is None and not modelform_defines_fields(defaults['form']): defaults['fields'] = ALL_FIELDS return generic_inlineformset_factory(self.model, **defaults) class GenericStackedInline(GenericInlineModelAdmin): template = 'admin/edit_inline/stacked.html' class GenericTabularInline(GenericInlineModelAdmin): template = 'admin/edit_inline/tabular.html'
{ "repo_name": "kosz85/django", "path": "django/contrib/contenttypes/admin.py", "copies": "45", "size": "5212", "license": "bsd-3-clause", "hash": -2612932643504894500, "line_mean": 38.1879699248, "line_max": 114, "alpha_frac": 0.5554489639, "autogenerated": false, "ratio": 4.350584307178631, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 1, "avg_score": 0.0010501711100455963, "num_lines": 133 }
from functools import partial from django.contrib.auth import BACKEND_SESSION_KEY from django.contrib.auth.views import login as auth_login from .forms import OTPAuthenticationForm, OTPTokenForm def login(request, **kwargs): """ This is a replacement for :func:`django.contrib.auth.views.login` that requires two-factor authentication. It's slightly clever: if the user is already authenticated but not verified, it will only ask the user for their OTP token. If the user is anonymous or is already verified by an OTP device, it will use the full username/password/token form. In order to use this, you must supply a template that is compatible with both :class:`~django_otp.forms.OTPAuthenticationForm` and :class:`~django_otp.forms.OTPTokenForm`. This is a good view for :setting:`OTP_LOGIN_URL`. Parameters are the same as :func:`~django.contrib.auth.views.login` except that this view always overrides ``authentication_form``. """ user = request.user if user.is_anonymous() or user.is_verified(): form = OTPAuthenticationForm else: form = partial(OTPTokenForm, user) # A minor hack to make django.contrib.auth.login happy user.backend = request.session[BACKEND_SESSION_KEY] kwargs['authentication_form'] = form return auth_login(request, **kwargs)
{ "repo_name": "robintema/django-otp", "path": "django_otp/views.py", "copies": "1", "size": "1365", "license": "bsd-2-clause", "hash": -9088907153582801000, "line_mean": 36.9166666667, "line_max": 79, "alpha_frac": 0.7223443223, "autogenerated": false, "ratio": 4.002932551319648, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 1, "avg_score": 0, "num_lines": 36 }
from functools import partial from django.contrib.contenttypes.generic import GenericInlineModelAdmin, GenericRelation from django.contrib.contenttypes.models import ContentType from django.core.exceptions import PermissionDenied from django.db import models from django.db.models.query import QuerySet from django.db.models.related import RelatedObject from django.forms.models import model_to_dict from django.http import HttpResponseRedirect from django.shortcuts import get_object_or_404 from django.template.response import TemplateResponse from django.utils.encoding import force_unicode from django.utils.safestring import mark_safe from django.utils.text import capfirst from django.utils.translation import ugettext as _ from xadmin.layout import Field, render_field from xadmin.plugins.actions import BaseActionView from xadmin.plugins.inline import InlineModelAdmin from xadmin.sites import site from xadmin.util import unquote, quote, model_format_dict from xadmin.views import BaseAdminPlugin, ModelAdminView, CreateAdminView, UpdateAdminView, DetailAdminView, ModelFormAdminView, DeleteAdminView, ListAdminView from xadmin.views.base import csrf_protect_m, filter_hook from xadmin.views.detail import DetailAdminUtil from reversion.models import Revision, Version from reversion.revisions import default_revision_manager, RegistrationError def _autoregister(admin, model, follow=None): """Registers a model with reversion, if required.""" if model._meta.proxy: raise RegistrationError("Proxy models cannot be used with django-reversion, register the parent class instead") if not admin.revision_manager.is_registered(model): follow = follow or [] for parent_cls, field in model._meta.parents.items(): follow.append(field.name) _autoregister(admin, parent_cls) admin.revision_manager.register( model, follow=follow, format=admin.reversion_format) def _registe_model(admin, model): if not hasattr(admin, 'revision_manager'): admin.revision_manager = default_revision_manager if not 
hasattr(admin, 'reversion_format'): admin.reversion_format = 'json' if not admin.revision_manager.is_registered(model): inline_fields = [] for inline in getattr(admin, 'inlines', []): inline_model = inline.model if issubclass(inline, GenericInlineModelAdmin): ct_field = inline.ct_field ct_fk_field = inline.ct_fk_field for field in model._meta.many_to_many: if isinstance(field, GenericRelation) and field.rel.to == inline_model and field.object_id_field_name == ct_fk_field and field.content_type_field_name == ct_field: inline_fields.append(field.name) _autoregister(admin, inline_model) else: fk_name = getattr(inline, 'fk_name', None) if not fk_name: for field in inline_model._meta.fields: if isinstance(field, (models.ForeignKey, models.OneToOneField)) and issubclass(model, field.rel.to): fk_name = field.name _autoregister(admin, inline_model, follow=[fk_name]) if not inline_model._meta.get_field(fk_name).rel.is_hidden(): accessor = inline_model._meta.get_field( fk_name).related.get_accessor_name() inline_fields.append(accessor) _autoregister(admin, model, inline_fields) def registe_models(admin_site=None): if admin_site is None: admin_site = site for model, admin in admin_site._registry.items(): if getattr(admin, 'reversion_enable', False): _registe_model(admin, model) class ReversionPlugin(BaseAdminPlugin): # The revision manager instance used to manage revisions. revision_manager = default_revision_manager # The serialization format to use when registering models with reversion. reversion_format = "json" # Whether to ignore duplicate revision data. 
ignore_duplicate_revisions = False reversion_enable = False def init_request(self, *args, **kwargs): return self.reversion_enable @property def revision_context_manager(self): """The revision context manager for this VersionAdmin.""" return self.revision_manager._revision_context_manager def get_revision_instances(self, obj): """Returns all the instances to be used in the object's revision.""" return [obj] def get_revision_data(self, obj, flag): """Returns all the revision data to be used in the object's revision.""" return dict( (o, self.revision_manager.get_adapter( o.__class__).get_version_data(o, flag)) for o in self.get_revision_instances(obj) ) def save_revision(self, obj, tag, comment): self.revision_manager.save_revision( self.get_revision_data(obj, tag), user=self.user, comment=comment, ignore_duplicates=self.ignore_duplicate_revisions, db=self.revision_context_manager.get_db(), ) def do_post(self, __): def _method(): self.revision_context_manager.set_user(self.user) comment = '' admin_view = self.admin_view if isinstance(admin_view, CreateAdminView): comment = _(u"Initial version.") elif isinstance(admin_view, UpdateAdminView): comment = _(u"Change version.") elif isinstance(admin_view, RevisionView): comment = _(u"Revert version.") elif isinstance(admin_view, RecoverView): comment = _(u"Rercover version.") elif isinstance(admin_view, DeleteAdminView): comment = _(u"Deleted %(verbose_name)s.") % { "verbose_name": self.opts.verbose_name} self.revision_context_manager.set_comment(comment) return __() return _method def post(self, __, request, *args, **kwargs): return self.revision_context_manager.create_revision(manage_manually=False)(self.do_post(__))() # def save_models(self, __): # self.revision_context_manager.create_revision(manage_manually=True)(__)() # if self.admin_view.org_obj is None: # self.save_revision(self.admin_view.new_obj, VERSION_ADD, _(u"Initial version.")) # else: # self.save_revision(self.admin_view.new_obj, VERSION_CHANGE, _(u"Change 
version.")) # def save_related(self, __): # self.revision_context_manager.create_revision(manage_manually=True)(__)() # def delete_model(self, __): # self.save_revision(self.admin_view.obj, VERSION_DELETE, \ # _(u"Deleted %(verbose_name)s.") % {"verbose_name": self.opts.verbose_name}) # self.revision_context_manager.create_revision(manage_manually=True)(__)() # Block Views def block_top_toolbar(self, context, nodes): recoverlist_url = self.admin_view.model_admin_url('recoverlist') nodes.append(mark_safe('<a class="btn btn-small" href="%s"><i class="icon-trash"></i> %s</a>' % (recoverlist_url, _(u"Recover deleted")))) def block_object_tools(self, context, nodes): obj = getattr( self.admin_view, 'org_obj', getattr(self.admin_view, 'obj', None)) if obj: revisionlist_url = self.admin_view.model_admin_url( 'revisionlist', quote(obj.pk)) nodes.append(mark_safe('<a href="%s" class="btn btn-small"><i class="icon-time"></i> %s</a>' % (revisionlist_url, _(u'History')))) class BaseReversionView(ModelAdminView): # The revision manager instance used to manage revisions. revision_manager = default_revision_manager # The serialization format to use when registering models with reversion. reversion_format = "json" # Whether to ignore duplicate revision data. ignore_duplicate_revisions = False # If True, then the default ordering of object_history and recover lists will be reversed. 
history_latest_first = False reversion_enable = False def init_request(self, *args, **kwargs): if not self.has_change_permission() and not self.has_add_permission(): raise PermissionDenied def _order_version_queryset(self, queryset): """Applies the correct ordering to the given version queryset.""" if self.history_latest_first: return queryset.order_by("-pk") return queryset.order_by("pk") class RecoverListView(BaseReversionView): recover_list_template = None def get_context(self): context = super(RecoverListView, self).get_context() opts = self.opts deleted = self._order_version_queryset( self.revision_manager.get_deleted(self.model)) context.update({ "opts": opts, "app_label": opts.app_label, "module_name": capfirst(opts.verbose_name), "title": _("Recover deleted %(name)s") % {"name": force_unicode(opts.verbose_name_plural)}, "deleted": deleted, "changelist_url": self.model_admin_url("changelist"), }) return context @csrf_protect_m def get(self, request, *args, **kwargs): context = self.get_context() return TemplateResponse( request, self.recover_list_template or self.get_template_list( "views/recover_list.html"), context, current_app=self.admin_site.name) class RevisionListView(BaseReversionView): object_history_template = None revision_diff_template = None def get_context(self): context = super(RevisionListView, self).get_context() opts = self.opts action_list = [ { "revision": version.revision, "url": self.model_admin_url('revision', quote(version.object_id), version.id), "version": version } for version in self._order_version_queryset(self.revision_manager.get_for_object_reference( self.model, self.obj.pk, ).select_related("revision__user")) ] context.update({ 'title': _('Change history: %s') % force_unicode(self.obj), 'action_list': action_list, 'module_name': capfirst(force_unicode(opts.verbose_name_plural)), 'object': self.obj, 'app_label': opts.app_label, "changelist_url": self.model_admin_url("changelist"), "update_url": self.model_admin_url("change", 
self.obj.pk), 'opts': opts, }) return context def get(self, request, object_id, *args, **kwargs): object_id = unquote(object_id) self.obj = self.get_object(object_id) if not self.has_change_permission(self.obj): raise PermissionDenied return self.get_response() def get_response(self): context = self.get_context() return TemplateResponse(self.request, self.object_history_template or self.get_template_list('views/model_history.html'), context, current_app=self.admin_site.name) def get_version_object(self, version): obj_version = version.object_version obj = obj_version.object obj._state.db = self.obj._state.db for field_name, pks in obj_version.m2m_data.items(): f = self.opts.get_field(field_name) if f.rel and isinstance(f.rel, models.ManyToManyRel): setattr(obj, f.name, f.rel.to._default_manager.get_query_set( ).filter(pk__in=pks).all()) detail = self.get_model_view(DetailAdminUtil, self.model, obj) return obj, detail def post(self, request, object_id, *args, **kwargs): object_id = unquote(object_id) self.obj = self.get_object(object_id) if not self.has_change_permission(self.obj): raise PermissionDenied params = self.request.POST if 'version_a' not in params or 'version_b' not in params: self.message_user(_("Must select two versions."), 'error') return self.get_response() version_a_id = params['version_a'] version_b_id = params['version_b'] if version_a_id == version_b_id: self.message_user( _("Please select two different versions."), 'error') return self.get_response() version_a = get_object_or_404(Version, pk=version_a_id) version_b = get_object_or_404(Version, pk=version_b_id) diffs = [] obj_a, detail_a = self.get_version_object(version_a) obj_b, detail_b = self.get_version_object(version_b) for f in (self.opts.fields + self.opts.many_to_many): if isinstance(f, RelatedObject): label = f.opts.verbose_name else: label = f.verbose_name value_a = f.value_from_object(obj_a) value_b = f.value_from_object(obj_b) is_diff = value_a != value_b if type(value_a) in (list, 
tuple) and type(value_b) in (list, tuple) \ and len(value_a) == len(value_b) and is_diff: is_diff = False for i in xrange(len(value_a)): if value_a[i] != value_a[i]: is_diff = True break if type(value_a) is QuerySet and type(value_b) is QuerySet: is_diff = list(value_a) != list(value_b) diffs.append((label, detail_a.get_field_result( f.name).val, detail_b.get_field_result(f.name).val, is_diff)) context = super(RevisionListView, self).get_context() context.update({ 'object': self.obj, 'opts': self.opts, 'version_a': version_a, 'version_b': version_b, 'revision_a_url': self.model_admin_url('revision', quote(version_a.object_id), version_a.id), 'revision_b_url': self.model_admin_url('revision', quote(version_b.object_id), version_b.id), 'diffs': diffs }) return TemplateResponse( self.request, self.revision_diff_template or self.get_template_list('views/revision_diff.html'), context, current_app=self.admin_site.name) @filter_hook def get_media(self): return super(RevisionListView, self).get_media() + self.vendor('xadmin.plugin.revision.js', 'xadmin.form.css') class BaseRevisionView(ModelFormAdminView): @filter_hook def get_revision(self): return self.version.field_dict @filter_hook def get_form_datas(self): datas = {"instance": self.org_obj, "initial": self.get_revision()} if self.request_method == 'post': datas.update( {'data': self.request.POST, 'files': self.request.FILES}) return datas @filter_hook def get_context(self): context = super(BaseRevisionView, self).get_context() context.update({ 'object': self.org_obj }) return context @filter_hook def get_media(self): return super(BaseRevisionView, self).get_media() + self.vendor('xadmin.plugin.revision.js') class DiffField(Field): def render(self, form, form_style, context): html = '' for field in self.fields: html += ('<div class="diff_field" rel="tooltip"><textarea class="org-data" style="display:none;">%s</textarea>%s</div>' % (_('Current: %s') % self.attrs.pop('orgdata', ''), render_field(field, form, form_style, 
context, template=self.template, attrs=self.attrs))) return html class RevisionView(BaseRevisionView): revision_form_template = None def init_request(self, object_id, version_id): self.detail = self.get_model_view( DetailAdminView, self.model, object_id) self.org_obj = self.detail.obj self.version = get_object_or_404( Version, pk=version_id, object_id=unicode(self.org_obj.pk)) self.prepare_form() def get_form_helper(self): helper = super(RevisionView, self).get_form_helper() diff_fields = {} version_data = self.version.field_dict for f in self.opts.fields: if f.value_from_object(self.org_obj) != version_data.get(f.name, None): diff_fields[f.name] = self.detail.get_field_result(f.name).val for k, v in diff_fields.items(): helper[k].wrap(DiffField, orgdata=v) return helper @filter_hook def get_context(self): context = super(RevisionView, self).get_context() context["title"] = _( "Revert %s") % force_unicode(self.model._meta.verbose_name) return context @filter_hook def get_response(self): context = self.get_context() context.update(self.kwargs or {}) form_template = self.revision_form_template return TemplateResponse( self.request, form_template or self.get_template_list( 'views/revision_form.html'), context, current_app=self.admin_site.name) @filter_hook def post_response(self): self.message_user(_('The %(model)s "%(name)s" was reverted successfully. 
You may edit it again below.') % {"model": force_unicode(self.opts.verbose_name), "name": unicode(self.new_obj)}, 'success') return HttpResponseRedirect(self.model_admin_url('change', self.new_obj.pk)) class RecoverView(BaseRevisionView): recover_form_template = None def init_request(self, version_id): if not self.has_change_permission() and not self.has_add_permission(): raise PermissionDenied self.version = get_object_or_404(Version, pk=version_id) self.org_obj = self.version.object_version.object self.prepare_form() @filter_hook def get_context(self): context = super(RecoverView, self).get_context() context["title"] = _("Recover %s") % self.version.object_repr return context @filter_hook def get_response(self): context = self.get_context() context.update(self.kwargs or {}) form_template = self.recover_form_template return TemplateResponse( self.request, form_template or self.get_template_list( 'views/recover_form.html'), context, current_app=self.admin_site.name) @filter_hook def post_response(self): self.message_user(_('The %(model)s "%(name)s" was recovered successfully. 
You may edit it again below.') % {"model": force_unicode(self.opts.verbose_name), "name": unicode(self.new_obj)}, 'success') return HttpResponseRedirect(self.model_admin_url('change', self.new_obj.pk)) class InlineDiffField(Field): def render(self, form, form_style, context): html = '' instance = form.instance if not instance.pk: return super(InlineDiffField, self).render(form, form_style, context) initial = form.initial opts = instance._meta detail = form.detail for field in self.fields: f = opts.get_field(field) f_html = render_field(field, form, form_style, context, template=self.template, attrs=self.attrs) if f.value_from_object(instance) != initial.get(field, None): current_val = detail.get_field_result(f.name).val html += ('<div class="diff_field" rel="tooltip"><textarea class="org-data" style="display:none;">%s</textarea>%s</div>' % (_('Current: %s') % current_val, f_html)) else: html += f_html return html # inline hack plugin class InlineRevisionPlugin(BaseAdminPlugin): def get_related_versions(self, obj, version, formset): """Retreives all the related Version objects for the given FormSet.""" object_id = obj.pk # Get the fk name. try: fk_name = formset.fk.name except AttributeError: # This is a GenericInlineFormset, or similar. fk_name = formset.ct_fk_field.name # Look up the revision data. revision_versions = version.revision.version_set.all() related_versions = dict([(related_version.object_id, related_version) for related_version in revision_versions if ContentType.objects.get_for_id(related_version.content_type_id).model_class() == formset.model and unicode(related_version.field_dict[fk_name]) == unicode(object_id)]) return related_versions def _hack_inline_formset_initial(self, revision_view, formset): """Hacks the given formset to contain the correct initial data.""" # Now we hack it to push in the data from the revision! 
initial = [] related_versions = self.get_related_versions( revision_view.org_obj, revision_view.version, formset) formset.related_versions = related_versions for related_obj in formset.queryset: if unicode(related_obj.pk) in related_versions: initial.append( related_versions.pop(unicode(related_obj.pk)).field_dict) else: initial_data = model_to_dict(related_obj) initial_data["DELETE"] = True initial.append(initial_data) for related_version in related_versions.values(): initial_row = related_version.field_dict pk_name = ContentType.objects.get_for_id( related_version.content_type_id).model_class()._meta.pk.name del initial_row[pk_name] initial.append(initial_row) # Reconstruct the forms with the new revision data. formset.initial = initial formset.forms = [formset._construct_form( n) for n in xrange(len(initial))] # Hack the formset to force a save of everything. def get_changed_data(form): return [field.name for field in form.fields] for form in formset.forms: form.has_changed = lambda: True form._get_changed_data = partial(get_changed_data, form=form) def total_form_count_hack(count): return lambda: count formset.total_form_count = total_form_count_hack(len(initial)) if self.request.method == 'GET' and formset.helper and formset.helper.layout: helper = formset.helper helper.filter(basestring).wrap(InlineDiffField) fake_admin_class = type(str('%s%sFakeAdmin' % (self.opts.app_label, self.opts.module_name)), (object, ), {'model': self.model}) for form in formset.forms: instance = form.instance if instance.pk: form.detail = self.get_view( DetailAdminUtil, fake_admin_class, instance) def instance_form(self, formset, **kwargs): admin_view = self.admin_view.admin_view if hasattr(admin_view, 'version') and hasattr(admin_view, 'org_obj'): self._hack_inline_formset_initial(admin_view, formset) return formset # action revision class ActionRevisionPlugin(BaseAdminPlugin): revision_manager = default_revision_manager reversion_enable = False def init_request(self, *args, 
**kwargs): return self.reversion_enable @property def revision_context_manager(self): return self.revision_manager._revision_context_manager def do_action_func(self, __): def _method(): self.revision_context_manager.set_user(self.user) action_view = self.admin_view comment = action_view.description % model_format_dict(self.opts) self.revision_context_manager.set_comment(comment) return __() return _method def do_action(self, __, queryset): return self.revision_context_manager.create_revision(manage_manually=False)(self.do_action_func(__))() class ReversionAdmin(object): model_icon = 'exchange' class VersionAdmin(object): model_icon = 'file' site.register(Revision, ReversionAdmin) site.register(Version, VersionAdmin) site.register_modelview( r'^recover/$', RecoverListView, name='%s_%s_recoverlist') site.register_modelview( r'^recover/([^/]+)/$', RecoverView, name='%s_%s_recover') site.register_modelview( r'^([^/]+)/revision/$', RevisionListView, name='%s_%s_revisionlist') site.register_modelview( r'^([^/]+)/revision/([^/]+)/$', RevisionView, name='%s_%s_revision') site.register_plugin(ReversionPlugin, ListAdminView) site.register_plugin(ReversionPlugin, ModelFormAdminView) site.register_plugin(ReversionPlugin, DeleteAdminView) site.register_plugin(InlineRevisionPlugin, InlineModelAdmin) site.register_plugin(ActionRevisionPlugin, BaseActionView)
{ "repo_name": "lipengyu/django-bootstrap", "path": "xadmin/plugins/xversion.py", "copies": "1", "size": "25291", "license": "bsd-3-clause", "hash": -7592035629259623000, "line_mean": 38.5171875, "line_max": 183, "alpha_frac": 0.6190344391, "autogenerated": false, "ratio": 3.969083490269931, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 1, "avg_score": 0.0006154962746645343, "num_lines": 640 }
from functools import partial from django.contrib.gis.db.models import aggregates class BaseSpatialFeatures(object): gis_enabled = True # Does the database contain a SpatialRefSys model to store SRID information? has_spatialrefsys_table = True # Does the backend support the django.contrib.gis.utils.add_srs_entry() utility? supports_add_srs_entry = True # Does the backend introspect GeometryField to its subtypes? supports_geometry_field_introspection = True # Does the backend support storing 3D geometries? supports_3d_storage = False # Reference implementation of 3D functions is: # http://postgis.net/docs/PostGIS_Special_Functions_Index.html#PostGIS_3D_Functions supports_3d_functions = False # Does the database support SRID transform operations? supports_transform = True # Do geometric relationship operations operate on real shapes (or only on bounding boxes)? supports_real_shape_operations = True # Can geometry fields be null? supports_null_geometries = True # Can the `distance` GeoQuerySet method be applied on geodetic coordinate systems? supports_distance_geodetic = True # Is the database able to count vertices on polygons (with `num_points`)? supports_num_points_poly = True # The following properties indicate if the database backend support # certain lookups (dwithin, left and right, relate, ...) 
supports_distances_lookups = True supports_left_right_lookups = False @property def supports_bbcontains_lookup(self): return 'bbcontains' in self.connection.ops.gis_operators @property def supports_contained_lookup(self): return 'contained' in self.connection.ops.gis_operators @property def supports_crosses_lookup(self): return 'crosses' in self.connection.ops.gis_operators @property def supports_dwithin_lookup(self): return 'dwithin' in self.connection.ops.gis_operators @property def supports_relate_lookup(self): return 'relate' in self.connection.ops.gis_operators # For each of those methods, the class will have a property named # `has_<name>_method` (defined in __init__) which accesses connection.ops # to determine GIS method availability. geoqueryset_methods = ( 'area', 'centroid', 'difference', 'distance', 'distance_spheroid', 'envelope', 'force_rhr', 'geohash', 'gml', 'intersection', 'kml', 'length', 'num_geom', 'perimeter', 'point_on_surface', 'reverse', 'scale', 'snap_to_grid', 'svg', 'sym_difference', 'transform', 'translate', 'union', 'unionagg', ) # Specifies whether the Collect and Extent aggregates are supported by the database @property def supports_collect_aggr(self): return aggregates.Collect not in self.connection.ops.disallowed_aggregates @property def supports_extent_aggr(self): return aggregates.Extent not in self.connection.ops.disallowed_aggregates @property def supports_make_line_aggr(self): return aggregates.MakeLine not in self.connection.ops.disallowed_aggregates def __init__(self, *args): super(BaseSpatialFeatures, self).__init__(*args) for method in self.geoqueryset_methods: # Add dynamically properties for each GQS method, e.g. has_force_rhr_method, etc. setattr(self.__class__, 'has_%s_method' % method, property(partial(BaseSpatialFeatures.has_ops_method, method=method))) def has_ops_method(self, method): return getattr(self.connection.ops, method, False)
{ "repo_name": "PetrDlouhy/django", "path": "django/contrib/gis/db/backends/base/features.py", "copies": "10", "size": "3628", "license": "bsd-3-clause", "hash": -1307533993389943800, "line_mean": 39.3111111111, "line_max": 94, "alpha_frac": 0.6992833517, "autogenerated": false, "ratio": 4.151029748283753, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.9850313099983752, "avg_score": null, "num_lines": null }
from functools import partial from django.contrib import admin from django.core.exceptions import ObjectDoesNotExist from django.forms.models import modelform_factory from django.contrib.admin.views.main import ChangeList from django.forms.models import model_to_dict from django.contrib.auth import get_user_model from widgy.admin import WidgyAdmin from widgy.forms import WidgyForm from widgy.models import Node from .models import Blog, BlogLayout, Tag User = get_user_model() class IsPublishedListFilter(admin.SimpleListFilter): title = 'Published' parameter_name = 'is_published' model = BlogLayout def lookups(self, request, model_admin): return ( ('0', 'No'), ('1', 'Yes'), ) def queryset(self, request, queryset): if self.value() == '0': return queryset.exclude( content__commits__root_node__content_id__in=self.model.objects.published() ).distinct() if self.value() == '1': return queryset.filter( content__commits__root_node__content_id__in=self.model.objects.published() ).distinct() class AuthorListFilter(admin.SimpleListFilter): title = 'Current author' parameter_name = 'author' def lookups(self, request, model_admin): for user in User.objects.filter(blog_bloglayout_set__isnull=False).distinct(): yield (str(user.pk), str(user)) def queryset(self, request, queryset): pk = self.value() if pk: layouts_by_this_author = BlogLayout.objects.filter(author__pk=pk) return queryset.filter( content__working_copy__content_id__in=layouts_by_this_author ).distinct() class BlogForm(WidgyForm): def __init__(self, *args, **kwargs): instance = kwargs.get('instance') if instance: try: content = instance.content.working_copy.content except ObjectDoesNotExist: pass else: opts = self._meta initial = model_to_dict(content, opts.fields, opts.exclude) initial.update(kwargs.get('initial', {})) kwargs['initial'] = initial super(BlogForm, self).__init__(*args, **kwargs) class BlogChangeList(ChangeList): def get_results(self, request): super(BlogChangeList, self).get_results(request) # This is 
like prefetch_related, but works with our GenericForeignKey Node.attach_content_instances(i.content.working_copy for i in self.result_list) class BlogAdmin(WidgyAdmin): form = BlogForm layout_model = BlogLayout # These are the fields that are actually stored in widgy, not the # owner. We copy them back and forth to make the editing interface # nicer. layout_proxy_fields = [ 'title', 'slug', 'date', 'author', 'image', 'summary', 'description', 'keywords', 'page_title', 'tags', ] list_filter = [IsPublishedListFilter, AuthorListFilter] list_display = ['title', 'author'] fieldsets = [ (None, { 'fields': [ 'title', 'date', 'author', 'image', 'summary', 'content', 'tags', ], }), ('Meta', { 'fields': ['description', 'keywords', 'slug', 'page_title'], 'classes': ['collapse', 'grp-collapse', 'collapse-closed', 'collapsed'], }), ] def get_queryset(self, request): return self.model.objects.select_related('content__working_copy') queryset = get_queryset def get_changelist(self, *args, **kwargs): return BlogChangeList def get_form(self, request, obj=None, **kwargs): # We need to get the fields for BlogLayout defaults = { 'formfield_callback': partial(self.formfield_for_dbfield, request=request), 'form': self.form, 'fields': self.layout_proxy_fields, } defaults.update(kwargs) LayoutModelForm = modelform_factory(self.layout_model, **defaults) LayoutForm = type('BlogLayoutForm', (self.form,), LayoutModelForm.base_fields) LayoutForm.layout_proxy_fields = self.layout_proxy_fields kwargs['form'] = LayoutForm return super(BlogAdmin, self).get_form(request, obj, **kwargs) def save_model(self, request, obj, form, change): layout_data = dict( (k, v) for k, v in form.cleaned_data.items() if k in self.layout_proxy_fields ) if not change: # adding tags = layout_data.pop('tags', []) field = self.model._meta.get_field('content') obj.content = field.add_root(obj, layout_data) obj.content.working_copy.content.tags = tags else: # editing content = obj.content.working_copy.content for field_name, 
value in layout_data.items(): setattr(content, field_name, value) content.save() return super(BlogAdmin, self).save_model(request, obj, form, change) admin.site.register(Blog, BlogAdmin) admin.site.register(Tag, admin.ModelAdmin)
{ "repo_name": "fusionbox/django-widgy-blog", "path": "widgy_blog/admin.py", "copies": "1", "size": "5267", "license": "bsd-2-clause", "hash": -8605161531530276000, "line_mean": 32.1257861635, "line_max": 90, "alpha_frac": 0.6005316119, "autogenerated": false, "ratio": 4.073472544470224, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 1, "avg_score": 0.0007423085568665333, "num_lines": 159 }
from functools import partial from django.contrib import admin from django.utils.datastructures import SortedDict from django.http import HttpResponseRedirect from django.contrib.admin.options import BaseModelAdmin from django.utils.deprecation import RenameMethodsBase from django.forms.widgets import MediaDefiningClass try: from functools import update_wrapper except ImportError: from django.utils.functional import update_wrapper class RenameBaseModelAdminMethods(MediaDefiningClass, RenameMethodsBase): renamed_methods = ( ('queryset', 'get_queryset', PendingDeprecationWarning), ) class ButtonableModelAdmin(admin.ModelAdmin): buttons = () def change_view(self, request, object_id, extra_context=None): obj = self.get_object(request, admin.util.unquote(object_id)) extra = {'buttons': self.get_buttons(request, obj).values()} extra.update(extra_context or {}) return super(ButtonableModelAdmin, self).change_view(request, object_id, extra_context=extra) def button_view_dispatcher(self, request, object_id, command): obj = self.get_object(request, admin.util.unquote(object_id)) response = self.get_buttons(request, obj)[command][0](request, obj) return response or HttpResponseRedirect(request.META['HTTP_REFERER']) def get_buttons(self, request, obj): """ Return a dictionary mapping the names of all buttons for this ModelAdmin to a tuple of (callable, name, description) for each button. 
Each button may assign 'condition', which chould be callable with following attrs: self, request, obj """ buttons = SortedDict() for name in self.buttons: handler = getattr(self, name) if getattr(handler, 'condition', lambda self, request, obj: True)(self, request, obj): buttons[name] = (handler, name, getattr(handler, 'short_description', name.replace('_', ' '))) return buttons def get_urls(self): from django.conf.urls import patterns, url def wrap(view): def wrapper(*args, **kwargs): return self.admin_site.admin_view(view)(*args, **kwargs) return update_wrapper(wrapper, view) return patterns('', *(url(r'^(\d+)/(%s)/$' % command, wrap(self.button_view_dispatcher)) for command in self.buttons) ) + super(ButtonableModelAdmin, self).get_urls() class ModelAdminWithForeignKeyLinksMetaclass(RenameBaseModelAdminMethods): def __new__(cls, name, bases, attrs): new_class = super(ModelAdminWithForeignKeyLinksMetaclass, cls).__new__(cls, name, bases, attrs) def foreign_key_link(instance, field): target = getattr(instance, field) return u'<a href="../../%s/%s/%s/">%s</a>' % ( target._meta.app_label, target._meta.model_name, target.pk, unicode(target)) for col in new_class.list_display: if col[:8] == 'link_to_': field_name = col[8:] method = partial(foreign_key_link, field=field_name) method.__name__ = col[8:] method.allow_tags = True method.admin_order_field = field_name setattr(new_class, col, method) return new_class class AdminURLMixin(object): def wrap(self, view): def wrapper(*args, **kwargs): return self.admin_site.admin_view(view)(*args, **kwargs) return update_wrapper(wrapper, view)
{ "repo_name": "uolter/django-model-admin-helper", "path": "src/admin_helpers.py", "copies": "1", "size": "3578", "license": "mit", "hash": 4890874406242098000, "line_mean": 35.8865979381, "line_max": 109, "alpha_frac": 0.6416992733, "autogenerated": false, "ratio": 4.224321133412042, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.5366020406712042, "avg_score": null, "num_lines": null }
from functools import partial from django.contrib import admin from fluent_contents import extensions from fluent_contents.admin.placeholdereditor import PlaceholderEditorInline, PlaceholderEditorAdmin from fluent_contents.models import PlaceholderData from fluent_contents.models.fields import PlaceholderField class PlaceholderFieldInline(PlaceholderEditorInline): """ The inline used to process placeholder fields. """ template = "admin/fluent_contents/placeholderfield/inline_init.html" class PlaceholderFieldAdmin(PlaceholderEditorAdmin): """ The base functionality for :class:`~django.contrib.admin.ModelAdmin` dialogs to display placeholder fields. This class loads the :class:`~fluent_contents.models.ContentItem` inlines, and initializes the frontend editor for the :class:`~fluent_contents.models.PlaceholderField`. The placeholder will be displayed in the admin: .. image:: /images/admin/placeholderfieldadmin1.png :width: 770px :height: 562px :alt: django-fluent-contents placeholder field preview """ placeholder_inline = PlaceholderFieldInline def get_form(self, request, obj=None, **kwargs): kwargs['formfield_callback'] = partial( self.formfield_for_dbfield, request=request, obj=obj) return super(PlaceholderFieldAdmin, self).get_form( request, obj=obj, **kwargs) def formfield_for_dbfield(self, db_field, **kwargs): obj = kwargs.pop('obj', None) if isinstance(db_field, PlaceholderField): kwargs['parent_object'] = obj return super(PlaceholderFieldAdmin, self).formfield_for_dbfield( db_field, **kwargs) def get_placeholder_data(self, request, obj=None): """ Return the data of the placeholder fields. """ # Return all placeholder fields in the model. 
if not hasattr(self.model, '_meta_placeholder_fields'): return [] data = [] for name, field in self.model._meta_placeholder_fields.items(): assert isinstance(field, PlaceholderField) data.append(PlaceholderData( slot=field.slot, title=field.verbose_name.capitalize(), fallback_language=None, # Information cant' be known by "render_placeholder" in the template. )) return data def get_all_allowed_plugins(self): """ Return which plugins are allowed by the placeholder fields. """ # Get all allowed plugins of the various placeholders together. if not hasattr(self.model, '_meta_placeholder_fields'): # No placeholder fields in the model, no need for inlines. return [] plugins = [] for name, field in self.model._meta_placeholder_fields.items(): assert isinstance(field, PlaceholderField) if field.plugins is None: # no limitations, so all is allowed return extensions.plugin_pool.get_plugins() else: plugins += field.plugins return list(set(plugins))
{ "repo_name": "jpotterm/django-fluent-contents", "path": "fluent_contents/admin/placeholderfield.py", "copies": "1", "size": "3147", "license": "apache-2.0", "hash": 6300135112442798000, "line_mean": 35.5930232558, "line_max": 111, "alpha_frac": 0.6596758818, "autogenerated": false, "ratio": 4.482905982905983, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.5642581864705983, "avg_score": null, "num_lines": null }
from functools import partial from django.contrib import auth from django.contrib.auth import load_backend from django.contrib.auth.backends import RemoteUserBackend from django.core.exceptions import ImproperlyConfigured from django.utils.deprecation import MiddlewareMixin from django.utils.functional import SimpleLazyObject class AuthenticationMiddleware(MiddlewareMixin): def process_request(self, request): assert hasattr(request, 'session'), ( "The Django authentication middleware requires session middleware " "to be installed. Edit your MIDDLEWARE setting to insert " "'django.contrib.sessions.middleware.SessionMiddleware' before " "'django.contrib.auth.middleware.AuthenticationMiddleware'." ) request.user = SimpleLazyObject(partial(auth.get_user, request)) class RemoteUserMiddleware(MiddlewareMixin): """ Middleware for utilizing Web-server-provided authentication. If request.user is not authenticated, then this middleware attempts to authenticate the username passed in the ``REMOTE_USER`` request header. If authentication is successful, the user is automatically logged in to persist the user in the session. The header used is configurable and defaults to ``REMOTE_USER``. Subclass this class and change the ``header`` attribute if you need to use a different header. """ # Name of request header to grab username from. This will be the key as # used in the request.META dictionary, i.e. the normalization of headers to # all uppercase and the addition of "HTTP_" prefix apply. header = "REMOTE_USER" force_logout_if_no_header = True def process_request(self, request): # AuthenticationMiddleware is required so that request.user exists. if not hasattr(request, 'user'): raise ImproperlyConfigured( "The Django remote user auth middleware requires the" " authentication middleware to be installed. 
Edit your" " MIDDLEWARE setting to insert" " 'django.contrib.auth.middleware.AuthenticationMiddleware'" " before the RemoteUserMiddleware class.") try: username = request.META[self.header] except KeyError: # If specified header doesn't exist then remove any existing # authenticated remote-user, or return (leaving request.user set to # AnonymousUser by the AuthenticationMiddleware). if self.force_logout_if_no_header and request.user.is_authenticated: self._remove_invalid_user(request) return # If the user is already authenticated and that user is the user we are # getting passed in the headers, then the correct user is already # persisted in the session and we don't need to continue. if request.user.is_authenticated: if request.user.get_username() == self.clean_username(username, request): return else: # An authenticated user is associated with the request, but # it does not match the authorized user in the header. self._remove_invalid_user(request) # We are seeing this user for the first time in this session, attempt # to authenticate the user. user = auth.authenticate(request, remote_user=username) if user: # User is valid. Set request.user and persist user in the session # by logging the user in. request.user = user auth.login(request, user) def clean_username(self, username, request): """ Allow the backend to clean the username, if the backend defines a clean_username method. """ backend_str = request.session[auth.BACKEND_SESSION_KEY] backend = auth.load_backend(backend_str) try: username = backend.clean_username(username) except AttributeError: # Backend has no clean_username method. pass return username def _remove_invalid_user(self, request): """ Remove the current authenticated user in the request which is invalid but only if the user is authenticated via the RemoteUserBackend. 
""" try: stored_backend = load_backend(request.session.get(auth.BACKEND_SESSION_KEY, '')) except ImportError: # backend failed to load auth.logout(request) else: if isinstance(stored_backend, RemoteUserBackend): auth.logout(request) class PersistentRemoteUserMiddleware(RemoteUserMiddleware): """ Middleware for Web-server provided authentication on logon pages. Like RemoteUserMiddleware but keeps the user authenticated even if the header (``REMOTE_USER``) is not found in the request. Useful for setups when the external authentication via ``REMOTE_USER`` is only expected to happen on some "logon" URL and the rest of the application wants to use Django's authentication mechanism. """ force_logout_if_no_header = False
{ "repo_name": "georgemarshall/django", "path": "django/contrib/auth/middleware.py", "copies": "1", "size": "5191", "license": "bsd-3-clause", "hash": 3993536972514674700, "line_mean": 42.9915254237, "line_max": 92, "alpha_frac": 0.6709689848, "autogenerated": false, "ratio": 4.920379146919431, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.6091348131719431, "avg_score": null, "num_lines": null }
from functools import partial from django.contrib import messages as django_messages from django.template import loader from django.utils import safestring from rest_framework.request import Request import jinja2 """ This file was created because AMO wants to have multi-line messages including a title and some content. Django's messages framework only takes a single string. Importing this file should behave exactly like Django's messages framework except it will take a 3rd argument as message content (the second is the message title). """ class DoubleSafe(safestring.SafeData, jinja2.Markup): """Double safe all the way: marks safe for django and jinja2. Even though we're using jinja2 for most of the template rendering, we may have places where it's Django deciding whether the data is safe or not. An example is the messaging framework. If we add a new message that is marked safe for jinja2 (using a Markup object), it's not persisted that way by Django, and we thus loose the "safeness" of the message. This serves to give us the best of both worlds. """ def _make_message(title=None, message=None, title_safe=False, message_safe=False): c = {'title': title, 'message': message, 'title_safe': title_safe, 'message_safe': message_safe} t = loader.get_template('message_content.html').render(c) return DoubleSafe(t) def _is_dupe(msg, request): """Returns whether a particular message is already cued for display.""" storage = django_messages.get_messages(request) # If there are no messages stored, Django doesn't give us a proper storage # object, so just bail early. if not storage: return False try: smsg = unicode(msg) is_dupe = False for message in storage: if unicode(message) == smsg: # We can't return from here because we need to tell Django not # to consume the messages. 
is_dupe = True break except (UnicodeDecodeError, UnicodeEncodeError): return False storage.used = False return is_dupe def _file_message(type_, request, title, message=None, extra_tags='', fail_silently=False, title_safe=False, message_safe=False): msg = _make_message(title, message, title_safe, message_safe) # Don't save duplicates. if _is_dupe(msg, request): return if isinstance(request, Request): # Support for passing of django-rest-framework wrapped request objects request = request._request getattr(django_messages, type_)(request, msg, extra_tags, fail_silently) debug = partial(_file_message, 'debug') info = partial(_file_message, 'info') success = partial(_file_message, 'success') warning = partial(_file_message, 'warning') error = partial(_file_message, 'error')
{ "repo_name": "tsl143/addons-server", "path": "src/olympia/amo/messages.py", "copies": "1", "size": "2878", "license": "bsd-3-clause", "hash": 66513485841310880, "line_mean": 32.8588235294, "line_max": 79, "alpha_frac": 0.6879777623, "autogenerated": false, "ratio": 4.1410071942446045, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 1, "avg_score": 0, "num_lines": 85 }
from functools import partial from django.contrib import messages as django_messages from django.template import loader from django.utils import safestring import jinja2 import six from rest_framework.request import Request """ This file was created because AMO wants to have multi-line messages including a title and some content. Django's messages framework only takes a single string. Importing this file should behave exactly like Django's messages framework except it will take a 3rd argument as message content (the second is the message title). """ class DoubleSafe(safestring.SafeData, jinja2.Markup): """Double safe all the way: marks safe for django and jinja2. Even though we're using jinja2 for most of the template rendering, we may have places where it's Django deciding whether the data is safe or not. An example is the messaging framework. If we add a new message that is marked safe for jinja2 (using a Markup object), it's not persisted that way by Django, and we thus loose the "safeness" of the message. This serves to give us the best of both worlds. """ def _make_message(title=None, message=None, title_safe=False, message_safe=False): c = {'title': title, 'message': message, 'title_safe': title_safe, 'message_safe': message_safe} t = loader.get_template('message_content.html').render(c) return DoubleSafe(t) def _is_dupe(msg, request): """Returns whether a particular message is already cued for display.""" storage = django_messages.get_messages(request) # If there are no messages stored, Django doesn't give us a proper storage # object, so just bail early. if not storage: return False try: smsg = six.text_type(msg) is_dupe = False for message in storage: if six.text_type(message) == smsg: # We can't return from here because we need to tell Django not # to consume the messages. 
is_dupe = True break except (UnicodeDecodeError, UnicodeEncodeError): return False storage.used = False return is_dupe def _file_message(type_, request, title, message=None, extra_tags='', fail_silently=False, title_safe=False, message_safe=False): msg = _make_message(title, message, title_safe, message_safe) # Don't save duplicates. if _is_dupe(msg, request): return if isinstance(request, Request): # Support for passing of django-rest-framework wrapped request objects request = request._request getattr(django_messages, type_)(request, msg, extra_tags, fail_silently) debug = partial(_file_message, 'debug') info = partial(_file_message, 'info') success = partial(_file_message, 'success') warning = partial(_file_message, 'warning') error = partial(_file_message, 'error')
{ "repo_name": "wagnerand/olympia", "path": "src/olympia/amo/messages.py", "copies": "2", "size": "2903", "license": "bsd-3-clause", "hash": -7315835593926434000, "line_mean": 31.9886363636, "line_max": 79, "alpha_frac": 0.6879090596, "autogenerated": false, "ratio": 4.106082036775106, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.5793991096375106, "avg_score": null, "num_lines": null }
from functools import partial from django.contrib import messages as django_messages from django.template import loader from django.utils import safestring import jinja2 from rest_framework.request import Request """ This file was created because AMO wants to have multi-line messages including a title and some content. Django's messages framework only takes a single string. Importing this file should behave exactly like Django's messages framework except it will take a 3rd argument as message content (the second is the message title). """ class DoubleSafe(safestring.SafeData, jinja2.Markup): """Double safe all the way: marks safe for django and jinja2. Even though we're using jinja2 for most of the template rendering, we may have places where it's Django deciding whether the data is safe or not. An example is the messaging framework. If we add a new message that is marked safe for jinja2 (using a Markup object), it's not persisted that way by Django, and we thus loose the "safeness" of the message. This serves to give us the best of both worlds. """ def _make_message(title=None, message=None, title_safe=False, message_safe=False): c = {'title': title, 'message': message, 'title_safe': title_safe, 'message_safe': message_safe} t = loader.get_template('message_content.html').render(c) return DoubleSafe(t) def _is_dupe(msg, request): """Returns whether a particular message is already cued for display.""" storage = django_messages.get_messages(request) # If there are no messages stored, Django doesn't give us a proper storage # object, so just bail early. if not storage: return False try: smsg = unicode(msg) is_dupe = False for message in storage: if unicode(message) == smsg: # We can't return from here because we need to tell Django not # to consume the messages. 
is_dupe = True break except (UnicodeDecodeError, UnicodeEncodeError): return False storage.used = False return is_dupe def _file_message(type_, request, title, message=None, extra_tags='', fail_silently=False, title_safe=False, message_safe=False): msg = _make_message(title, message, title_safe, message_safe) # Don't save duplicates. if _is_dupe(msg, request): return if isinstance(request, Request): # Support for passing of django-rest-framework wrapped request objects request = request._request getattr(django_messages, type_)(request, msg, extra_tags, fail_silently) debug = partial(_file_message, 'debug') info = partial(_file_message, 'info') success = partial(_file_message, 'success') warning = partial(_file_message, 'warning') error = partial(_file_message, 'error')
{ "repo_name": "lavish205/olympia", "path": "src/olympia/amo/messages.py", "copies": "3", "size": "2880", "license": "bsd-3-clause", "hash": 7823471964134682000, "line_mean": 32.1034482759, "line_max": 79, "alpha_frac": 0.6875, "autogenerated": false, "ratio": 4.137931034482759, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.6325431034482759, "avg_score": null, "num_lines": null }
from functools import partial from django.contrib import messages as django_messages from django.utils import safestring from rest_framework.request import Request import jinja2 from jingo import get_env """ This file was created because AMO wants to have multi-line messages including a title and some content. Django's messages framework only takes a single string. Importing this file should behave exactly like Django's messages framework except it will take a 3rd argument as message content (the second is the message title). """ class DoubleSafe(safestring.SafeData, jinja2.Markup): """Double safe all the way: marks safe for django and jinja2. Even though we're using jinja2 for most of the template rendering, we may have places where it's Django deciding whether the data is safe or not. An example is the messaging framework. If we add a new message that is marked safe for jinja2 (using a Markup object), it's not persisted that way by Django, and we thus loose the "safeness" of the message. This serves to give us the best of both worlds. """ def _make_message(title=None, message=None, title_safe=False, message_safe=False): c = {'title': title, 'message': message, 'title_safe': title_safe, 'message_safe': message_safe} t = get_env().get_template('message_content.html').render(c) return DoubleSafe(t) def _is_dupe(msg, request): """Returns whether a particular message is already cued for display.""" storage = django_messages.get_messages(request) # If there are no messages stored, Django doesn't give us a proper storage # object, so just bail early. if not storage: return False try: smsg = unicode(msg) is_dupe = False for message in storage: if unicode(message) == smsg: # We can't return from here because we need to tell Django not # to consume the messages. 
is_dupe = True break except (UnicodeDecodeError, UnicodeEncodeError): return False storage.used = False return is_dupe def _file_message(type_, request, title, message=None, extra_tags='', fail_silently=False, title_safe=False, message_safe=False): msg = _make_message(title, message, title_safe, message_safe) # Don't save duplicates. if _is_dupe(msg, request): return if isinstance(request, Request): # Support for passing of django-rest-framework wrapped request objects request = request._request getattr(django_messages, type_)(request, msg, extra_tags, fail_silently) debug = partial(_file_message, 'debug') info = partial(_file_message, 'info') success = partial(_file_message, 'success') warning = partial(_file_message, 'warning') error = partial(_file_message, 'error')
{ "repo_name": "andymckay/olympia", "path": "src/olympia/amo/messages.py", "copies": "7", "size": "2872", "license": "bsd-3-clause", "hash": 4328277233735383600, "line_mean": 32.7882352941, "line_max": 79, "alpha_frac": 0.686281337, "autogenerated": false, "ratio": 4.108726752503577, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.8295008089503576, "avg_score": null, "num_lines": null }
from functools import partial from django.contrib import messages as django_messages from django.utils import safestring import jinja2 from jingo import env """ This file was created because AMO wants to have multi-line messages including a title and some content. Django's messages framework only takes a single string. Importing this file should behave exactly like Django's messages framework except it will take a 3rd argument as message content (the second is the message title). """ class DoubleSafe(safestring.SafeData, jinja2.Markup): """Double safe all the way: marks safe for django and jinja2. Even though we're using jinja2 for most of the template rendering, we may have places where it's Django deciding whether the data is safe or not. An example is the messaging framework. If we add a new message that is marked safe for jinja2 (using a Markup object), it's not persisted that way by Django, and we thus loose the "safeness" of the message. This serves to give us the best of both worlds. """ def _make_message(title=None, message=None, title_safe=False, message_safe=False): c = {'title': title, 'message': message, 'title_safe': title_safe, 'message_safe': message_safe} t = env.get_template('message_content.html').render(c) return DoubleSafe(t) def _is_dupe(msg, request): """Returns whether a particular message is already cued for display.""" storage = django_messages.get_messages(request) # If there are no messages stored, Django doesn't give us a proper storage # object, so just bail early. if not storage: return False try: smsg = unicode(msg) is_dupe = False for message in storage: if unicode(message) == smsg: # We can't return from here because we need to tell Django not # to consume the messages. 
is_dupe = True break except (UnicodeDecodeError, UnicodeEncodeError): return False storage.used = False return is_dupe def _file_message(type_, request, title, message=None, extra_tags='', fail_silently=False, title_safe=False, message_safe=False): msg = _make_message(title, message, title_safe, message_safe) # Don't save duplicates. if _is_dupe(msg, request): return getattr(django_messages, type_)(request, msg, extra_tags, fail_silently) debug = partial(_file_message, 'debug') info = partial(_file_message, 'info') success = partial(_file_message, 'success') warning = partial(_file_message, 'warning') error = partial(_file_message, 'error')
{ "repo_name": "mrrrgn/olympia", "path": "apps/amo/messages.py", "copies": "16", "size": "2666", "license": "bsd-3-clause", "hash": -9053123608125987000, "line_mean": 32.746835443, "line_max": 79, "alpha_frac": 0.6834208552, "autogenerated": false, "ratio": 4.051671732522796, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 1, "avg_score": 0, "num_lines": 79 }
from functools import partial from django.contrib import messages as django_messages import jinja2 from jingo import env """ This file was created because AMO wants to have multi-line messages including a title and some content. Django's messages framework only takes a single string. Importing this file should behave exactly like Django's messages framework except it will take a 3rd argument as message content (the second is the message title). """ def _make_message(title=None, message=None, title_safe=False, message_safe=False): c = {'title': title, 'message': message, 'title_safe': title_safe, 'message_safe': message_safe} t = env.get_template('message_content.html').render(c) return jinja2.Markup(t) def _is_dupe(msg, request): """Returns whether a particular message is already cued for display.""" storage = django_messages.get_messages(request) # If there are no messages stored, Django doesn't give us a proper storage # object, so just bail early. if not storage: return False try: smsg = unicode(msg) is_dupe = False for message in storage: if unicode(message) == smsg: # We can't return from here because we need to tell Django not # to consume the messages. is_dupe = True break except (UnicodeDecodeError, UnicodeEncodeError): return False storage.used = False return is_dupe def _file_message(type_, request, title, message=None, extra_tags='', fail_silently=False, title_safe=False, message_safe=False): msg = _make_message(title, message, title_safe, message_safe) # Don't save duplicates. if _is_dupe(msg, request): return getattr(django_messages, type_)(request, msg, extra_tags, fail_silently) debug = partial(_file_message, 'debug') info = partial(_file_message, 'info') success = partial(_file_message, 'success') warning = partial(_file_message, 'warning') error = partial(_file_message, 'error')
{ "repo_name": "clouserw/olympia", "path": "apps/amo/messages.py", "copies": "3", "size": "2100", "license": "bsd-3-clause", "hash": -3861206516338948600, "line_mean": 32.8709677419, "line_max": 80, "alpha_frac": 0.6614285714, "autogenerated": false, "ratio": 4.093567251461988, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.6254995822861988, "avg_score": null, "num_lines": null }
from functools import partial from django.contrib import messages from django.contrib.auth import logout, update_session_auth_hash from django.contrib.auth.forms import PasswordResetForm from django.contrib.auth.tokens import default_token_generator from django.contrib.auth.views import password_reset, password_reset_done from django.core.urlresolvers import reverse from django.http.response import HttpResponseRedirect, Http404 from django.shortcuts import redirect from django.utils.safestring import mark_safe from django.views.decorators.csrf import csrf_protect from django.views.generic import CreateView from django.views.generic.base import TemplateView from django.conf import settings from accounts.forms import ( UserForm, InstructorForm, ChangePasswordForm, DeleteAccountForm, ChangeEmailForm, CreatePasswordForm) from accounts.models import Instructor from ctms.views import json_response from mysite.mixins import NotAnonymousRequiredMixin from psa.custom_backends import EmailAuth from psa.custom_django_storage import CustomDjangoStorage, CustomCode from psa.custom_django_strategy import CustomDjangoStrategy from .forms import SocialForm class AccountSettingsView(NotAnonymousRequiredMixin, TemplateView): template_name = 'accounts/settings.html' def get_instructor(self): try: return self.request.user.instructor except Instructor.DoesNotExist: return Instructor(user=self.request.user) def get_password_form_cls(self): return ChangePasswordForm if self.request.user.has_usable_password() else CreatePasswordForm def get(self, request): return self.render_to_response( dict( user_form=UserForm(instance=request.user), instructor_form=InstructorForm(instance=self.get_instructor()), password_form=self.get_password_form_cls()(instance=request.user), delete_account_form=DeleteAccountForm(instance=request.user), email_form=ChangeEmailForm(initial={'email': request.user.email}), person=request.user ) ) def post(self, request): instructor = self.get_instructor() form_name = { 
'user_form': partial(UserForm, instance=request.user), 'instructor_form': partial(InstructorForm, instance=instructor), 'email_form': partial(ChangeEmailForm, initial={'email': request.user.email}), 'password_form': partial(self.get_password_form_cls(), instance=request.user), 'delete_account_form': partial(DeleteAccountForm, instance=request.user), } form_save_part = { 'user_form': lambda form_obj: partial(form_obj.save), 'instructor_form': lambda form_obj: partial(form_obj.save), 'email_form': lambda form_obj: partial(form_obj.save, request, commit=False), 'password_form': lambda form_obj: partial(form_obj.save), 'delete_account_form': lambda form_obj: partial(form_obj.save), } kwargs = {} has_errors = False non_field_errors_list = [] do_email_saving = False def get_form_changed_data(form): data = form.changed_data if 'form_id' in form.changed_data: data.pop(data.index('form_id')) return data for form_id, form_cls in form_name.items(): if form_id in request.POST.getlist('form_id'): form = form_cls(data=request.POST) changed_data = get_form_changed_data(form) if form.is_valid() and changed_data: save = form_save_part[form_id](form) if form_id == 'email_form': do_email_saving = True email_save = save elif form_id == 'password_form': save() update_session_auth_hash(request, request.user) else: save() elif changed_data: has_errors = True non_field_errors_list.append(unicode(form.non_field_errors())) kwargs[form_id] = form else: kwargs[form_id] = form_cls() if do_email_saving: return email_save() kwargs['person'] = request.user if not has_errors: return HttpResponseRedirect(reverse('accounts:settings')) else: msg = u"Please correct errors below: <br> {}".format(u"<br>".join(non_field_errors_list)) messages.add_message(request, messages.WARNING, mark_safe(msg)) return self.render_to_response( kwargs ) class DeleteAccountView(NotAnonymousRequiredMixin, TemplateView): template_name = 'accounts/settings.html' def post(self, request): form = DeleteAccountForm(request.POST, 
instance=request.user) if form.is_valid(): form.save() logout(request) return HttpResponseRedirect(reverse('accounts:deleted')) return self.render_to_response( dict( user_form=UserForm(instance=request.user), instructor_form=InstructorForm(instance=request.user.instructor), password_form=ChangePasswordForm(), delete_account_form=form, person=request.user ) ) class ProfileUpdateView(NotAnonymousRequiredMixin, CreateView): template_name = 'accounts/profile_edit.html' model = Instructor form_class = SocialForm def get_success_url(self): if self.request.POST.get('next'): return self.request.POST.get('next') return reverse('ctms:my_courses') def get_initial(self): return {'user': self.request.user} def get_form_kwargs(self): """ Returns the keyword arguments for instantiating the form. """ kwargs = { 'initial': self.get_initial(), 'prefix': self.get_prefix(), 'instance': self.get_instance() } if self.request.method in ('POST', 'PUT'): kwargs.update({ 'data': self.request.POST, 'files': self.request.FILES, }) return kwargs def get_instance(self): try: instructor = self.request.user.instructor except self.request.user._meta.model.instructor.RelatedObjectDoesNotExist: instructor = None return instructor def get(self, request): instructor = self.get_instance() if instructor is not None and instructor.institution and instructor.what_do_you_teach: return redirect(self.request.GET.get('next') or self.get_success_url()) else: form = self.get_form() return self.render_to_response({'form': form}) @csrf_protect def custom_password_reset(request, template_name='registration/password_reset_form.html', email_template_name='registration/password_reset_email.html', subject_template_name='registration/password_reset_subject.txt', password_reset_form=PasswordResetForm, token_generator=default_token_generator, post_reset_redirect=None, from_email=settings.EMAIL_FROM, current_app=None, extra_context=None, html_email_template_name=None): response = password_reset(request, 
template_name=template_name, email_template_name=email_template_name, subject_template_name=subject_template_name, password_reset_form=password_reset_form, token_generator=token_generator, post_reset_redirect=post_reset_redirect, from_email=from_email, current_app=current_app, extra_context=extra_context, html_email_template_name=html_email_template_name) if request.method == 'POST' and isinstance(response, HttpResponseRedirect): request.session['anonym_user_email'] = request.POST.get('email') return response def custom_password_reset_done(request, template_name='registration/password_reset_done.html', current_app=None, extra_context=None): if extra_context: extra_context.update({'anonym_user_email': request.session.get('anonym_user_email')}) else: extra_context = {'anonym_user_email': request.session.get('anonym_user_email')} return password_reset_done(request, template_name=template_name, current_app=current_app, extra_context=extra_context) def resend_email_confirmation_link(request): email = request.POST.get('email') session_email = request.session.get('resend_user_email') cc_id = request.session.get('cc_id') if session_email != email: raise Http404() if request.user.is_authenticated(): logout(request) request.session['resend_user_email'] = session_email request.session['cc_id'] = cc_id def resend(request): if CustomCode.objects.filter(email=email, verified=True).count(): return {'ok': 0, 'error': 'Email {} already verified!'.format(email)} try: post = request.POST.dict() cc = CustomCode.objects.filter(pk=request.session.get('cc_id')).first() fields = ['first_name', 'last_name', 'institution', 'next'] if cc: for field in fields: if field == 'next': post[field] = getattr(cc, 'next_page', None) else: post[field] = getattr(cc, field, '') request.POST = post strategy = CustomDjangoStrategy(CustomDjangoStorage, request=request) strategy.send_email_validation(EmailAuth, email) return {'ok': 1} except Exception as e: return {'ok': 0, 'error': e.message} return 
json_response(resend(request))
{ "repo_name": "raccoongang/socraticqs2", "path": "mysite/accounts/views.py", "copies": "1", "size": "10523", "license": "apache-2.0", "hash": 3352352073568282600, "line_mean": 40.2666666667, "line_max": 101, "alpha_frac": 0.5948873895, "autogenerated": false, "ratio": 4.4177162048698575, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.5512603594369857, "avg_score": null, "num_lines": null }
from functools import partial from django.core.exceptions import PermissionDenied import commonware from rest_framework.mixins import (CreateModelMixin, DestroyModelMixin, RetrieveModelMixin, UpdateModelMixin) from rest_framework.permissions import BasePermission from rest_framework.relations import HyperlinkedRelatedField from rest_framework.response import Response from rest_framework.serializers import (HyperlinkedModelSerializer, ValidationError) from rest_framework.viewsets import GenericViewSet import amo from addons.models import AddonUpsell from mkt.api.authorization import (AllowAppOwner, PermissionAuthorization, switch) from mkt.api.base import AppViewSet, CompatRelatedField from mkt.constants.payments import PAYMENT_STATUSES from mkt.developers.forms_payments import PaymentCheckForm from mkt.developers.models import AddonPaymentAccount from mkt.webapps.models import Webapp from lib.pay_server import get_client log = commonware.log.getLogger('z.api.payments') class PaymentSerializer(HyperlinkedModelSerializer): upsell = HyperlinkedRelatedField(read_only=True, required=False, view_name='app-upsell-detail') account = HyperlinkedRelatedField(read_only=True, required=False, source='app_payment_account', view_name='app-payment-account-detail') class Meta: model = Webapp fields = ('upsell', 'account', 'url') view_name = 'app-payments-detail' class PaymentViewSet(RetrieveModelMixin, GenericViewSet): permission_classes = (AllowAppOwner,) queryset = Webapp.objects.filter() serializer_class = PaymentSerializer class UpsellSerializer(HyperlinkedModelSerializer): free = premium = CompatRelatedField( tastypie={'resource_name': 'app', 'api_name': 'apps'}, view_name='api_dispatch_detail') class Meta: model = AddonUpsell fields = ('free', 'premium', 'created', 'modified', 'url') view_name = 'app-upsell-detail' def validate(self, attrs): if attrs['free'].premium_type not in amo.ADDON_FREES: raise ValidationError('Upsell must be from a free app.') if 
attrs['premium'].premium_type in amo.ADDON_FREES: raise ValidationError('Upsell must be to a premium app.') return attrs class UpsellPermission(BasePermission): """ Permissions on the upsell object, is determined by permissions on the free and premium object. """ def check(self, request, free, premium): allow = AllowAppOwner() for app in free, premium: if app and not allow.has_object_permission(request, '', app): return False return True def has_object_permission(self, request, view, object): return self.check(request, object.free, object.premium) class UpsellViewSet(CreateModelMixin, DestroyModelMixin, RetrieveModelMixin, UpdateModelMixin, GenericViewSet): permission_classes = (switch('allow-b2g-paid-submission'), UpsellPermission,) queryset = AddonUpsell.objects.filter() serializer_class = UpsellSerializer def pre_save(self, obj): if not UpsellPermission().check(self.request, obj.free, obj.premium): raise PermissionDenied('Not allowed to alter that object') class AddonPaymentAccountPermission(BasePermission): """ Permissions on the app payment account object, is determined by permissions on the app the account is being used for. 
""" def check(self, request, app, account): if AllowAppOwner().has_object_permission(request, '', app): if account.shared or account.user.pk == request.amo_user.pk: return True else: log.info('AddonPaymentAccount access %(account)s denied ' 'for %(user)s: wrong user, not shared.'.format( {'account': account.pk, 'user': request.amo_user.pk})) else: log.info('AddonPaymentAccount access %(account)s denied ' 'for %(user)s: no app permission.'.format( {'account': account.pk, 'user': request.amo_user.pk})) return False def has_object_permission(self, request, view, object): return self.check(request, object.addon, object.payment_account) class AddonPaymentAccountSerializer(HyperlinkedModelSerializer): addon = CompatRelatedField( source='addon', tastypie={'resource_name': 'app', 'api_name': 'apps'}, view_name='api_dispatch_detail') payment_account = CompatRelatedField( tastypie={'resource_name': 'account', 'api_name': 'payments'}, view_name='api_dispatch_detail') class Meta: model = AddonPaymentAccount fields = ('addon', 'payment_account', 'provider', 'created', 'modified', 'url') view_name = 'app-payment-account-detail' def validate(self, attrs): if attrs['addon'].premium_type in amo.ADDON_FREES: raise ValidationError('App must be a premium app.') return attrs class AddonPaymentAccountViewSet(CreateModelMixin, RetrieveModelMixin, UpdateModelMixin, GenericViewSet): permission_classes = (AddonPaymentAccountPermission,) queryset = AddonPaymentAccount.objects.filter() serializer_class = AddonPaymentAccountSerializer def pre_save(self, obj): if not AddonPaymentAccountPermission().check(self.request, obj.addon, obj.payment_account): raise PermissionDenied('Not allowed to alter that object.') if self.request.method != 'POST': addon = obj.__class__.objects.no_cache().get(pk=obj.pk).addon if not obj.addon == addon: # This should be a 400 error. 
raise PermissionDenied('Cannot change the add-on.') def post_save(self, obj, created=False): """Ensure that the setup_bango method is called after creation.""" if created: uri = obj.__class__.setup_bango(obj.provider, obj.addon, obj.payment_account) obj.product_uri = uri obj.save() class PaymentCheckViewSet(AppViewSet): permission_classes = (AllowAppOwner,) form = PaymentCheckForm def create(self, request, *args, **kwargs): """ We aren't actually creating objects, but proxying them through to solitude. """ if not self.app: return Response('', status=400) self.check_object_permissions(request, self.app) client = get_client() res = client.api.bango.status.post( data={'seller_product_bango': self.app.app_payment_account.account_uri}) filtered = { 'bango': { 'status': PAYMENT_STATUSES[res['status']], 'errors': '' }, } return Response(filtered, status=200) class PaymentDebugViewSet(AppViewSet): permission_classes = (partial(PermissionAuthorization, 'Transaction', 'Debug',),) form = PaymentCheckForm def list(self, request, *args, **kwargs): if not self.app: return Response('', status=400) client = get_client() res = client.api.bango.debug.get( data={'seller_product_bango': self.app.app_payment_account.account_uri}) filtered = { 'bango': res['bango'], } return Response(filtered, status=200)
{ "repo_name": "Joergen/zamboni", "path": "mkt/developers/api_payments.py", "copies": "1", "size": "7832", "license": "bsd-3-clause", "hash": -8710100176168871000, "line_mean": 35.2592592593, "line_max": 79, "alpha_frac": 0.6271705822, "autogenerated": false, "ratio": 4.417371686407219, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 1, "avg_score": 0.00021326878704698254, "num_lines": 216 }
from functools import partial from django.core.exceptions import ValidationError from django.db.models.fields import NOT_PROVIDED from nap.utils import digattr class field(property): '''A base class to compare against.''' def __new__(cls, *args, **kwargs): ''' Allow specifying keyword arguments when used as a decorator. ''' if not args: return partial(field, **kwargs) return super(field, cls).__new__(cls, *args, **kwargs) def __init__(self, *args, **kwargs): self.required = kwargs.pop('required', False) self.default = kwargs.pop('default', NOT_PROVIDED) super(field, self).__init__(*args, **kwargs) def __get__(self, instance, cls=None): if instance is None: return self return self.fget(instance._obj) def __set__(self, instance, value): if self.fset is None: return self.fset(instance._obj, value) class context_field(field): '''Special case of field that allows access to the Mapper itself''' def __get__(self, instance, cls=None): if instance is None: return self return self.fget(self, instance._obj) def __set__(self, instance, value): if self.fset is None: return return self.fset(self, instance._obj, value) class Field(field): ''' class V(DataMapper): foo = Field('bar', default=1) ''' def __init__(self, name, default=NOT_PROVIDED, filters=None, required=True): self.name = name self.default = default self.filters = filters or [] self.required = required def __get__(self, instance, cls=None): if instance is None: return self value = getattr(instance._obj, self.name, self.default) for filt in self.filters: try: value = filt.from_python(value) except (TypeError, ValueError): raise ValidationError('Invalid value') return value def __set__(self, instance, value): for filt in self.filters[::-1]: value = filt.to_python(value) setattr(instance._obj, self.name, value) class DigField(Field): def __get__(self, instance, cls=None): if instance is None: return self return digattr(instance._obj, self.name, self.default) def __set__(self, instance, value): raise NotImplementedError 
class MapperField(Field): def __init__(self, *args, **kwargs): self.mapper = kwargs.pop('mapper') super(MapperField, self).__init__(*args, **kwargs) def __get__(self, instance, cls=None): if instance is None: return self value = super(MapperField, self).__get__(instance, cls) mapper = self.mapper() return mapper << value def __set__(self, instance, value): mapper = self.mapper(instance) mapper._update(value, update=True)
{ "repo_name": "limbera/django-nap", "path": "nap/datamapper/fields.py", "copies": "1", "size": "3007", "license": "bsd-3-clause", "hash": 4938488024841692000, "line_mean": 28.1941747573, "line_max": 71, "alpha_frac": 0.5836381776, "autogenerated": false, "ratio": 4.147586206896552, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 1, "avg_score": 0, "num_lines": 103 }
from functools import partial from django.core.exceptions import ValidationError from cyder.base.tests import ModelTestMixin from cyder.core.ctnr.models import Ctnr from cyder.core.system.models import System from cyder.cydns.tests.utils import create_reverse_domain, create_zone, DNSTest from cyder.cydns.ip.utils import ip_to_reverse_name from cyder.cydns.domain.models import Domain from cyder.cydns.ptr.models import PTR from cyder.cydns.ip.models import Ip from cyder.cydhcp.interface.static_intr.models import StaticInterface from cyder.cydhcp.network.models import Network from cyder.cydhcp.range.models import Range from cyder.cydhcp.vrf.models import Vrf class PTRTests(DNSTest, ModelTestMixin): def setUp(self): super(PTRTests, self).setUp() Vrf.objects.create(name='test_vrf') self._128 = create_zone('128.in-addr.arpa') create_zone('8.ip6.arpa') self.c1 = Ctnr.objects.create(name='test_ctnr1') self.n = Network.objects.create( vrf=Vrf.objects.get(name='test_vrf'), ip_type='4', network_str='128.193.0.0/24') self.r = Range.objects.create( network=self.n, range_type='st', start_str='128.193.0.2', end_str='128.193.0.100') self.c1.ranges.add(self.r) for name in ('edu', 'oregonstate.edu', 'bar.oregonstate.edu', 'nothing', 'nothing.nothing', 'nothing.nothing.nothing'): d = Domain.objects.create(name=name) self.c1.domains.add(d) create_reverse_domain('8.6.2.0', ip_type='6') self.osu_block = "8620:105:F000:" self.create_network_range( network_str="8620:105::/32", start_str='8620:105:F000::1', end_str='8620:105:F000::1000', ip_type='6') def create_network_range(self, network_str, start_str, end_str, range_type="st", ip_type='4', domain=None): if domain is None: domain = Domain.objects.get(name="oregonstate.edu") n = Network.objects.create( vrf=Vrf.objects.get(name='test_vrf'), ip_type=ip_type, network_str=network_str) r = Range.objects.create( network=n, range_type=range_type, start_str=start_str, end_str=end_str, domain=domain, ip_type=ip_type) self.c1.ranges.add(r) def 
create_ptr(self, **kwargs): kwargs.setdefault('ctnr', self.c1) return PTR.objects.create(**kwargs) @property def objs(self): """Create objects for test_create_delete.""" return ( self.create_ptr( ip_str='128.123.123.2', ip_type='4', fqdn='a.oregonstate.edu'), self.create_ptr( ip_str='128.123.123.45', ip_type='4', fqdn='bbbbbbbbbbbbbb.nothing.nothing'), self.create_ptr( ip_str='128.123.123.197', ip_type='4', fqdn='c-c-c-c-c-c.nothing'), self.create_ptr( ip_str='128.123.123.254', ip_type='4', fqdn='d1d.edu'), ) def test_no_domain(self): for fqdn in ('lol.foo', 'oregonstate.com', 'me.oregondfastate.edu'): self.assertRaises( ValidationError, self.create_ptr, ip_str='244.123.123.123', ip_type='4', fqdn=fqdn) def test_invalid_name(self): ptr_v4 = self.create_ptr( ip_str='128.123.123.99', ip_type='4', fqdn='foo.oregonstate.edu') ptr_v6 = self.create_ptr( ip_str=(self.osu_block + ':1'), ip_type='6', fqdn='foo.oregonstate.edu') bad_fqdns = ( '2134!@#$!@', 'asdflj..com', 'A' * 257, '.oregonstate.edu', '%.s#.com') for fqdn in bad_fqdns: self.assertRaises( ValidationError, self.create_ptr, ip_str='128.123.123.123', ip_type='4', fqdn=fqdn) self.assertRaises( ValidationError, self.do_generic_update, ptr_v4, fqdn=fqdn) self.assertRaises( ValidationError, self.create_ptr, ip_str=(self.osu_block + ':2'), ip_type='6', fqdn=fqdn) self.assertRaises( ValidationError, self.do_generic_update, ptr_v6, fqdn=fqdn) def test_invalid_ip(self): ptr_v4 = self.create_ptr( ip_str='128.123.123.99', ip_type='4', fqdn='foo.oregonstate.edu') bad_ipv4_ips = ( '123.123', 'asdfasdf', 32141243, '128.123.123.123.123', '....', '1234.', None, False, True) for ip_str in bad_ipv4_ips: self.assertRaises( ValidationError, self.create_ptr, fqdn='oregonstate.edu', ip_str=ip_str, ip_type='4') self.assertRaises( ValidationError, self.do_generic_update, ptr_v4, ip_str=ip_str) ptr_v6 = self.create_ptr( ip_str=(self.osu_block + ':1'), ip_type='6', fqdn='foo.oregonstate.edu') bad_ipv6_ips = ( '123.123.123.123.', 
'123:!23:!23:', ':::', None, True, False, lambda x: x, '8::9:9:1', '11:9:9::1', '8.9.9.1', '11.9.9.1') for ip_str in bad_ipv6_ips: self.assertRaises( ValidationError, self.create_ptr, ip_str=ip_str, fqdn='oregonstate.edu', ip_type='6') self.assertRaises( ValidationError, self.do_generic_update, ptr_v6, ip_str=ip_str) def test_no_reverse_domain(self): self.assertRaises( ValidationError, self.create_ptr, fqdn='oregonstate.edu', ip_str='8.9.9.1', ip_type='4') self.assertRaises( ValidationError, self.create_ptr, fqdn='oregonstate.edu', ip_str='11.9.9.1', ip_type='4') def do_generic_remove(self, ip_str, fqdn, ip_type): ptr = PTR.objects.create( ip_str=ip_str, fqdn=fqdn, ip_type=ip_type, ctnr=self.c1) ptr.delete() ip = Ip(ip_str=ip_str, ip_type=ip_type) ip.clean_ip() self.assertFalse(PTR.objects.filter( fqdn=fqdn, ip_upper=ip.ip_upper, ip_lower=ip.ip_lower).exists()) def test_remove_ipv4(self): self.create_network_range( network_str='128.255.1.0/16', start_str='128.255.1.1', end_str='128.255.233.254') self.do_generic_remove( ip_str='128.255.233.244', ip_type='4', fqdn='asdf34foo.bar.oregonstate.edu') self.do_generic_remove( ip_str='128.255.11.13', ip_type='4', fqdn='fo124kfasdfko.bar.oregonstate.edu') self.do_generic_remove( ip_str='128.255.9.1', ip_type='4', fqdn='or1fdsaflkegonstate.edu') self.do_generic_remove( ip_str='128.255.1.7', ip_type='4', fqdn='12.bar.oregonstate.edu') self.do_generic_remove( ip_str='128.255.1.3', ip_type='4', fqdn='fcwoo.bar.oregonstate.edu') self.do_generic_remove( ip_str='128.255.1.2', ip_type='4', fqdn='asffad124jfasf-oregonstate.edu') def test_remove_ipv6(self): self.do_generic_remove( ip_str=(self.osu_block + ":1"), ip_type='6', fqdn='asdf34foo.bar.oregonstate.edu') self.do_generic_remove( ip_str=(self.osu_block + ":2"), ip_type='6', fqdn='fo124kfasdfko.bar.oregonstate.edu') self.do_generic_remove( ip_str=(self.osu_block + ":8"), ip_type='6', fqdn='or1fdsaflkegonstate.edu') self.do_generic_remove( ip_str=(self.osu_block + ":8"), 
ip_type='6', fqdn='12.bar.oregonstate.edu') self.do_generic_remove( ip_str=(self.osu_block + ":20"), ip_type='6', fqdn='fcwoo.bar.oregonstate.edu') self.do_generic_remove( ip_str=(self.osu_block + ":ad"), ip_type='6', fqdn='asffad124jfasf-oregonstate.edu') def do_generic_update(self, ptr, fqdn=None, ip_str=None): if fqdn is not None: ptr.fqdn = fqdn if ip_str is not None: ptr.ip_str = ip_str ptr.save() db_ptr = PTR.objects.get( fqdn=ptr.fqdn, ip_upper=ptr.ip_upper, ip_lower=ptr.ip_lower) self.assertEqual(ptr.fqdn, db_ptr.fqdn) self.assertEqual(ptr.ip_str, db_ptr.ip_str) def test_update_ipv4(self): self.create_network_range( network_str='128.193.1.0/24', start_str='128.193.1.1', end_str='128.193.1.100') ptr = self.create_ptr( ip_str='128.193.1.1', ip_type='4', fqdn='oregonstate.edu') self.do_generic_update(ptr, fqdn='nothing.nothing.nothing') self.do_generic_update(ptr, fqdn='google.edu') self.do_generic_update(ptr, fqdn='bar.oregonstate.edu') def test_update_ipv6(self): ptr = self.create_ptr( ip_str=(self.osu_block + ':1'), ip_type='6', fqdn='oregonstate.edu') self.do_generic_update(ptr, fqdn="nothing.nothing.nothing") self.do_generic_update(ptr, fqdn="google.edu") self.do_generic_update(ptr, fqdn="bar.oregonstate.edu") def test_ctnr_range(self): """Test that a PTR is allowed only in its IP's range's containers""" c2 = Ctnr.objects.create(name='test_ctnr2') r = self.r self.c1.ranges.add(r) self.create_ptr( fqdn='www1.oregonstate.edu', ip_str='128.193.0.2', ip_type='4', ctnr=self.c1) with self.assertRaises(ValidationError): self.create_ptr( fqdn='www2.oregonstate.edu', ip_str='128.193.0.3', ip_type='4', ctnr=c2) def test_target_existence(self): """Test that a PTR's target is not required to exist""" self.create_ptr( ip_str='128.193.0.2', fqdn='nonexistent.oregonstate.edu', ip_type='4') def test_domain_ctnr(self): """Test that a PTR's container is independent of its domain's container """ self.c1.domains.add(Domain.objects.get(name='oregonstate.edu')) c2 = 
Ctnr.objects.create(name='test_ctnr2') c2.ranges.add(self.r) self.create_ptr( ip_str='128.193.0.2', fqdn='foo1.oregonstate.edu', ip_type='4', ctnr=self.c1) self.create_ptr( ip_str='128.193.0.3', fqdn='foo2.oregonstate.edu', ip_type='4', ctnr=c2) def test_target_resembles_ip(self): """Test that a PTR's target cannot resemble an IP address""" for fqdn in ('10.234.30.253', '128.193.0.3', 'fe80::e1c9:1:228d:d8'): with self.assertRaises(ValidationError): self.create_ptr(ip_str='128.193.0.2', fqdn=fqdn, ip_type='4') def test_same_ip_as_static_intr(self): """Test that a PTR and a static inteface cannot share an IP (It doesn't matter whether the static interface is enabled.) """ def create_si(dns_enabled): s = System.objects.create(name='test_system', ctnr=self.c1) return StaticInterface.objects.create( mac='be:ef:fa:ce:12:34', label='foo1', domain=Domain.objects.get(name='oregonstate.edu'), ip_str='128.193.0.2', ip_type='4', system=s, ctnr=self.c1, dns_enabled=dns_enabled) create_si_enabled = partial(create_si, True) create_si_enabled.name = "StaticInterface with DNS enabled" create_si_disabled = partial(create_si, False) create_si_disabled.name = "StaticInterface with DNS disabled" def create_ptr(): return self.create_ptr( ip_str='128.193.0.2', ip_type='4', fqdn='foo2.oregonstate.edu') create_ptr.name = 'PTR' self.assertObjectsConflict((create_si_enabled, create_ptr)) self.assertObjectsConflict((create_si_disabled, create_ptr)) def test_same_ip(self): """Test that two PTRs cannot have the same IP""" self.create_ptr( ip_str='128.193.0.2', ip_type='4', fqdn='foo1.oregonstate.edu') with self.assertRaises(ValidationError): self.create_ptr( ip_str='128.193.0.2', ip_type='4', fqdn='foo2.oregonstate.edu') def test_ptr_in_dynamic_range(self): """Test that the IP cannot be in a dynamic range""" self.create_network_range( network_str='128.193.1.0/24', start_str='128.193.1.2', end_str='128.193.1.100', range_type='dy') with self.assertRaises(ValidationError): self.create_ptr( 
ip_str='128.193.1.2', ip_type='4', fqdn='foo.oregonstate.edu')
{ "repo_name": "murrown/cyder", "path": "cyder/cydns/ptr/tests/test_models.py", "copies": "1", "size": "12769", "license": "bsd-3-clause", "hash": -7142479321287074000, "line_mean": 37.3453453453, "line_max": 79, "alpha_frac": 0.5648837027, "autogenerated": false, "ratio": 3.2318400404960768, "config_test": true, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.9296417314318219, "avg_score": 0.00006128577557148985, "num_lines": 333 }
from functools import partial from django.db import connections, models, router from django.db.models.deletion import Collector from django.utils.encoding import python_2_unicode_compatible import bleach import six import olympia.core.logger from olympia.amo.fields import PositiveAutoField from olympia.amo.models import ManagerBase, ModelBase from olympia.amo.urlresolvers import linkify_bounce_url_callback from . import utils log = olympia.core.logger.getLogger('z.translations') class TranslationManager(ManagerBase): def remove_for(self, obj, locale): """Remove a locale for the given object.""" ids = [getattr(obj, f.attname) for f in obj._meta.translated_fields] qs = Translation.objects.filter(id__in=filter(None, ids), locale=locale) qs.update(localized_string=None, localized_string_clean=None) @python_2_unicode_compatible class Translation(ModelBase): """ Translation model. Use :class:`translations.fields.TranslatedField` instead of a plain foreign key to this model. """ autoid = PositiveAutoField(primary_key=True) id = models.PositiveIntegerField() locale = models.CharField(max_length=10) localized_string = models.TextField(null=True) localized_string_clean = models.TextField(null=True) objects = TranslationManager() class Meta: db_table = 'translations' unique_together = ('id', 'locale') def __str__(self): return ( six.text_type(self.localized_string) if self.localized_string else '') def __nonzero__(self): # __nonzero__ is called to evaluate an object in a boolean context. # We want Translations to be falsy if their string is empty. return (bool(self.localized_string) and bool(self.localized_string.strip())) def __eq__(self, other): # Django implements an __eq__ that only checks pks. We need to check # the strings if we're dealing with existing vs. unsaved Translations. 
return self.__cmp__(other) == 0 def __cmp__(self, other): def cmp(a, b): return (a > b) - (a < b) if hasattr(other, 'localized_string'): return cmp(self.localized_string, other.localized_string) else: return cmp(self.localized_string, other) def clean(self): if self.localized_string: self.localized_string = self.localized_string.strip() def save(self, **kwargs): self.clean() return super(Translation, self).save(**kwargs) def delete(self, using=None): # FIXME: if the Translation is the one used as default/fallback, # then deleting it will mean the corresponding field on the related # model will stay empty even if there are translations in other # languages! cls = self.__class__ using = using or router.db_for_write(cls, instance=self) # Look for all translations for the same string (id=self.id) except the # current one (autoid=self.autoid). qs = cls.objects.filter(id=self.id).exclude(autoid=self.autoid) if qs.using(using).exists(): # If other Translations for the same id exist, we just need to # delete this one and *only* this one, without letting Django # collect dependencies (it'd remove the others, which we want to # keep). assert self._get_pk_val() is not None collector = Collector(using=using) collector.collect([self], collect_related=False) # In addition, because we have FK pointing to a non-unique column, # we need to force MySQL to ignore constraints because it's dumb # and would otherwise complain even if there are remaining rows # that matches the FK. with connections[using].constraint_checks_disabled(): collector.delete() else: # If no other Translations with that id exist, then we should let # django behave normally. It should find the related model and set # the FKs to NULL. return super(Translation, self).delete(using=using) delete.alters_data = True @classmethod def new(cls, string, locale, id=None): """ Jumps through all the right hoops to create a new translation. If ``id`` is not given a new id will be created using ``translations_seq``. 
Otherwise, the id will be used to add strings to an existing translation. To increment IDs we use a setting on MySQL. This is to support multiple database masters -- it's just crazy enough to work! See bug 756242. """ if id is None: # Get a sequence key for the new translation. with connections['default'].cursor() as cursor: cursor.execute(""" UPDATE translations_seq SET id=LAST_INSERT_ID( id + @@global.auto_increment_increment ) """) # The sequence table should never be empty. But alas, if it is, # let's fix it. if not cursor.rowcount > 0: cursor.execute(""" INSERT INTO translations_seq (id) VALUES(LAST_INSERT_ID( id + @@global.auto_increment_increment )) """) cursor.execute('SELECT LAST_INSERT_ID()') id = cursor.fetchone()[0] # Update if one exists, otherwise create a new one. q = {'id': id, 'locale': locale} try: trans = cls.objects.get(**q) trans.localized_string = string except cls.DoesNotExist: trans = cls(localized_string=string, **q) return trans @python_2_unicode_compatible class PurifiedTranslation(Translation): """Run the string through bleach to get a safe version.""" allowed_tags = [ 'a', 'abbr', 'acronym', 'b', 'blockquote', 'code', 'em', 'i', 'li', 'ol', 'strong', 'ul', ] allowed_attributes = { 'a': ['href', 'title', 'rel'], 'abbr': ['title'], 'acronym': ['title'], } class Meta: proxy = True def __str__(self): if not self.localized_string_clean: self.clean() return six.text_type(self.localized_string_clean) def __html__(self): return six.text_type(self) def __truncate__(self, length, killwords, end): return utils.truncate(six.text_type(self), length, killwords, end) def clean(self): from olympia.amo.utils import clean_nl super(PurifiedTranslation, self).clean() cleaned = self.clean_localized_string() self.localized_string_clean = clean_nl(cleaned).strip() def clean_localized_string(self): # All links (text and markup) are normalized. 
linkify_filter = partial( bleach.linkifier.LinkifyFilter, callbacks=[linkify_bounce_url_callback, bleach.callbacks.nofollow]) # Keep only the allowed tags and attributes, escape the rest. cleaner = bleach.Cleaner( tags=self.allowed_tags, attributes=self.allowed_attributes, filters=[linkify_filter]) return cleaner.clean(six.text_type(self.localized_string)) class LinkifiedTranslation(PurifiedTranslation): """Run the string through bleach to get a linkified version.""" allowed_tags = ['a'] class Meta: proxy = True class NoLinksNoMarkupTranslation(LinkifiedTranslation): """Run the string through bleach, escape markup and strip all the links.""" class Meta: proxy = True def clean_localized_string(self): # First pass: bleach everything, but leave links untouched. cleaned = super(LinkifiedTranslation, self).clean_localized_string() # Second pass: call linkify to empty the inner text of all links. emptied_links = bleach.linkify( cleaned, callbacks=[lambda attrs, new: {'_text': ''}]) # Third pass: now strip links (only links will be stripped, other # forbidden tags are already bleached/escaped. allowed_tags = self.allowed_tags[:] # Make a copy. allowed_tags.remove('a') return bleach.clean(emptied_links, tags=allowed_tags, strip=True) class TranslationSequence(models.Model): """ The translations_seq table, so migrations will create it during testing. """ id = models.IntegerField(primary_key=True) class Meta: db_table = 'translations_seq' def delete_translation(obj, fieldname): field = obj._meta.get_field(fieldname) trans_id = getattr(obj, field.attname) obj.update(**{field.name: None}) if trans_id: Translation.objects.filter(id=trans_id).delete()
{ "repo_name": "aviarypl/mozilla-l10n-addons-server", "path": "src/olympia/translations/models.py", "copies": "1", "size": "9083", "license": "bsd-3-clause", "hash": -4284851252454963700, "line_mean": 33.6679389313, "line_max": 79, "alpha_frac": 0.6097104481, "autogenerated": false, "ratio": 4.252340823970037, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 1, "avg_score": 0, "num_lines": 262 }
from functools import partial from django.db import connections, models, router from django.db.models.deletion import Collector import bleach import olympia.core.logger from olympia.amo.fields import PositiveAutoField from olympia.amo.models import ManagerBase, ModelBase from olympia.amo.urlresolvers import linkify_bounce_url_callback from . import utils log = olympia.core.logger.getLogger('z.translations') class TranslationManager(ManagerBase): def remove_for(self, obj, locale): """Remove a locale for the given object.""" ids = [getattr(obj, f.attname) for f in obj._meta.translated_fields] qs = Translation.objects.filter(id__in=filter(None, ids), locale=locale) qs.update(localized_string=None, localized_string_clean=None) class Translation(ModelBase): """ Translation model. Use :class:`translations.fields.TranslatedField` instead of a plain foreign key to this model. """ autoid = PositiveAutoField(primary_key=True) id = models.PositiveIntegerField() locale = models.CharField(max_length=10) localized_string = models.TextField(null=True) localized_string_clean = models.TextField(null=True) objects = TranslationManager() class Meta: db_table = 'translations' unique_together = ('id', 'locale') def __unicode__(self): return self.localized_string and unicode(self.localized_string) or '' def __nonzero__(self): # __nonzero__ is called to evaluate an object in a boolean context. We # want Translations to be falsy if their string is empty. return (bool(self.localized_string) and bool(self.localized_string.strip())) def __eq__(self, other): # Django implements an __eq__ that only checks pks. We need to check # the strings if we're dealing with existing vs. unsaved Translations. 
return self.__cmp__(other) == 0 def __cmp__(self, other): if hasattr(other, 'localized_string'): return cmp(self.localized_string, other.localized_string) else: return cmp(self.localized_string, other) def clean(self): if self.localized_string: self.localized_string = self.localized_string.strip() def save(self, **kwargs): self.clean() return super(Translation, self).save(**kwargs) def delete(self, using=None): # FIXME: if the Translation is the one used as default/fallback, # then deleting it will mean the corresponding field on the related # model will stay empty even if there are translations in other # languages! cls = self.__class__ using = using or router.db_for_write(cls, instance=self) # Look for all translations for the same string (id=self.id) except the # current one (autoid=self.autoid). qs = cls.objects.filter(id=self.id).exclude(autoid=self.autoid) if qs.using(using).exists(): # If other Translations for the same id exist, we just need to # delete this one and *only* this one, without letting Django # collect dependencies (it'd remove the others, which we want to # keep). assert self._get_pk_val() is not None collector = Collector(using=using) collector.collect([self], collect_related=False) # In addition, because we have FK pointing to a non-unique column, # we need to force MySQL to ignore constraints because it's dumb # and would otherwise complain even if there are remaining rows # that matches the FK. with connections[using].constraint_checks_disabled(): collector.delete() else: # If no other Translations with that id exist, then we should let # django behave normally. It should find the related model and set # the FKs to NULL. return super(Translation, self).delete(using=using) delete.alters_data = True @classmethod def new(cls, string, locale, id=None): """ Jumps through all the right hoops to create a new translation. If ``id`` is not given a new id will be created using ``translations_seq``. 
Otherwise, the id will be used to add strings to an existing translation. To increment IDs we use a setting on MySQL. This is to support multiple database masters -- it's just crazy enough to work! See bug 756242. """ if id is None: # Get a sequence key for the new translation. with connections['default'].cursor() as cursor: cursor.execute(""" UPDATE translations_seq SET id=LAST_INSERT_ID( id + @@global.auto_increment_increment ) """) # The sequence table should never be empty. But alas, if it is, # let's fix it. if not cursor.rowcount > 0: cursor.execute(""" INSERT INTO translations_seq (id) VALUES(LAST_INSERT_ID( id + @@global.auto_increment_increment )) """) cursor.execute('SELECT LAST_INSERT_ID()') id = cursor.fetchone()[0] # Update if one exists, otherwise create a new one. q = {'id': id, 'locale': locale} try: trans = cls.objects.get(**q) trans.localized_string = string except cls.DoesNotExist: trans = cls(localized_string=string, **q) return trans class PurifiedTranslation(Translation): """Run the string through bleach to get a safe version.""" allowed_tags = [ 'a', 'abbr', 'acronym', 'b', 'blockquote', 'code', 'em', 'i', 'li', 'ol', 'strong', 'ul', ] allowed_attributes = { 'a': ['href', 'title', 'rel'], 'abbr': ['title'], 'acronym': ['title'], } class Meta: proxy = True def __unicode__(self): if not self.localized_string_clean: self.clean() return unicode(self.localized_string_clean) def __html__(self): return unicode(self) def __truncate__(self, length, killwords, end): return utils.truncate(unicode(self), length, killwords, end) def clean(self): from olympia.amo.utils import clean_nl super(PurifiedTranslation, self).clean() cleaned = self.clean_localized_string() self.localized_string_clean = clean_nl(cleaned).strip() def clean_localized_string(self): # All links (text and markup) are normalized. 
linkify_filter = partial( bleach.linkifier.LinkifyFilter, callbacks=[linkify_bounce_url_callback, bleach.callbacks.nofollow]) # Keep only the allowed tags and attributes, escape the rest. cleaner = bleach.Cleaner( tags=self.allowed_tags, attributes=self.allowed_attributes, filters=[linkify_filter]) return cleaner.clean(unicode(self.localized_string)) class LinkifiedTranslation(PurifiedTranslation): """Run the string through bleach to get a linkified version.""" allowed_tags = ['a'] class Meta: proxy = True class NoLinksNoMarkupTranslation(LinkifiedTranslation): """Run the string through bleach, escape markup and strip all the links.""" class Meta: proxy = True def clean_localized_string(self): # First pass: bleach everything, but leave links untouched. cleaned = super(LinkifiedTranslation, self).clean_localized_string() # Second pass: call linkify to empty the inner text of all links. emptied_links = bleach.linkify( cleaned, callbacks=[lambda attrs, new: {'_text': ''}]) # Third pass: now strip links (only links will be stripped, other # forbidden tags are already bleached/escaped. allowed_tags = self.allowed_tags[:] # Make a copy. allowed_tags.remove('a') return bleach.clean(emptied_links, tags=allowed_tags, strip=True) class TranslationSequence(models.Model): """ The translations_seq table, so migrations will create it during testing. """ id = models.IntegerField(primary_key=True) class Meta: db_table = 'translations_seq' def delete_translation(obj, fieldname): field = obj._meta.get_field(fieldname) trans_id = getattr(obj, field.attname) obj.update(**{field.name: None}) if trans_id: Translation.objects.filter(id=trans_id).delete()
{ "repo_name": "atiqueahmedziad/addons-server", "path": "src/olympia/translations/models.py", "copies": "1", "size": "8844", "license": "bsd-3-clause", "hash": -7537282915667919000, "line_mean": 33.8188976378, "line_max": 79, "alpha_frac": 0.6102442334, "autogenerated": false, "ratio": 4.297376093294461, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 1, "avg_score": 0, "num_lines": 254 }
from functools import partial from django.db import models from django.db.models.fields.related import ( RECURSIVE_RELATIONSHIP_CONSTANT, ManyToManyDescriptor, ManyToManyField, ManyToManyRel, RelatedField, create_many_to_many_intermediary_model, ) class CustomManyToManyField(RelatedField): """ Ticket #24104 - Need to have a custom ManyToManyField, which is not an inheritor of ManyToManyField. """ many_to_many = True def __init__(self, to, db_constraint=True, swappable=True, related_name=None, related_query_name=None, limit_choices_to=None, symmetrical=None, through=None, through_fields=None, db_table=None, **kwargs): try: to._meta except AttributeError: to = str(to) kwargs['rel'] = ManyToManyRel( self, to, related_name=related_name, related_query_name=related_query_name, limit_choices_to=limit_choices_to, symmetrical=symmetrical if symmetrical is not None else (to == RECURSIVE_RELATIONSHIP_CONSTANT), through=through, through_fields=through_fields, db_constraint=db_constraint, ) self.swappable = swappable self.db_table = db_table if kwargs['rel'].through is not None: assert self.db_table is None, "Cannot specify a db_table if an intermediary model is used." 
super().__init__(**kwargs) def contribute_to_class(self, cls, name, **kwargs): if self.remote_field.symmetrical and ( self.remote_field.model == "self" or self.remote_field.model == cls._meta.object_name): self.remote_field.related_name = "%s_rel_+" % name super().contribute_to_class(cls, name, **kwargs) if not self.remote_field.through and not cls._meta.abstract and not cls._meta.swapped: self.remote_field.through = create_many_to_many_intermediary_model(self, cls) setattr(cls, self.name, ManyToManyDescriptor(self.remote_field)) self.m2m_db_table = partial(self._get_m2m_db_table, cls._meta) def get_internal_type(self): return 'ManyToManyField' # Copy those methods from ManyToManyField because they don't call super() internally contribute_to_related_class = ManyToManyField.__dict__['contribute_to_related_class'] _get_m2m_attr = ManyToManyField.__dict__['_get_m2m_attr'] _get_m2m_reverse_attr = ManyToManyField.__dict__['_get_m2m_reverse_attr'] _get_m2m_db_table = ManyToManyField.__dict__['_get_m2m_db_table'] class InheritedManyToManyField(ManyToManyField): pass class MediumBlobField(models.BinaryField): """ A MySQL BinaryField that uses a different blob size. """ def db_type(self, connection): return 'MEDIUMBLOB'
{ "repo_name": "edmorley/django", "path": "tests/schema/fields.py", "copies": "68", "size": "2806", "license": "bsd-3-clause", "hash": 715381159624427100, "line_mean": 40.2647058824, "line_max": 118, "alpha_frac": 0.6593014968, "autogenerated": false, "ratio": 3.817687074829932, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 1, "avg_score": null, "num_lines": null }
from functools import partial from django.db import models from django.db.models.fields.related import ( RECURSIVE_RELATIONSHIP_CONSTANT, ManyToManyDescriptor, RelatedField, create_many_to_many_intermediary_model, ) class CustomManyToManyField(RelatedField): """ Ticket #24104 - Need to have a custom ManyToManyField, which is not an inheritor of ManyToManyField. """ many_to_many = True def __init__(self, to, db_constraint=True, swappable=True, related_name=None, related_query_name=None, limit_choices_to=None, symmetrical=None, through=None, through_fields=None, db_table=None, **kwargs): try: to._meta except AttributeError: to = str(to) kwargs['rel'] = models.ManyToManyRel( self, to, related_name=related_name, related_query_name=related_query_name, limit_choices_to=limit_choices_to, symmetrical=symmetrical if symmetrical is not None else (to == RECURSIVE_RELATIONSHIP_CONSTANT), through=through, through_fields=through_fields, db_constraint=db_constraint, ) self.swappable = swappable self.db_table = db_table if kwargs['rel'].through is not None: assert self.db_table is None, "Cannot specify a db_table if an intermediary model is used." 
super().__init__( related_name=related_name, related_query_name=related_query_name, limit_choices_to=limit_choices_to, **kwargs, ) def contribute_to_class(self, cls, name, **kwargs): if self.remote_field.symmetrical and ( self.remote_field.model == "self" or self.remote_field.model == cls._meta.object_name): self.remote_field.related_name = "%s_rel_+" % name super().contribute_to_class(cls, name, **kwargs) if not self.remote_field.through and not cls._meta.abstract and not cls._meta.swapped: self.remote_field.through = create_many_to_many_intermediary_model(self, cls) setattr(cls, self.name, ManyToManyDescriptor(self.remote_field)) self.m2m_db_table = partial(self._get_m2m_db_table, cls._meta) def get_internal_type(self): return 'ManyToManyField' # Copy those methods from ManyToManyField because they don't call super() internally contribute_to_related_class = models.ManyToManyField.__dict__['contribute_to_related_class'] _get_m2m_attr = models.ManyToManyField.__dict__['_get_m2m_attr'] _get_m2m_reverse_attr = models.ManyToManyField.__dict__['_get_m2m_reverse_attr'] _get_m2m_db_table = models.ManyToManyField.__dict__['_get_m2m_db_table'] class InheritedManyToManyField(models.ManyToManyField): pass class MediumBlobField(models.BinaryField): """ A MySQL BinaryField that uses a different blob size. """ def db_type(self, connection): return 'MEDIUMBLOB'
{ "repo_name": "freakboy3742/django", "path": "tests/schema/fields.py", "copies": "5", "size": "2976", "license": "bsd-3-clause", "hash": 8067035545147512000, "line_mean": 39.7671232877, "line_max": 118, "alpha_frac": 0.6518817204, "autogenerated": false, "ratio": 3.8153846153846156, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 1, "avg_score": 0.001384953466905053, "num_lines": 73 }
from functools import partial from django.db import models from model_utils.managers import InheritanceManager from coberturas_medicas.models import Cobertura from core.models import Persona, Profesional from dj_utils.mixins import ShowInfoMixin from dj_utils.models import BaseModel, uploadTenantFilename class Paciente(BaseModel): """ Persona que se atiende en el lugar. """ persona = models.OneToOneField(Persona, verbose_name='persona', on_delete=models.CASCADE) fecha_ingreso = models.DateField('fecha de ingreso') observaciones = models.TextField('observaciones', blank=True) # relaciones cobertura_medica = models.ForeignKey(Cobertura, verbose_name='cobertura', null=True, on_delete=models.SET_NULL) def __str__(self): return "{}".format(self.persona) class Meta: ordering = ('persona', ) verbose_name = "paciente" verbose_name_plural = "pacientes" def tratamiento_activo(self, el_dia=None): from tratamientos.models import Planificacion, MotivoConsulta try: if el_dia: return self.motivos_de_consulta.filter(creado_el__lte=el_dia).latest('creado_el') return self.motivos_de_consulta.filter( planificaciones__estado__in=Planificacion.estados_activos()).latest('creado_el') except MotivoConsulta.DoesNotExist: return None def ultimo_motivo_consulta(self): from tratamientos.models import MotivoConsulta try: return self.motivos_de_consulta.latest('creado_el') except MotivoConsulta.DoesNotExist: return None class RegistroBiometrico(BaseModel, ShowInfoMixin): """ Registro de datos biométricos. Como varían en el tiempo, se deja constancia de la fecha. """ paciente = models.ForeignKey(Paciente, related_name='registros_biometricos', on_delete=models.CASCADE) peso = models.DecimalField('peso (kg)', max_digits=5, decimal_places=2, null=True) altura = models.DecimalField('altura (mts)', max_digits=5, decimal_places=2, null=True) # demás datos biomédicos. 
profesional = models.ForeignKey(Profesional, on_delete=models.CASCADE) # archivos def __str__(self): return "Registro biométrico de {} ({})".format(self.paciente, self.creado_el) class Meta: verbose_name = 'registro biométrico' verbose_name_plural = 'registros biométricos' field_info = ('modificado_el', 'peso', 'altura', ) class Antecedente(BaseModel, ShowInfoMixin): """ Representa la historia médica del paciente. Contiene datos médicos y relevantes sobre el paciente. """ paciente = models.OneToOneField(Paciente, on_delete=models.CASCADE) patologicos = models.TextField('patológicos', blank=True) quirurgicos = models.TextField('quirúrgicos', blank=True) traumaticos = models.TextField('traumáticos', blank=True) alergicos = models.TextField('alérgicos', blank=True) heredo_familiar = models.TextField('heredo familiar', blank=True) habitos_fisiologicos = models.TextField('hábitos fisiológicos', blank=True) actividad_fisica= models.TextField('actividad física', blank=True) habitos_patologicos = models.TextField('hábitos patológicos', blank=True) medicaciones = models.TextField('medicaciones', blank=True) estudios_complementarios = models.TextField('estudios complementarios', blank=True) menarca = models.DateField('MENARCA', null=True) fum = models.DateField('FUM', null=True) tipo_partos = models.TextField('tipo de partos', blank=True) observaciones = models.TextField('observaciones', blank=True) def __unicode__(self): return "Antecedentes de {}".format( self.paciente.persona.nombre) class Meta: verbose_name = "antecedente" verbose_name_plural = "antecedentes" field_info = ('patologicos', 'quirurgicos', 'traumaticos', 'alergicos', 'heredo_familiar', 'habitos_fisiologicos', 'actividad_fisica', 'habitos_patologicos', 'medicaciones', 'estudios_complementarios', 'menarca', 'fum', 'tipo_partos', 'observaciones') class EntradaHistoriaClinica(BaseModel, ShowInfoMixin): paciente = models.ForeignKey(Paciente, related_name="entradas_historiaclinica", on_delete=models.CASCADE) 
profesional = models.ForeignKey(Profesional, on_delete=models.CASCADE) objects = InheritanceManager() class Meta: verbose_name_plural = "Entradas de historia clínica" verbose_name = "Entrada de historia clínica" def __str__(self): return "Entrada de {} por {}".format(self.paciente, self.profesional) class ComentariosHistoriaClinica(EntradaHistoriaClinica): """ Representa una entrada en la historia clínica del paciente. """ comentarios = models.TextField(verbose_name="comentarios") class Meta: verbose_name_plural = "comentarios de historia clinica" verbose_name = "comentario de historia clinica" def __str__(self): return "Comentario de {}".format(self.paciente) field_info = ('comentarios', ) class ImagenesHistoriaClinica(EntradaHistoriaClinica): """ Representa una imagen ingresada en la historia clinica """ imagen = models.ImageField( verbose_name="imágen", upload_to=partial(uploadTenantFilename, "historia_imagenes")) comentarios = models.TextField(verbose_name="comentarios", null=True, blank=True) class Meta: verbose_name_plural = "imágenes de historia clínica" verbose_name = "imagen de historia clínica" def __str__(self): return "Imágen de {}".format(self.paciente) field_info = ('imagen', 'comentarios', )
{ "repo_name": "mava-ar/sgk", "path": "src/pacientes/models.py", "copies": "1", "size": "5766", "license": "apache-2.0", "hash": -4758428310908380000, "line_mean": 36.5163398693, "line_max": 115, "alpha_frac": 0.6921602787, "autogenerated": false, "ratio": 3.1800554016620497, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.9366251582514062, "avg_score": 0.0011928195695972816, "num_lines": 153 }
from functools import partial from django.db import models from six import string_types from .field_mappings import STRING_FIELDS class Evaluator(object): def __init__(self, faker, factory, iteration): self.faker = faker self.factory = factory self.iteration = iteration super(Evaluator, self).__init__() def evaluate(self, value): from .blueprint import Blueprint if isinstance(value, Blueprint): return value.make_one(iteration=self.iteration) if callable(value): if value.__name__ == "<lambda>": return value(self.iteration, self.faker) else: return value() if isinstance(value, string_types): try: return value.format(self.iteration, self.faker) except KeyError: return value return value def evaluate_fake(self, resolver, field): if callable(resolver[0]): func = partial(resolver[0], self.faker, field) else: func = getattr(self.faker, resolver[0]) return func(*resolver[1], **resolver[2]) def fake_value(self, model, field): from . import field_mappings if field.blank and isinstance(field, STRING_FIELDS): return "" if isinstance(field, models.ForeignKey): return self.factory.make_one(field.related_model, iteration=self.iteration) if field.name in field_mappings.mappings_names: return self.evaluate_fake(field_mappings.mappings_names[field.name], field) for field_class, fake in field_mappings.mappings_types.items(): if isinstance(field, field_class): return self.evaluate_fake(fake, field) model_name = "%s.%s" % (model._meta.app_label, model._meta.model_name) raise ValueError( "Cant generate a value for model `%s` field `%s`" % (model_name, field.name) )
{ "repo_name": "fcurella/django-fakery", "path": "django_fakery/values.py", "copies": "1", "size": "1984", "license": "mit", "hash": 395617289096281540, "line_mean": 32.0666666667, "line_max": 88, "alpha_frac": 0.6033266129, "autogenerated": false, "ratio": 4.239316239316239, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 1, "avg_score": 0.0005660537963908751, "num_lines": 60 }
from functools import partial from django.db import models, router, transaction from django.db.models import Max, Model, signals from django.db.models.fields.related import ManyToManyField as _ManyToManyField from django.db.models.fields.related import lazy_related_operation, resolve_relation from django.db.models.fields.related_descriptors import ManyToManyDescriptor, create_forward_many_to_many_manager from django.db.models.utils import make_model_tuple from django.utils.encoding import force_str from django.utils.functional import cached_property from django.utils.translation import gettext_lazy as _ from .compat import get_rel from .forms import SortedMultipleChoiceField SORT_VALUE_FIELD_NAME = 'sort_value' def create_sorted_many_related_manager(superclass, rel, *args, **kwargs): RelatedManager = create_forward_many_to_many_manager( superclass, rel, *args, **kwargs) class SortedRelatedManager(RelatedManager): def _apply_rel_ordering(self, queryset): return queryset.extra(order_by=['%s.%s' % ( self.through._meta.db_table, self.through._sort_field_name, # pylint: disable=protected-access )]) def get_queryset(self): # We use ``extra`` method here because we have no other access to # the extra sorting field of the intermediary model. The fields # are hidden for joins because we set ``auto_created`` on the # intermediary's meta options. try: # pylint: disable=protected-access return self.instance._prefetched_objects_cache[self.prefetch_cache_name] except (AttributeError, KeyError): queryset = super().get_queryset() return self._apply_rel_ordering(queryset) def get_prefetch_queryset(self, instances, queryset=None): # Apply the same ordering for prefetch ones result = super().get_prefetch_queryset(instances, queryset) return (self._apply_rel_ordering(result[0]),) + result[1:] def set(self, objs, **kwargs): # pylint: disable=arguments-differ # Choosing to clear first will ensure the order is maintained. 
kwargs['clear'] = True super().set(objs, **kwargs) set.alters_data = True # pylint: disable=arguments-differ def _add_items(self, source_field_name, target_field_name, *objs, **kwargs): # source_field_name: the PK fieldname in join table for the source object # target_field_name: the PK fieldname in join table for the target object # *objs - objects to add. Either object instances, or primary keys of object instances. # **kwargs: in Django >= 2.2; contains `through_defaults` key. through_defaults = kwargs.get('through_defaults') or {} # If there aren't any objects, there is nothing to do. if objs: # Django uses a set here, we need to use a list to keep the # correct ordering. new_ids = [] for obj in objs: if isinstance(obj, self.model): if not router.allow_relation(obj, self.instance): raise ValueError( 'Cannot add "%r": instance is on database "%s", value is on database "%s"' % (obj, self.instance._state.db, obj._state.db) # pylint: disable=protected-access ) fk_val = self.through._meta.get_field(target_field_name).get_foreign_related_value(obj)[0] if fk_val is None: raise ValueError( 'Cannot add "%r": the value for field "%s" is None' % (obj, target_field_name) ) new_ids.append(fk_val) elif isinstance(obj, Model): raise TypeError( "'%s' instance expected, got %r" % (self.model._meta.object_name, obj) ) else: new_ids.append(obj) db = router.db_for_write(self.through, instance=self.instance) manager = self.through._default_manager.using(db) # pylint: disable=protected-access vals = (self.through._default_manager.using(db) # pylint: disable=protected-access .values_list(target_field_name, flat=True) .filter(**{ source_field_name: self.related_val[0], '%s__in' % target_field_name: new_ids, })) # make set.difference_update() keeping ordering new_ids_set = set(new_ids) new_ids_set.difference_update(vals) new_ids = list(filter(lambda _id: _id in new_ids_set, new_ids)) # Add the ones that aren't there already with transaction.atomic(using=db, savepoint=False): if 
self.reverse or source_field_name == self.source_field_name: # Don't send the signal when we are inserting the # duplicate data row for symmetrical reverse entries. signals.m2m_changed.send( sender=self.through, action='pre_add', instance=self.instance, reverse=self.reverse, model=self.model, pk_set=new_ids_set, using=db, ) rel_source_fk = self.related_val[0] rel_through = self.through sort_field_name = rel_through._sort_field_name # pylint: disable=protected-access # Use the max of all indices as start index... # maybe an autoincrement field should do the job more efficiently ? source_queryset = manager.filter(**{'%s_id' % source_field_name: rel_source_fk}) sort_value_max = source_queryset.aggregate(max=Max(sort_field_name))['max'] or 0 bulk_data = [ dict(through_defaults, **{ '%s_id' % source_field_name: rel_source_fk, '%s_id' % target_field_name: obj_id, sort_field_name: obj_idx, }) for obj_idx, obj_id in enumerate(new_ids, sort_value_max + 1) ] manager.bulk_create([rel_through(**data) for data in bulk_data]) if self.reverse or source_field_name == self.source_field_name: # Don't send the signal when we are inserting the # duplicate data row for symmetrical reverse entries. signals.m2m_changed.send( sender=self.through, action='post_add', instance=self.instance, reverse=self.reverse, model=self.model, pk_set=new_ids_set, using=db, ) return SortedRelatedManager class SortedManyToManyDescriptor(ManyToManyDescriptor): def __init__(self, field): super().__init__(field.remote_field) @cached_property def related_manager_cls(self): model = self.rel.model return create_sorted_many_related_manager( model._default_manager.__class__, # pylint: disable=protected-access self.rel, # This is the new `reverse` argument (which ironically should # be False) reverse=False, ) class SortedManyToManyField(_ManyToManyField): """ Providing a many to many relation that remembers the order of related objects. 
Accept a boolean ``sorted`` attribute which specifies if relation is ordered or not. Default is set to ``True``. If ``sorted`` is set to ``False`` the field will behave exactly like django's ``ManyToManyField``. Accept a class ``base_class`` attribute which specifies the base class of the intermediate model. It allows to customize the intermediate model. """ def __init__(self, to, sorted=True, base_class=None, **kwargs): # pylint: disable=redefined-builtin self.sorted = sorted self.sort_value_field_name = kwargs.pop( 'sort_value_field_name', SORT_VALUE_FIELD_NAME) # Base class of through model self.base_class = base_class super().__init__(to, **kwargs) if self.sorted: self.help_text = kwargs.get('help_text', None) def deconstruct(self): # We have to persist custom added options in the ``kwargs`` # dictionary. For readability only non-default values are stored. name, path, args, kwargs = super().deconstruct() if self.sort_value_field_name is not SORT_VALUE_FIELD_NAME: kwargs['sort_value_field_name'] = self.sort_value_field_name if self.sorted is not True: kwargs['sorted'] = self.sorted return name, path, args, kwargs def check(self, **kwargs): return ( super().check(**kwargs) + self._check_through_sortedm2m() ) def _check_through_sortedm2m(self): rel = get_rel(self) # Check if the custom through model of a SortedManyToManyField as a # valid '_sort_field_name' attribute if self.sorted and rel.through: assert hasattr(rel.through, '_sort_field_name'), ( "The model is used as an intermediate model by " "'%s' but has no defined '_sort_field_name' attribute" % rel.through ) return [] # pylint: disable=inconsistent-return-statements def contribute_to_class(self, cls, name, **kwargs): if not self.sorted: return super().contribute_to_class(cls, name, **kwargs) # To support multiple relations to self, it's useful to have a non-None # related name on symmetrical relations for internal reasons. 
The # concept doesn't make a lot of sense externally ("you want me to # specify *what* on my non-reversible relation?!"), so we set it up # automatically. The funky name reduces the chance of an accidental # clash. rel = get_rel(self) if rel.symmetrical and (rel.model == "self" or rel.model == cls._meta.object_name): rel.related_name = "%s_rel_+" % name elif rel.is_hidden(): # If the backwards relation is disabled, replace the original # related_name with one generated from the m2m field name. Django # still uses backwards relations internally and we need to avoid # clashes between multiple m2m fields with related_name == '+'. rel.related_name = "_%s_%s_+" % (cls.__name__.lower(), name) # pylint: disable=bad-super-call super(_ManyToManyField, self).contribute_to_class(cls, name, **kwargs) # The intermediate m2m model is not auto created if: # 1) There is a manually specified intermediate, or # 2) The class owning the m2m field is abstract. # 3) The class owning the m2m field has been swapped out. 
if not cls._meta.abstract: if rel.through: def resolve_through_model(_, model): rel.through = model lazy_related_operation(resolve_through_model, cls, rel.through) elif not cls._meta.swapped: rel.through = self.create_intermediate_model(cls) # Add the descriptor for the m2m relation setattr(cls, self.name, SortedManyToManyDescriptor(self)) # Set up the accessor for the m2m table name for the relation self.m2m_db_table = partial(self._get_m2m_db_table, cls._meta) # pylint: disable=attribute-defined-outside-init def get_internal_type(self): return 'ManyToManyField' def formfield(self, **kwargs): # pylint: disable=arguments-differ defaults = {} if self.sorted: defaults['form_class'] = SortedMultipleChoiceField defaults.update(kwargs) return super().formfield(**defaults) def create_intermediate_model(self, klass): base_classes = (self.base_class, models.Model) if self.base_class else (models.Model,) return create_sortable_many_to_many_intermediary_model( self, klass, self.sort_value_field_name, base_classes=base_classes) def create_sortable_many_to_many_intermediary_model(field, klass, sort_field_name, base_classes=None): def set_managed(model, related, through): through._meta.managed = model._meta.managed or related._meta.managed to_model = resolve_relation(klass, field.remote_field.model) name = '%s_%s' % (klass._meta.object_name, field.name) lazy_related_operation(set_managed, klass, to_model, name) base_classes = base_classes if base_classes else (models.Model,) # TODO : use autoincrement here ? 
sort_field = models.IntegerField(default=0) to = make_model_tuple(to_model)[1] from_ = klass._meta.model_name if to == from_: to = 'to_%s' % to from_ = 'from_%s' % from_ meta = type('Meta', (), { 'db_table': field._get_m2m_db_table(klass._meta), # pylint: disable=protected-access 'auto_created': klass, 'app_label': klass._meta.app_label, 'db_tablespace': klass._meta.db_tablespace, 'unique_together': (from_, to), 'ordering': (sort_field_name,), 'verbose_name': _('%(from)s-%(to)s relationship') % {'from': from_, 'to': to}, 'verbose_name_plural': _('%(from)s-%(to)s relationships') % {'from': from_, 'to': to}, 'apps': field.model._meta.apps, }) # Construct and return the new class. return type(force_str(name), base_classes, { 'Meta': meta, '__module__': klass.__module__, from_: models.ForeignKey( klass, related_name='%s+' % name, db_tablespace=field.db_tablespace, db_constraint=field.remote_field.db_constraint, on_delete=models.CASCADE, ), to: models.ForeignKey( to_model, related_name='%s+' % name, db_tablespace=field.db_tablespace, db_constraint=field.remote_field.db_constraint, on_delete=models.CASCADE, ), # Sort fields sort_field_name: sort_field, '_sort_field_name': sort_field_name, })
{ "repo_name": "gregmuellegger/django-sortedm2m", "path": "sortedm2m/fields.py", "copies": "1", "size": "14861", "license": "bsd-3-clause", "hash": -6838770220607004000, "line_mean": 43.7620481928, "line_max": 120, "alpha_frac": 0.577955723, "autogenerated": false, "ratio": 4.270402298850574, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.5348358021850574, "avg_score": null, "num_lines": null }
from functools import partial from django.db.models.utils import make_model_tuple from django.dispatch import Signal class_prepared = Signal() class ModelSignal(Signal): """ Signal subclass that allows the sender to be lazily specified as a string of the `app_label.ModelName` form. """ def _lazy_method(self, method, apps, receiver, sender, **kwargs): from django.db.models.options import Options # This partial takes a single optional argument named "sender". partial_method = partial(method, receiver, **kwargs) if isinstance(sender, str): apps = apps or Options.default_apps apps.lazy_model_operation(partial_method, make_model_tuple(sender)) else: return partial_method(sender) def connect(self, receiver, sender=None, weak=True, dispatch_uid=None, apps=None): self._lazy_method( super().connect, apps, receiver, sender, weak=weak, dispatch_uid=dispatch_uid, ) def disconnect(self, receiver=None, sender=None, dispatch_uid=None, apps=None): return self._lazy_method( super().disconnect, apps, receiver, sender, dispatch_uid=dispatch_uid ) pre_init = ModelSignal(use_caching=True) post_init = ModelSignal(use_caching=True) pre_save = ModelSignal(use_caching=True) post_save = ModelSignal(use_caching=True) pre_delete = ModelSignal(use_caching=True) post_delete = ModelSignal(use_caching=True) m2m_changed = ModelSignal(use_caching=True) pre_migrate = Signal() post_migrate = Signal()
{ "repo_name": "kaedroho/django", "path": "django/db/models/signals.py", "copies": "29", "size": "1573", "license": "bsd-3-clause", "hash": 1276356082449198000, "line_mean": 31.1020408163, "line_max": 86, "alpha_frac": 0.6840432295, "autogenerated": false, "ratio": 3.7995169082125604, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 1, "avg_score": null, "num_lines": null }
from functools import partial from django.db.models.utils import make_model_tuple from django.dispatch import Signal class_prepared = Signal(providing_args=["class"]) class ModelSignal(Signal): """ Signal subclass that allows the sender to be lazily specified as a string of the `app_label.ModelName` form. """ def _lazy_method(self, method, apps, receiver, sender, **kwargs): from django.db.models.options import Options # This partial takes a single optional argument named "sender". partial_method = partial(method, receiver, **kwargs) if isinstance(sender, str): apps = apps or Options.default_apps apps.lazy_model_operation(partial_method, make_model_tuple(sender)) else: return partial_method(sender) def connect(self, receiver, sender=None, weak=True, dispatch_uid=None, apps=None): self._lazy_method( super().connect, apps, receiver, sender, weak=weak, dispatch_uid=dispatch_uid, ) def disconnect(self, receiver=None, sender=None, dispatch_uid=None, apps=None): return self._lazy_method( super().disconnect, apps, receiver, sender, dispatch_uid=dispatch_uid ) pre_init = ModelSignal(providing_args=["instance", "args", "kwargs"], use_caching=True) post_init = ModelSignal(providing_args=["instance"], use_caching=True) pre_save = ModelSignal(providing_args=["instance", "raw", "using", "update_fields"], use_caching=True) post_save = ModelSignal(providing_args=["instance", "raw", "created", "using", "update_fields"], use_caching=True) pre_delete = ModelSignal(providing_args=["instance", "using"], use_caching=True) post_delete = ModelSignal(providing_args=["instance", "using"], use_caching=True) m2m_changed = ModelSignal( providing_args=["action", "instance", "reverse", "model", "pk_set", "using"], use_caching=True, ) pre_migrate = Signal(providing_args=["app_config", "verbosity", "interactive", "using", "apps", "plan"]) post_migrate = Signal(providing_args=["app_config", "verbosity", "interactive", "using", "apps", "plan"])
{ "repo_name": "fenginx/django", "path": "django/db/models/signals.py", "copies": "55", "size": "2160", "license": "bsd-3-clause", "hash": -5581405778138045000, "line_mean": 39.7547169811, "line_max": 114, "alpha_frac": 0.6694444444, "autogenerated": false, "ratio": 3.75, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 1, "avg_score": null, "num_lines": null }
from functools import partial from django.db.models.utils import make_model_tuple from django.dispatch import Signal class_prepared = Signal(providing_args=["class"]) class ModelSignal(Signal): """ Signal subclass that allows the sender to be lazily specified as a string of the `app_label.ModelName` form. """ def connect(self, receiver, sender=None, weak=True, dispatch_uid=None, apps=None): # Takes a single optional argument named "sender" connect = partial(super(ModelSignal, self).connect, receiver, weak=weak, dispatch_uid=dispatch_uid) models = [make_model_tuple(sender)] if sender else [] if not apps: from django.db.models.base import Options apps = sender._meta.apps if hasattr(sender, '_meta') else Options.default_apps apps.lazy_model_operation(connect, *models) pre_init = ModelSignal(providing_args=["instance", "args", "kwargs"], use_caching=True) post_init = ModelSignal(providing_args=["instance"], use_caching=True) pre_save = ModelSignal(providing_args=["instance", "raw", "using", "update_fields"], use_caching=True) post_save = ModelSignal(providing_args=["instance", "raw", "created", "using", "update_fields"], use_caching=True) pre_delete = ModelSignal(providing_args=["instance", "using"], use_caching=True) post_delete = ModelSignal(providing_args=["instance", "using"], use_caching=True) m2m_changed = ModelSignal( providing_args=["action", "instance", "reverse", "model", "pk_set", "using"], use_caching=True, ) pre_migrate = Signal(providing_args=["app_config", "verbosity", "interactive", "using", "apps", "plan"]) post_migrate = Signal(providing_args=["app_config", "verbosity", "interactive", "using", "apps", "plan"])
{ "repo_name": "willhardy/django", "path": "django/db/models/signals.py", "copies": "2", "size": "1778", "license": "bsd-3-clause", "hash": -698488054041501000, "line_mean": 42.3658536585, "line_max": 114, "alpha_frac": 0.6867266592, "autogenerated": false, "ratio": 3.6285714285714286, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 1, "avg_score": 0.0029087856644995362, "num_lines": 41 }
from functools import partial from django.forms.forms import DeclarativeFieldsMetaclass from django.forms.models import ALL_FIELDS from django.core.exceptions import FieldError, ImproperlyConfigured from django.forms import models as model_forms from django.utils import six from mongoengine.fields import ObjectIdField, FileField from mongoengine.base import ValidationError def construct_instance(form, instance, fields=None, exclude=None): """ Constructs and returns a model instance from the bound ``form``'s ``cleaned_data``, but does not save the returned instance to the database. """ opts = instance._meta cleaned_data = form.cleaned_data file_field_list = [] for f in opts.fields: try: if not f.editable or isinstance(f, ObjectIdField) or f.name not in cleaned_data: continue except AttributeError: # probably this is StringField() added automatically for inherited fields # so we ignore it continue if fields is not None and f.name not in fields: continue if exclude and f.name in exclude: continue # Defer saving file-type fields until after the other fields, so a # callable upload_to can use the values from other fields. if isinstance(f, FileField): file_field_list.append(f) else: f.save_form_data(instance, cleaned_data[f.name]) for f in file_field_list: f.save_form_data(instance, cleaned_data[f.name]) return instance def save_instance(form, instance, fields=None, fail_message='saved', commit=True, exclude=None, construct=True): """ Saves bound Form ``form``'s cleaned_data into document instance ``instance``. If commit=True, then the changes to ``instance`` will be saved to the database. Returns ``instance``. If construct=False, assume ``instance`` has already been constructed and just needs to be saved. """ instance = construct_instance(form, instance, fields, exclude) if form.errors: raise ValueError("The %s could not be %s because the data didn't" " validate." 
% (instance.__class__.__name__, fail_message)) if commit and hasattr(instance, 'save'): # see BaseDocumentForm._post_clean for an explanation if hasattr(form, '_delete_before_save'): fields = instance._fields new_fields = dict([(n, f) for n, f in six.iteritems(fields) if not n in form._delete_before_save]) if hasattr(instance, '_changed_fields'): for field in form._delete_before_save: instance._changed_fields.remove(field) instance._fields = new_fields instance.save() instance._fields = fields else: instance.save() return instance class DocumentFormOptions(model_forms.ModelFormOptions): def __init__(self, options=None): super(DocumentFormOptions, self).__init__(options) self.model = getattr(options, 'document', None) or getattr(options, 'model', None) if self.model is not None: options.model = self.model self.embedded_field = getattr(options, 'embedded_field', None) class DocumentFormMetaclass(DeclarativeFieldsMetaclass): def __new__(mcs, name, bases, attrs): formfield_callback = attrs.pop('formfield_callback', None) new_class = super(DocumentFormMetaclass, mcs).__new__(mcs, name, bases, attrs) if bases == (BaseDocumentForm,): return new_class opts = new_class._meta = DocumentFormOptions(getattr(new_class, 'Meta', None)) # We check if a string was passed to `fields` or `exclude`, # which is likely to be a mistake where the user typed ('foo') instead # of ('foo',) for opt in ['fields', 'exclude', 'localized_fields']: value = getattr(opts, opt) if isinstance(value, six.string_types) and value != ALL_FIELDS: msg = ("%(model)s.Meta.%(opt)s cannot be a string. " "Did you mean to type: ('%(value)s',)?" % { 'model': new_class.__name__, 'opt': opt, 'value': value, }) raise TypeError(msg) if opts.model: # If a model is defined, extract form fields from it. if opts.fields is None and opts.exclude is None: raise ImproperlyConfigured( "Creating a ModelForm without either the 'fields' attribute " "or the 'exclude' attribute is prohibited; form %s " "needs updating." 
% name ) if opts.fields == ALL_FIELDS: # Sentinel for fields_for_model to indicate "get the list of # fields from the model" opts.fields = None fields = model_forms.fields_for_model( opts.model, opts.fields, opts.exclude, opts.widgets, formfield_callback, opts.localized_fields, opts.labels, opts.help_texts, opts.error_messages, opts.field_classes, ) # make sure opts.fields doesn't specify an invalid field none_model_fields = [k for k, v in six.iteritems(fields) if not v] missing_fields = (set(none_model_fields) - set(new_class.declared_fields.keys())) if missing_fields: message = 'Unknown field(s) (%s) specified for %s' message = message % (', '.join(missing_fields), opts.model.__name__) raise FieldError(message) # Override default model fields with any custom declared ones # (plus, include all the other declared fields). fields.update(new_class.declared_fields) else: fields = new_class.declared_fields new_class.base_fields = fields return new_class class BaseDocumentForm(model_forms.BaseModelForm): def _save_m2m(self): pass def _post_clean(self): opts = self._meta # mongo MetaDict does not have fields attribute # adding it here istead of rewriting code self.instance._meta.fields = opts.model._meta.fields exclude = self._get_validation_exclusions() try: self.instance = construct_instance(self, self.instance, opts.fields, exclude) except ValidationError as e: self._update_errors(e) def save(self, commit=True): """ Saves this ``form``'s cleaned_data into model instance ``self.instance``. If commit=True, then the changes to ``instance`` will be saved to the database. Returns ``instance``. """ if self.errors: try: if self.instance.pk is None: fail_message = 'created' else: fail_message = 'changed' except (KeyError, AttributeError): fail_message = 'embedded document saved' raise ValueError( "The %s could not be %s because the data didn't" " validate." 
% (self.instance.__class__.__name__, fail_message)) if commit: self.instance.save() else: self.save_m2m = self._save_m2m return self.instance save.alters_data = True @six.add_metaclass(DocumentFormMetaclass) class DocumentForm(BaseDocumentForm): pass documentform_factory = partial(model_forms.modelform_factory, form=DocumentForm) @six.add_metaclass(DocumentFormMetaclass) class EmbeddedDocumentForm(BaseDocumentForm): def __init__(self, parent_document, *args, **kwargs): super(EmbeddedDocumentForm, self).__init__(*args, **kwargs) self.parent_document = parent_document if self._meta.embedded_field is None: raise FieldError("%s.Meta must have defined embedded_field" % self.__class__.__name__) if not hasattr(self.parent_document, self._meta.embedded_field): raise FieldError("Parent document must have field %s" % self._meta.embedded_field) def save(self, commit=True): if self.errors: raise ValueError( "The %s could not be saved because the data didn't" " validate." % self.instance.__class__.__name__ ) def save(*args, **kwargs): instance = construct_instance(self, self.instance, self.fields, self._meta.exclude) l = getattr(self.parent_document, self._meta.embedded_field) l.append(instance) setattr(self.parent_document, self._meta.embedded_field, l) self.parent_document.save(*args, **kwargs) if commit: save() else: self.instance.save = save return self.instance class BaseDocumentFormSet(model_forms.BaseModelFormSet): """ A ``FormSet`` for editing a queryset and/or adding new objects to it. """ documentformset_factory = partial( model_forms.modelformset_factory, form=DocumentForm, formset=BaseDocumentFormSet, ) class BaseInlineDocumentFormSet(BaseDocumentFormSet): """ A formset for child objects related to a parent. 
self.instance -> the document containing the inline objects """ def __init__(self, data=None, files=None, instance=None, save_as_new=False, prefix=None, queryset=[], **kwargs): self.instance = instance self.save_as_new = save_as_new super(BaseInlineDocumentFormSet, self).__init__(data, files, prefix=prefix, queryset=queryset, **kwargs) def initial_form_count(self): if self.save_as_new: return 0 return super(BaseInlineDocumentFormSet, self).initial_form_count() #@classmethod def get_default_prefix(cls): return cls.model.__name__.lower() get_default_prefix = classmethod(get_default_prefix) def add_fields(self, form, index): super(BaseInlineDocumentFormSet, self).add_fields(form, index) # Add the generated field to form._meta.fields if it's defined to make # sure validation isn't skipped on that field. if form._meta.fields: if isinstance(form._meta.fields, tuple): form._meta.fields = list(form._meta.fields) #form._meta.fields.append(self.fk.name) def get_unique_error_message(self, unique_check): unique_check = [field for field in unique_check if field != self.fk.name] return super(BaseInlineDocumentFormSet, self).get_unique_error_message(unique_check) def inlineformset_factory(document, form=DocumentForm, formset=BaseInlineDocumentFormSet, fields=None, exclude=None, extra=1, can_order=False, can_delete=True, max_num=None, formfield_callback=None): """ Returns an ``InlineFormSet`` for the given kwargs. You must provide ``fk_name`` if ``model`` has more than one ``ForeignKey`` to ``parent_model``. 
""" kwargs = { 'form': form, 'formfield_callback': formfield_callback, 'formset': formset, 'extra': extra, 'can_delete': can_delete, 'can_order': can_order, 'fields': fields, 'exclude': exclude, 'max_num': max_num, } FormSet = documentformset_factory(document, **kwargs) return FormSet class EmbeddedDocumentFormSet(BaseInlineDocumentFormSet): def __init__(self, parent_document=None, data=None, files=None, instance=None, save_as_new=False, prefix=None, queryset=[], **kwargs): self.parent_document = parent_document super(EmbeddedDocumentFormSet, self).__init__(data, files, instance, save_as_new, prefix, queryset, **kwargs) def _construct_form(self, i, **kwargs): defaults = {'parent_document': self.parent_document} defaults.update(kwargs) form = super(BaseDocumentFormSet, self)._construct_form(i, **defaults) return form def embeddedformset_factory(document, parent_document, form=EmbeddedDocumentForm, formset=EmbeddedDocumentFormSet, fields=None, exclude=None, extra=1, can_order=False, can_delete=True, max_num=None, formfield_callback=None): """ Returns an ``InlineFormSet`` for the given kwargs. You must provide ``fk_name`` if ``model`` has more than one ``ForeignKey`` to ``parent_model``. """ kwargs = { 'form': form, 'formfield_callback': formfield_callback, 'formset': formset, 'extra': extra, 'can_delete': can_delete, 'can_order': can_order, 'fields': fields, 'exclude': exclude, 'max_num': max_num, } FormSet = inlineformset_factory(document, **kwargs) FormSet.parent_document = parent_document return FormSet
{ "repo_name": "iandd0824/ri-app", "path": "web/django_mongoengine/forms/documents.py", "copies": "1", "size": "13332", "license": "apache-2.0", "hash": -6650050636934861000, "line_mean": 35.8287292818, "line_max": 117, "alpha_frac": 0.597359736, "autogenerated": false, "ratio": 4.361138370951914, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.5458498106951915, "avg_score": null, "num_lines": null }
from functools import partial from django.forms import Media from fluent_contents import extensions from fluent_contents.admin.placeholdereditor import ( PlaceholderEditorAdmin, PlaceholderEditorInline, ) from fluent_contents.models import PlaceholderData from fluent_contents.models.fields import PlaceholderField class PlaceholderFieldInline(PlaceholderEditorInline): """ The inline used to process placeholder fields. """ template = "admin/fluent_contents/placeholderfield/inline_init.html" @property def media(self): # Avoid cp_tabs.js for the placeholder field. media = super(PlaceholderFieldInline, self).media return Media( js=[f for f in media._js if not f.endswith("cp_tabs.js")], css=media._css ) class PlaceholderFieldAdmin(PlaceholderEditorAdmin): """ The base functionality for :class:`~django.contrib.admin.ModelAdmin` dialogs to display placeholder fields. This class loads the :class:`~fluent_contents.models.ContentItem` inlines, and initializes the frontend editor for the :class:`~fluent_contents.models.PlaceholderField`. The placeholder will be displayed in the admin: .. image:: /images/admin/placeholderfieldadmin1.png :width: 770px :height: 562px :alt: django-fluent-contents placeholder field preview """ placeholder_inline = PlaceholderFieldInline def get_form(self, request, obj=None, **kwargs): kwargs["formfield_callback"] = partial( self.formfield_for_dbfield, request=request, obj=obj ) return super(PlaceholderFieldAdmin, self).get_form(request, obj=obj, **kwargs) def formfield_for_dbfield(self, db_field, **kwargs): obj = kwargs.pop("obj", None) if isinstance(db_field, PlaceholderField): kwargs["parent_object"] = obj return super(PlaceholderFieldAdmin, self).formfield_for_dbfield( db_field, **kwargs ) def get_placeholder_data(self, request, obj=None): """ Return the data of the placeholder fields. """ # Return all placeholder fields in the model. 
if not hasattr(self.model, "_meta_placeholder_fields"): return [] data = [] for name, field in self.model._meta_placeholder_fields.items(): assert isinstance(field, PlaceholderField) data.append( PlaceholderData( slot=field.slot, title=field.verbose_name.capitalize(), fallback_language=None, # Information cant' be known by "render_placeholder" in the template. ) ) return data def get_all_allowed_plugins(self): """ Return which plugins are allowed by the placeholder fields. """ # Get all allowed plugins of the various placeholders together. if not hasattr(self.model, "_meta_placeholder_fields"): # No placeholder fields in the model, no need for inlines. return [] plugins = [] for name, field in self.model._meta_placeholder_fields.items(): assert isinstance(field, PlaceholderField) if field.plugins is None: # no limitations, so all is allowed return extensions.plugin_pool.get_plugins() else: plugins += field.plugins return list(set(plugins))
{ "repo_name": "edoburu/django-fluent-contents", "path": "fluent_contents/admin/placeholderfield.py", "copies": "2", "size": "3474", "license": "apache-2.0", "hash": -520329710038733500, "line_mean": 34.0909090909, "line_max": 114, "alpha_frac": 0.6419113414, "autogenerated": false, "ratio": 4.448143405889884, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.6090054747289885, "avg_score": null, "num_lines": null }
from functools import partial

from django.http import Http404
from lxml.builder import E

from open511_server.models import Camera
from open511_server.utils.views import APIView, Resource
from open511_server.views import CommonFilters, CommonListView


class CameraListView(CommonListView):
    """List endpoint for Open511 camera resources."""

    model = Camera

    resource_name_plural = 'cameras'

    # Query-string filters; xpath-based filters match against the camera's
    # stored XML document.
    filters = {
        'bbox': CommonFilters.bbox,
        'jurisdiction': CommonFilters.jurisdiction,
        'road_name': partial(CommonFilters.xpath, 'roads/road/name/text()'),
        'area_id': partial(CommonFilters.xpath, 'areas/area/id/text()'),
        'area_name': partial(CommonFilters.xpath, 'areas/area/name/text()'),
        'geography': None,  # dealt with in post_filter
        'tolerance': None,  # dealt with in post_filter
    }


class CameraView(APIView):
    """Detail endpoint for a single camera within a jurisdiction."""

    model = Camera

    def get(self, request, jurisdiction_id, id):
        # Scope the lookup to the jurisdiction so an id from another
        # jurisdiction 404s instead of leaking.
        base_qs = self.model.objects.filter(jurisdiction__id=jurisdiction_id)
        try:
            obj = base_qs.get(id=id)
        except Camera.DoesNotExist:
            raise Http404
        # NOTE(review): the root element here is <events>, which looks like a
        # copy-paste from the events detail view — for a camera resource one
        # would expect a <cameras> (or camera) wrapper. Confirm against the
        # Open511 response format before changing.
        return Resource(E.events(obj.to_full_xml_element(
            accept_language=request.accept_language,
        )))
{ "repo_name": "Open511/open511-server", "path": "open511_server/views/cameras.py", "copies": "1", "size": "1211", "license": "mit", "hash": -6813426739901830000, "line_mean": 30.0512820513, "line_max": 77, "alpha_frac": 0.671345995, "autogenerated": false, "ratio": 3.692073170731707, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.9855297008486743, "avg_score": 0.0016244314489928524, "num_lines": 39 }
from functools import partial

from django import forms
from django.conf import settings
from django.contrib import admin
from django.contrib.admin.widgets import FilteredSelectMultiple
from django.db.models import TextField
from django.contrib.gis.db.models import ManyToManyField
from django.contrib.admin.utils import model_ngettext
from django.core.exceptions import PermissionDenied, ValidationError
from django.utils.encoding import force_text
from django.utils.translation import ugettext_lazy as _
from ckeditor_uploader.widgets import CKEditorUploadingWidget
from djgeojson.fields import GeoJSONFormField
from leaflet.admin import LeafletGeoAdmin
from nested_admin.nested import NestedModelAdminMixin, NestedStackedInline
from parler.admin import TranslatableAdmin, TranslatableStackedInline
from parler.forms import TranslatableModelForm, TranslatableBaseInlineFormSet

from democracy import models
from democracy.admin.widgets import Select2SelectMultiple, ShortTextAreaWidget
from democracy.enums import InitialSectionType
from democracy.models.utils import copy_hearing
from democracy.plugins import get_implementation


class FixedModelForm(TranslatableModelForm):
    # Taken from https://github.com/asyncee/django-easy-select2/blob/master/easy_select2/forms.py
    """
    Simple child of ModelForm that removes the 'Hold down "Control" ...'
    message that is enforced in select multiple fields.
    See https://github.com/asyncee/django-easy-select2/issues/2
    and https://code.djangoproject.com/ticket/9321

    Removes also help_texts of GeoJSONFormFields as those will have maps.
    """

    def __init__(self, *args, **kwargs):
        super(FixedModelForm, self).__init__(*args, **kwargs)

        # The exact default help text Django appends to multi-select fields;
        # force_text() resolves the lazy translation so .replace() can match it.
        msg = force_text(_('Hold down "Control", or "Command" on a Mac, to select more than one.'))

        for name, field in self.fields.items():
            if isinstance(field, GeoJSONFormField):
                # A map widget is shown instead, so the help text is noise.
                field.help_text = ''
            else:
                field.help_text = field.help_text.replace(msg, '')


# Inlines


class SectionImageInline(TranslatableStackedInline, NestedStackedInline):
    """Nested inline for images attached to a hearing section."""
    model = models.SectionImage
    extra = 0
    exclude = ("title",)
    formfield_overrides = {
        TextField: {'widget': ShortTextAreaWidget}
    }


class SectionInlineFormSet(TranslatableBaseInlineFormSet):
    """Formset that enforces the section-type invariants of a hearing."""

    def clean(self):
        super().clean()

        # validate that there is exactly one main and no more than one closure info sections
        mains = 0
        closure_infos = 0
        for form in self.forms:
            # Skip forms without cleaned_data (invalid) and forms marked for deletion.
            if not hasattr(form, 'cleaned_data') or form.cleaned_data.get('DELETE'):
                continue

            section_type = form.cleaned_data.get('type')
            if not section_type:
                continue

            if section_type.identifier == InitialSectionType.MAIN:
                mains += 1
            elif section_type.identifier == InitialSectionType.CLOSURE_INFO:
                closure_infos += 1

        if mains != 1:
            raise ValidationError(_('There must be exactly one main section.'))

        if closure_infos > 1:
            raise ValidationError(_('There cannot be more than one closure info section.'))


class SectionInline(NestedStackedInline, TranslatableStackedInline):
    """Inline editor for a hearing's sections, with per-field customization."""
    model = models.Section
    extra = 1
    inlines = [SectionImageInline]
    exclude = ("published",)
    formfield_overrides = {
        TextField: {'widget': ShortTextAreaWidget}
    }
    formset = SectionInlineFormSet

    def formfield_for_dbfield(self, db_field, **kwargs):
        """
        Customize individual section form fields: rich-text widget and default
        content, default section type for new objects, plugin selector, and a
        hidden id field for unsaved rows.
        """
        # ``obj`` is bound via the partial in get_formset(); pop it so the
        # superclass does not receive an unexpected kwarg.
        obj = kwargs.pop("obj", None)
        if db_field.name == "content":
            kwargs["widget"] = CKEditorUploadingWidget
            # Some initial value is needed for every section to workaround a bug in nested inlines
            # that causes an integrity error to be raised when a section image is added but the parent
            # section isn't saved.
            kwargs["initial"] = _("Enter text here.")
        if not getattr(obj, "pk", None):
            # New hearing: preselect the main section type and seed the intro text.
            if db_field.name == "type":
                kwargs["initial"] = models.SectionType.objects.get(identifier=InitialSectionType.MAIN)
            elif db_field.name == "content":
                kwargs["initial"] = _("Enter the introduction text for the hearing here.")
        field = super().formfield_for_dbfield(db_field, **kwargs)
        if db_field.name == "plugin_identifier":
            widget = self._get_plugin_selection_widget(hearing=obj)
            field.label = _("Plugin")
            field.widget = widget
        if db_field.name == "id" and not (obj and obj.pk):
            field.widget = forms.HiddenInput()
        return field

    def _get_plugin_selection_widget(self, hearing):
        """
        Build a <select> of available plugins from settings.DEMOCRACY_PLUGINS,
        keeping any identifiers already stored on the hearing's sections even
        if they are no longer registered.
        """
        choices = [("", "------")]
        plugins = getattr(settings, "DEMOCRACY_PLUGINS")
        if hearing and hearing.pk:
            current_plugin_identifiers = set(hearing.sections.values_list("plugin_identifier", flat=True))
        else:
            current_plugin_identifiers = set()
        for plugin_identifier in sorted(current_plugin_identifiers):
            if plugin_identifier and plugin_identifier not in plugins:
                # The plugin has been unregistered or something?
                choices.append((plugin_identifier, plugin_identifier))
        for idfr, classpath in sorted(plugins.items()):
            choices.append((idfr, get_implementation(idfr).display_name or idfr))
        widget = forms.Select(choices=choices)
        return widget

    def get_formset(self, request, obj=None, **kwargs):
        """Bind request/obj into the formfield callback; no extra blank rows when editing."""
        kwargs["formfield_callback"] = partial(self.formfield_for_dbfield, request=request, obj=obj)
        if getattr(obj, "pk", None):
            kwargs['extra'] = 0
        return super().get_formset(request, obj, **kwargs)


# Admins


class HearingGeoAdmin(LeafletGeoAdmin):
    """Leaflet map configuration for hearing geometry editing."""
    settings_overrides = {
        'DEFAULT_CENTER': (60.192059, 24.945831),  # Helsinki
        'DEFAULT_ZOOM': 11,
    }


class HearingAdmin(NestedModelAdminMixin, HearingGeoAdmin, TranslatableAdmin):
    """Admin for hearings: nested section inlines, map widget, soft deletion."""

    class Media:
        js = ("admin/ckeditor-nested-inline-fix.js",)

    inlines = [SectionInline]
    list_display = ("slug", "published", "title", "open_at", "close_at", "force_closed")
    list_filter = ("published",)
    search_fields = ("slug", "translations__title")
    readonly_fields = ("preview_url",)
    fieldsets = (
        (None, {
            "fields": ("title", "labels", "slug", "preview_url", "organization")
        }),
        (_("Availability"), {
            "fields": ("published", "open_at", "close_at", "force_closed")
        }),
        (_("Area"), {
            "fields": ("geojson",)
        }),
        (_("Contact info"), {
            "fields": ("contact_persons",)
        })
    )
    formfield_overrides = {
        TextField: {'widget': ShortTextAreaWidget}
    }
    form = FixedModelForm
    actions = ("copy_as_draft", "delete_selected")
    ordering = ("slug",)

    def copy_as_draft(self, request, queryset):
        """Admin action: duplicate each selected hearing as an unpublished draft."""
        for hearing in queryset:
            copy_hearing(hearing, published=False)
            # NOTE(review): the title is interpolated *before* the gettext
            # lookup, so this string will never match the translation catalog;
            # the usual form is _('Copied Hearing "%s" as a draft.') % hearing.title.
            # Confirm before changing, as it alters the runtime string handling.
            self.message_user(request, _('Copied Hearing "%s" as a draft.' % hearing.title))

    def preview_url(self, obj):
        # Read-only field shown in the change form.
        return obj.preview_url
    preview_url.short_description = _('Preview URL')

    def formfield_for_manytomany(self, db_field, request=None, **kwargs):
        """Use a Select2 widget for the labels M2M field."""
        if db_field.name == "labels":
            kwargs["widget"] = Select2SelectMultiple
        return super().formfield_for_manytomany(db_field, request, **kwargs)

    def delete_selected(self, request, queryset):
        """
        Replacement for the stock delete action: soft-deletes immediately
        (no confirmation page) so hearings are never hard-deleted.
        """
        if not self.has_delete_permission(request):
            raise PermissionDenied
        hearing_count = queryset.count()
        if hearing_count:
            for hearing in queryset:
                hearing.soft_delete()
            self.message_user(request, _('Successfully deleted %(count)d %(items)s.') % {
                'count': hearing_count, 'items': model_ngettext(self.opts, hearing_count)
            })
    delete_selected.short_description = _('Delete selected %(verbose_name_plural)s')

    def save_formset(self, request, form, formset, change):
        """Soft-delete removed inline objects instead of hard-deleting them."""
        objects = formset.save(commit=False)
        for obj in formset.deleted_objects:
            obj.soft_delete()
        for obj in objects:
            obj.save()
        formset.save_m2m()


class LabelAdmin(TranslatableAdmin, admin.ModelAdmin):
    exclude = ("published",)


class SectionTypeAdmin(TranslatableAdmin, admin.ModelAdmin):
    fields = ("name_singular", "name_plural")

    def get_queryset(self, request):
        # Built-in (initial) section types are not editable in the admin.
        return super().get_queryset(request).exclude_initial()


class OrganizationAdmin(admin.ModelAdmin):
    formfield_overrides = {
        ManyToManyField: {'widget': FilteredSelectMultiple("ylläpitäjät", is_stacked=False)},
    }
    exclude = ('published', )


class ContactPersonAdmin(TranslatableAdmin, admin.ModelAdmin):
    list_display = ('name', 'title', 'organization', 'phone', 'email')
    exclude = ('published',)


# Wire it up!

admin.site.register(models.Label, LabelAdmin)
admin.site.register(models.Hearing, HearingAdmin)
admin.site.register(models.SectionType, SectionTypeAdmin)
admin.site.register(models.Organization, OrganizationAdmin)
admin.site.register(models.ContactPerson, ContactPersonAdmin)
{ "repo_name": "stephawe/kerrokantasi", "path": "democracy/admin/__init__.py", "copies": "1", "size": "9350", "license": "mit", "hash": -8054919722457346000, "line_mean": 35.7992125984, "line_max": 106, "alpha_frac": 0.6545415641, "autogenerated": false, "ratio": 4.1103781882146, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 1, "avg_score": 0.0008051180802474549, "num_lines": 254 }
from functools import partial

from django import forms
from django.contrib import admin, messages
from django.forms.models import modelform_factory
from django.forms.widgets import CheckboxSelectMultiple
from django.shortcuts import render
from django.utils.functional import cached_property
from django.utils.translation import ugettext_lazy as _
from sentry_sdk import capture_exception


class BulkUpdateMixin:
    """
    ModelAdmin mixin adding a two-phase "bulk update" action:
    first the user picks which fields to update, then fills in the new
    values, which are applied to every selected object.
    """
    actions = ("bulk_update",)
    # Optional whitelist / blacklist of model fields offered for bulk update.
    bulk_update_fields = None
    bulk_update_exclude = None
    # Optional form class used as the base for the generated update form.
    bulk_update_form = None

    @cached_property
    def BulkUpdateFieldsForm(self):
        """
        Dynamically built form with a single checkbox field listing the
        model fields the user may choose to update (phase 'fields').
        """
        return type(
            "BulkUpdateFieldsForm",
            (forms.Form,),
            {
                "fields": forms.MultipleChoiceField(
                    label=_("Fields to update"),
                    choices=[
                        (field_name, field.label)
                        for field_name, field in forms.models.fields_for_model(
                            self.model,
                            fields=self.bulk_update_fields,
                            exclude=self.bulk_update_exclude,
                        ).items()
                    ],
                    widget=CheckboxSelectMultiple,
                ),
            },
        )

    def bulk_update(self, request, queryset):
        """
        Admin action implementing the bulk update workflow.
        The current phase travels in a hidden POST field.
        """
        phase = request.POST.get("phase")
        hidden_fields = []
        # phases: None > 'fields' > 'update'
        if phase is not None:
            # Phase 'fields' (or later): the field selection must validate first.
            form = self.BulkUpdateFieldsForm(request.POST)
            if form.is_valid():
                # Carry the chosen field names forward as hidden inputs.
                hidden_fields = [("fields", field) for field in form.cleaned_data["fields"]]
                BulkUpdateForm = modelform_factory(
                    self.model,
                    form=self.bulk_update_form or self.form,
                    fields=form.cleaned_data["fields"],
                    formfield_callback=partial(self.formfield_for_dbfield, request=request),
                )
                if phase == "update":
                    form = BulkUpdateForm(request.POST)
                    if form.is_valid():
                        num_updated = 0
                        for instance in queryset.all():
                            try:
                                # Re-validate and save per instance so model-level
                                # validation runs against each object.
                                instance_form = BulkUpdateForm(request.POST, request.FILES, instance=instance)
                                # NOTE(review): assert is stripped under `python -O`,
                                # which would silently save invalid forms; an explicit
                                # `if not ...: raise` would be safer — confirm intent.
                                assert instance_form.is_valid()
                                instance_form.save()
                            except Exception:
                                # Report to Sentry (captures the active exception)
                                # and keep going with the remaining instances.
                                capture_exception()
                                # NOTE(review): _(f"...") interpolates before the
                                # gettext lookup, so this message cannot be translated
                                # from the catalog — confirm before changing.
                                self.message_user(
                                    request,
                                    _(f"Failed to update item {instance}."),
                                    messages.ERROR,
                                )
                            else:
                                num_updated += 1
                        self.message_user(request, _(f"{num_updated} items were updated."))
                        return
                else:
                    # Phase 'fields' validated: show the empty value form next.
                    form = BulkUpdateForm()
                    phase = "update"
        else:
            # First call from the changelist: show the field selection form.
            form = self.BulkUpdateFieldsForm()
            phase = "fields"
        hidden_fields.append(("phase", phase))
        return render(
            request,
            "leprikon/admin/bulk_update_form.html",
            {
                "title": _("Bulk update"),
                "queryset": queryset,
                "opts": self.model._meta,
                "form": form,
                "media": self.media + form.media,
                "action": "bulk_update",
                "action_checkbox_name": admin.helpers.ACTION_CHECKBOX_NAME,
                "hidden_fields": hidden_fields,
            },
        )

    bulk_update.short_description = _("Bulk update selected items")
{ "repo_name": "leprikon-cz/leprikon", "path": "leprikon/admin/bulkupdate.py", "copies": "1", "size": "3821", "license": "bsd-3-clause", "hash": 7911113828961223000, "line_mean": 38.3917525773, "line_max": 110, "alpha_frac": 0.4781470819, "autogenerated": false, "ratio": 5.255845942228335, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.6233993024128336, "avg_score": null, "num_lines": null }
from functools import partial

from django import forms
from django.forms.forms import pretty_name
from django.utils.translation import ugettext_lazy as _

from fields import ContactFormField


class ContactFieldFormMixin(object):
    """
    Provides the necessary form logic for generating pseudo fields for
    a contact field object.

    Each contact field object in the form will receive a set of fields,
    for all valid groups and labels for that field. If you wish to create
    a form using only a subset of the valid fields, then provide this using
    the contact_group_subsets and contact_label_subsets arguments:

    contact_group_subsets = {
        'main_contact': ['business']
        'billing_contact': ['billing'],
    }
    contact_label_subsets = {
        'main_contact': ['full_name', 'company_name', 'phone']
    }

    Note that existing values for valid fields that have been left off the form
    will be left intact, so you can, for example, create a seperate model form
    for billing and personal details using the same field.
    """
    # Class-level defaults; can be overridden per-instance via __init__ kwargs.
    contact_group_subsets = {}
    contact_label_subsets = {}
    contact_field_kwargs = {}

    def __init__(
        self, contact_group_subsets=None, contact_label_subsets=None,
        contact_field_kwargs=None, *args, **kwargs
    ):
        super(ContactFieldFormMixin, self).__init__(*args, **kwargs)

        # Find all the contact fields, and create dynamic fields based on
        # valid_groups and valid_labels, filtered by relevant subsets
        if contact_group_subsets is None:
            contact_group_subsets = self.contact_group_subsets
        if contact_label_subsets is None:
            contact_label_subsets = self.contact_label_subsets

        # Get a mapping of required fields and widgets
        if contact_field_kwargs is None:
            contact_field_kwargs = self.contact_field_kwargs

        # Maps contact field name -> {pseudo field name: form field}.
        self._contact_pseudo_fields = {}

        # NOTE: Python 2 only — tuple-unpacking lambda and iteritems().
        for field_name, field in filter(
            lambda (field_name, field): isinstance(field, ContactFormField),
            self.fields.iteritems()
        ):
            # None means "no restriction": keep every valid group/label.
            valid_groups_for_field = contact_group_subsets.get(field_name)
            valid_labels_for_field = contact_label_subsets.get(field_name)
            valid_groups = filter(
                lambda group:
                valid_groups_for_field is None or group in valid_groups_for_field,
                field.get_valid_groups()
            )
            valid_labels = filter(
                lambda label:
                valid_labels_for_field is None or label in valid_labels_for_field,
                field.get_valid_labels()
            )
            self._contact_pseudo_fields[field_name] = {}
            # One pseudo field per (group, label) pair, named
            # <field>__<group>__<label>.
            for valid_group in valid_groups:
                for valid_label in valid_labels:
                    pseudo_field_name = '%s__%s__%s' % (field_name, valid_group, valid_label)
                    field_kwargs = {}
                    field_kwargs.update(contact_field_kwargs.get(pseudo_field_name, {}))
                    # The 'field' kwarg selects the form field class; it is
                    # popped so it is not passed to the field constructor.
                    FieldClass = field_kwargs.pop('field', forms.CharField)
                    if not 'required' in field_kwargs:
                        field_kwargs['required'] = False
                    # Seed the pseudo field with the current value stored on
                    # the main contact field, if any.
                    if self[field_name].value() is not None:
                        initial = self.fields[field_name].as_dict(self[field_name].value()).get(valid_group, {}).get(valid_label)
                    else:
                        initial = None
                    pseudo_field = FieldClass(
                        initial=initial,
                        label=field.label_format.format(
                            field=unicode(field.display_name),
                            group=unicode(field.group_display_names.get(
                                valid_group, pretty_name(valid_group)
                            )),
                            label=unicode(field.label_display_names.get(
                                valid_label, pretty_name(valid_label)
                            ))
                        ),
                        **field_kwargs
                    )
                    self.fields[pseudo_field_name] = pseudo_field
                    self._contact_pseudo_fields[field_name][pseudo_field_name] = pseudo_field

    def __getattribute__(self, name, *args, **kwargs):
        # Intercept clean_<contact_field> lookups (used by Django's form
        # cleaning machinery) and route them to _clean_CONTACTFIELD.
        # Safe from recursion: the attribute accesses below do not start
        # with 'clean_'.
        if name[:6] == 'clean_' and name[6:] in self._contact_pseudo_fields.keys():
            return partial(self._clean_CONTACTFIELD, name[6:])
        return super(ContactFieldFormMixin, self).__getattribute__(name, *args, **kwargs)

    def _clean_CONTACTFIELD(self, contact_field_name):
        """
        Find all the pseudo fields for a contact field in form data, and
        use them to update the main field.
        """
        cleaned_data = self.fields[contact_field_name].as_dict(
            self[contact_field_name].value()
        )
        for pseudo_field_name, field in self._contact_pseudo_fields[contact_field_name].iteritems():
            pseudo_field_value = self.data.get(pseudo_field_name, None)
            if pseudo_field_value is not None:
                # In concise mode empty submitted values are ignored so they
                # do not clobber existing data.
                if pseudo_field_value or not self.fields[contact_field_name].concise_mode():
                    field_name, group, label = pseudo_field_name.split('__')
                    cleaned_data.setdefault(group, {})[label] = pseudo_field_value

        return cleaned_data
{ "repo_name": "ColinBarnwell/django-contactfield", "path": "contactfield/forms.py", "copies": "1", "size": "5385", "license": "mit", "hash": 1713230764770174200, "line_mean": 43.1393442623, "line_max": 129, "alpha_frac": 0.5816155989, "autogenerated": false, "ratio": 4.42481511914544, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 1, "avg_score": 0.0011042867002789155, "num_lines": 122 }
from functools import partial

from django import forms
from django.utils.functional import cached_property
from django.utils.module_loading import import_string
from wagtail.core import blocks
from wagtail.core.blocks import PageChooserBlock, RichTextBlock, FieldBlock, RawHTMLBlock, IntegerBlock
from wagtail.core.models import Page
from wagtail.embeds.blocks import EmbedBlock
from wagtail.images.blocks import ImageChooserBlock

from album.models import Album
from core.widgets import JqueryChosenSelectMultipleWithAddObject
from face.models import Face
from location.models import Location
from resources.models import Resource

# NOTE(review): `operator` appears unused in this module — verify before removing.
import operator

# Shared left/right alignment choices used by many blocks below.
ALIGNMENT_CHOICES = [('left', 'Left column'), ('right', 'Right column')]

# Pre-configured rich text blocks: inline-only formatting vs full paragraph set.
RichTextMiniBlock = partial(RichTextBlock, features=['bold', 'italic'])

RichTextParagraphBlock = partial(RichTextBlock, features=['h2', 'h3', 'h4', 'h5', 'h6', 'bold', 'italic', 'ol', 'ul', 'hr', 'link', 'document-link'])


class ModelMultipleChoiceBlock(FieldBlock):
    """
    StreamField block wrapping a ModelMultipleChoiceField, storing the
    selected objects as a list of primary keys.
    """

    def __init__(self, target_model, required=True, help_text=None, **kwargs):
        self.target_model = target_model
        self.field = forms.ModelMultipleChoiceField(
            queryset=self.target_model.objects.all(),
            widget=JqueryChosenSelectMultipleWithAddObject,
            required=required,
            help_text=help_text,
        )
        super(ModelMultipleChoiceBlock, self).__init__(**kwargs)

    def to_python(self, value):
        # Stored JSON value (list of pks) -> queryset of model instances.
        if not value:
            return value
        else:
            return self.target_model.objects.filter(pk__in=value)

    def get_prep_value(self, value):
        # Model instances -> list of pks for JSON storage.
        if not value:
            return value
        else:
            return [each.pk for each in value]

    def value_from_form(self, value):
        # Normalize form data: pass instances through, resolve pks to instances.
        if not value or all(isinstance(each, self.target_model) for each in value):
            return value
        else:
            return self.target_model.objects.filter(pk__in=value)

    def value_for_form(self, value):
        # Inverse of value_from_form: the widget needs a list of pks.
        if not value:
            return value
        elif all(isinstance(each, self.target_model) for each in value):
            return [each.pk for each in value]
        else:
            return []


# TODO implement caption in the block it is implemented in.
class ImageBlock(blocks.StructBlock):
    """A single chooser-selected image."""
    image = ImageChooserBlock()

    class Meta:
        icon = 'image'
        template = 'article/blocks/image.html'


# TODO: This is implemented in the latest wagtail. Remove it after upgrading.
class PageTypeChooserBlock(PageChooserBlock):
    """Custom implementation of PageChooserBlock to limit page selection to specific page types.
    """
    def __init__(self, for_models=[Page], **kwargs):
        self.for_models = for_models
        super(PageTypeChooserBlock, self).__init__(**kwargs)

    @cached_property
    def target_model(self):
        # A single model can be targeted precisely; otherwise fall back to Page.
        if len(self.for_models) == 1:
            return self.for_models[0]
        else:
            from wagtail.core.models import Page
            return Page

    @cached_property
    def widget(self):
        from django.utils.translation import ugettext_lazy as _
        from wagtail.admin.widgets import AdminPageChooser
        # Support importing from dotted string in-order to prevent circular-import for certain models(Say Article)
        # Note: this rebinds self.for_models in place, resolving any dotted
        # strings to model classes on first widget access.
        self.for_models = [import_string(model) if isinstance(model, str) else model for model in self.for_models]

        if any(not issubclass(each, Page) for each in self.for_models):
            raise TypeError("All models passed should be a sub-class of wagtail.core.models.Page")

        model_names = ' / '.join(each.__name__.lower() for each in self.for_models)
        admin_page_chooser = AdminPageChooser(target_models=self.for_models)
        admin_page_chooser.choose_one_text = _('Choose a %s' % model_names)
        admin_page_chooser.choose_another_text = _('Choose another %s' % model_names)
        admin_page_chooser.link_to_chosen_text = _('Edit this %s' % model_names)
        return admin_page_chooser


class FullWidthImageBlock(blocks.StructBlock):
    image = ImageBlock()
    caption = RichTextMiniBlock(required=False)

    class Meta:
        icon = 'image'
        template = 'article/blocks/full_width_image.html'
        label = 'Full width image'


class TwoColumnImageBlock(blocks.StructBlock):
    image_left = ImageBlock()
    image_right = ImageBlock()

    class Meta:
        icon = 'image'
        template = 'article/blocks/two_column_image.html'


class ParagraphBlock(blocks.StructBlock):
    """Rich text paragraph with optional centered alignment."""
    ALIGN_CONTENT_CHOICES = [('default', 'Default'), ('center', 'Center')]

    content = RichTextParagraphBlock()
    align_content = blocks.ChoiceBlock(choices=ALIGN_CONTENT_CHOICES, default=ALIGN_CONTENT_CHOICES[0][0])

    class Meta:
        icon = 'title'
        label = 'Text'
        template = 'article/blocks/paragraph.html'


class PargraphBlockWithOptionalContent(ParagraphBlock):
    # Same as ParagraphBlock but the text itself may be left empty.
    content = RichTextParagraphBlock(required=False)


class ParagraphWithImageBlock(blocks.StructBlock):
    image = ImageBlock()
    align_image = blocks.ChoiceBlock(choices=ALIGNMENT_CHOICES, default=ALIGNMENT_CHOICES[0][0])
    content = ParagraphBlock()

    class Meta:
        icon = 'doc-full'
        label = 'Paragraphs with an image'
        template = 'article/blocks/paragraph_with_image.html'


class FaceBlock(blocks.StructBlock):
    """Reference to a Face page."""
    face = PageTypeChooserBlock(for_models=[Face])

    class Meta:
        icon = 'image'
        template = 'article/blocks/face.html'


class ParagraphWithBlockQuoteBlock(blocks.StructBlock):
    quote = RichTextMiniBlock()
    align_quote = blocks.ChoiceBlock(choices=ALIGNMENT_CHOICES, default=ALIGNMENT_CHOICES[1][0])
    content = ParagraphBlock()

    class Meta:
        icon = 'openquote'
        label = 'Quote with text'
        template = 'article/blocks/paragraph_with_block_quote.html'


class FullWidthBlockQuote(blocks.StructBlock):
    quote = RichTextMiniBlock()

    class Meta:
        icon = 'openquote'
        label = 'Full width quote'
        template = 'article/blocks/full_width_block_quote.html'


class NColumnParagraphBlock(blocks.StructBlock):
    """A list of paragraphs rendered as columns."""
    paragraph = blocks.ListBlock(ParagraphBlock())

    class Meta:
        template = 'article/blocks/columnar_paragraph.html'
        label = 'Columnar text'
        icon = 'title'


class ParagraphWithEmbedBlock(blocks.StructBlock):
    embed = EmbedBlock()
    embed_caption = RichTextMiniBlock(required=False)
    embed_max_width = IntegerBlock(required=False, help_text="Optional field. Maximum width of the content in pixels to"
                                                            " be requested from the content provider(e.g YouTube). "
                                                            "If the requested width is not supported, provider will be"
                                                            " supplying the content with nearest available width.")
    embed_align = blocks.ChoiceBlock(choices=ALIGNMENT_CHOICES, default=ALIGNMENT_CHOICES[0][0])
    content = ParagraphBlock()

    class Meta:
        icon = 'media'
        label = 'Embed with text'
        template = 'article/blocks/paragraph_with_embed.html'


class NColumnImageBlock(blocks.StructBlock):
    """A list of images rendered as columns with a shared height and caption."""
    images = blocks.ListBlock(ImageBlock())
    height = IntegerBlock(min_value=0, required=True, default=380)
    caption = RichTextMiniBlock(required=False)

    class Meta:
        template = 'article/blocks/columnar_image.html'
        label = 'Columnar Images'


class ParagraphWithRawEmbedBlock(blocks.StructBlock):
    embed = RawHTMLBlock(help_text="Embed HTML code(an iframe)")
    embed_caption = RichTextMiniBlock(required=False)
    embed_align = blocks.ChoiceBlock(choices=ALIGNMENT_CHOICES, default=ALIGNMENT_CHOICES[0][0])
    content = PargraphBlockWithOptionalContent(required=False)

    class Meta:
        icon = 'media'
        label = 'Raw embed with text'
        template = 'article/blocks/paragraph_with_raw_embed.html'


class FullWidthEmbedBlock(blocks.StructBlock):
    embed = EmbedBlock(required=True, help_text="Enter URL for the embed block")
    embed_caption = RichTextMiniBlock(required=False)

    class Meta:
        icon = 'media'
        label = 'Full width embed'
        template = 'article/blocks/full_width_embed.html'


class VideoWithQuoteBlock(blocks.StructBlock):
    video = EmbedBlock(help_text="YouTube video URL")
    video_height = IntegerBlock(required=True, default=270)
    video_caption = RichTextMiniBlock(required=False)
    quote = RichTextMiniBlock()
    # NOTE(review): ALIGNMENT_CHOICES[0][1] is the display label 'Left column',
    # not a choice key ('left'/'right') — every other block here uses [i][0].
    # Looks like an off-by-one; confirm intended default before changing.
    align_quote = blocks.ChoiceBlock(choices=ALIGNMENT_CHOICES, default=ALIGNMENT_CHOICES[0][1])

    class Meta:
        icon = 'openquote'
        label = 'Video with quote'
        template = 'article/blocks/video_with_block_quote.html'


class ParagraphWithMapBlock(blocks.StructBlock):
    locations = ModelMultipleChoiceBlock(target_model=Location)
    map_align = blocks.ChoiceBlock(choices=ALIGNMENT_CHOICES, default=ALIGNMENT_CHOICES[0][0])
    content = ParagraphBlock()

    class Meta:
        label = 'Map with text'
        template = 'article/blocks/paragraph_with_map.html'
        icon = 'site'


class ImageWithCaptionAndHeightBlock(ImageBlock):
    height = IntegerBlock(min_value=0, required=True, default=380)
    caption = RichTextMiniBlock(required=False)


class ImageWithQuoteAndParagraphBlock(blocks.StructBlock):
    image = ImageWithCaptionAndHeightBlock(required=True)
    align_image = blocks.ChoiceBlock(choices=ALIGNMENT_CHOICES, default=ALIGNMENT_CHOICES[0][0])
    content_1 = PargraphBlockWithOptionalContent(required=False)
    quote = FullWidthBlockQuote(required=True)
    content_2 = PargraphBlockWithOptionalContent(required=False)

    class Meta:
        icon = "image"
        label = 'Image with quote and text'
        template = 'article/blocks/image_with_quote_and_paragraph.html'


# TODO remove this class , this module is deprecated.
class ImageWithBlockQuote(blocks.StructBlock):
    image = ImageWithCaptionAndHeightBlock()
    quote = RichTextMiniBlock()
    align_quote = blocks.ChoiceBlock(choices=ALIGNMENT_CHOICES, default=ALIGNMENT_CHOICES[0][0])

    class Meta:
        icon = 'image'
        template = 'article/blocks/image_with_block_quote.html'
        label = 'Image with block quote'


class ParagraphWithPageBlock(blocks.StructBlock):
    # 'article.models.Article' is a dotted string to avoid a circular import;
    # PageTypeChooserBlock resolves it lazily in its widget property.
    page = PageTypeChooserBlock(for_models=['article.models.Article', Album, Face, Resource])
    align_image = blocks.ChoiceBlock(choices=ALIGNMENT_CHOICES, default=ALIGNMENT_CHOICES[0][0])
    content = ParagraphBlock()

    class Meta:
        icon = 'link'
        template = 'article/blocks/paragraph_with_page.html'
        label = 'Page reference with text'


class NColumnImageWithTextBlock(NColumnImageBlock):
    align_columnar_images = blocks.ChoiceBlock(choices=ALIGNMENT_CHOICES, default=ALIGNMENT_CHOICES[0][0])
    content = PargraphBlockWithOptionalContent(required=False)

    class Meta:
        icon = 'image'
        label = 'Columnar images with text'
{ "repo_name": "PARINetwork/pari", "path": "article/streamfields/blocks.py", "copies": "1", "size": "11102", "license": "bsd-3-clause", "hash": 2680538423342437000, "line_mean": 34.928802589, "line_max": 120, "alpha_frac": 0.6811385336, "autogenerated": false, "ratio": 4.065177590626145, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 1, "avg_score": 0.0009462196690194968, "num_lines": 309 }
from functools import partial

from django import forms
from django.utils import six
from django.utils.datastructures import SortedDict

from websettings import websettings


def get_settingstore_fields(setting_store):
    """Build an ordered mapping of form fields, one per setting key.

    Each field is a CharField whose initial value is read lazily from the
    store, so the form always shows the current value.
    """
    field_pairs = []
    for key in setting_store.settings.keys():
        char_field = forms.CharField(
            max_length=255,
            label=key,
            initial=partial(getattr, setting_store, key),
        )
        field_pairs.append((key, char_field))
    return SortedDict(field_pairs)


class SettingStoreFieldsMetaclass(type):
    """Metaclass that derives ``base_fields`` from the class's setting store."""

    def __new__(cls, name, bases, attrs):
        form_class = super(SettingStoreFieldsMetaclass, cls).__new__(cls, name, bases, attrs)
        store = attrs.get('setting_store', websettings)
        form_class.base_fields = get_settingstore_fields(store)
        return form_class


class BaseSettingStoreForm(forms.BaseForm):
    """Form base that writes its cleaned values back into the setting store."""

    def save(self):
        if not hasattr(self, 'cleaned_data'):
            # TODO: need more humanization.
            raise ValueError('Form needs cleaned_data')
        for key, value in self.cleaned_data.items():
            setattr(self.setting_store, key, value)


class SettingStoreForm(six.with_metaclass(SettingStoreFieldsMetaclass, BaseSettingStoreForm)):
    # Kept as a class attribute so tests can substitute another store.
    setting_store = websettings
{ "repo_name": "hirokiky/django-websettings", "path": "websettings/forms.py", "copies": "1", "size": "1320", "license": "mit", "hash": 2925833951695193000, "line_mean": 32, "line_max": 94, "alpha_frac": 0.6553030303, "autogenerated": false, "ratio": 4.177215189873418, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.5332518220173418, "avg_score": null, "num_lines": null }
from functools import partial

from django import forms

from pdl.models import Proyecto
from pdl.models import Slug

# Date widgets sharing the datepicker styling, differing only in placeholder.
DateInput1 = partial(forms.DateInput, {'class': 'datepicker form-control', 'placeholder': 'Fecha inicio'})
DateInput2 = partial(forms.DateInput, {'class': 'datepicker form-control', 'placeholder': 'Fecha fin'})


class SearchAdvancedForm(forms.Form):
    """Advanced search form for law projects (proyectos de ley)."""

    # Free-text search term.
    query = forms.CharField(
        max_length=200,
        widget=forms.TextInput(attrs={
            'class': 'form-control',
            'placeholder': 'Palabra de búsqueda',
        }),
        required=False,
    )
    # Optional date range filter.
    date_from = forms.DateField(
        widget=DateInput1(),
        label="Fecha inicio",
        required=False,
        error_messages={'invalid': 'Ingrese fecha válida'},
    )
    date_to = forms.DateField(
        widget=DateInput2(),
        label="Fecha fin",
        required=False,
        error_messages={'invalid': 'Ingrese fecha válida'},
    )
    # Hard-coded list of congressional committees.
    comision = forms.ChoiceField(
        widget=forms.Select(attrs={'class': 'form-control'}),
        label='Comisión',
        required=False,
        choices=[
            ('---', '---'),
            ('Agraria', 'Agraria'),
            ('Ciencia', 'Ciencia'),
            ('Comercio Exterior', 'Comercio Exterior'),
            ('Constitución', 'Constitución'),
            ('Cultura', 'Cultura'),
            ('Defensa del Consumidor', 'Defensa del Consumidor'),
            ('Defensa Nacional', 'Defensa Nacional'),
            ('Descentralización', 'Descentralización'),
            ('Economía', 'Economía'),
            ('Educación', 'Educación'),
            ('Energía', 'Energía'),
            ('Fiscalización', 'Fiscalización'),
            ('Inclusión Social', 'Inclusión Social'),
            ('Inteligencia', 'Inteligencia'),
            ('Justicia', 'Justicia'),
            ('Mujer', 'Mujer'),
            ('Presupuesto', 'Presupuesto'),
            ('Producción Micro', 'Producción Micro'),
            ('Pueblos Andinos', 'Pueblos Andinos'),
            ('Relaciones Exteriores', 'Relaciones Exteriores'),
            ('Salud', 'Salud'),
            ('Trabajo', 'Trabajo'),
            ('Transportes', 'Transportes'),
            ('Vivienda', 'Vivienda'),
        ]
    )
    # Congressperson (bill author) selector.
    congresista = forms.ModelChoiceField(
        Slug.objects.all().order_by('nombre'),
        label='Búsqueda por author de proyecto de ley.',
        required=False,
        empty_label='--Escoger nombre--',
        widget=forms.Select(attrs={'class': 'form-control'}),
    )

    # NOTE(review): this queryset runs at class-definition (module import)
    # time, hitting the database once per process start; the choices are
    # therefore frozen until restart. Consider moving into __init__ or a
    # callable — confirm before changing.
    tmp = Proyecto.objects.filter(
        grupo_parlamentario__isnull=False).exclude(
        grupo_parlamentario='').values_list(
        'grupo_parlamentario', flat=True).order_by('grupo_parlamentario').distinct()
    choices = [('--Escoger bancada--', '--Escoger bancada--',)]
    for i in tmp:
        choices.append((i, i,))

    # Parliamentary group selector, populated from the query above.
    grupo_parlamentario = forms.ChoiceField(
        choices=choices,
        label='Búsqueda por grupo parlamentario.',
        required=False,
        widget=forms.Select(attrs={'class': 'form-control'}),
    )
    dispensados_2da_votacion = forms.ChoiceField(
        widget=forms.Select(attrs={'class': 'form-control'}),
        label='Dispensados 2da votación',
        required=False,
        choices=[
            ('---', '---'),
            ('TOTAL dispensados', 'TOTAL dispensados'),
            ('NÚMERO TOTAL DE LEYES', 'NÙMERO TOTAL DE LEYES'),
            ('TOTAL aprobados', 'TOTAL aprobados'),
            ('Dispensados por acuerdo del pleno',
             'Dispensados por acuerdo del pleno'),
            ('Dispensados por junta portavoces',
             'Dispensados por junta portavoces'),
            ('Otros proyectos dispensados', 'Otros proyectos dispensados'),
        ]
    )
    dictamen = forms.ChoiceField(
        widget=forms.Select(attrs={'class': 'form-control'}),
        label='Dictamen',
        required=False,
        choices=[
            ('---', '---'),
            ('NÚMERO TOTAL DE LEYES', 'NÙMERO TOTAL DE LEYES'),
            ('Exonerados de dictamen', 'Exonerados de dictamen'),
        ]
    )
{ "repo_name": "proyectosdeley/proyectos_de_ley", "path": "proyectos_de_ley/search_advanced/forms.py", "copies": "1", "size": "4171", "license": "mit", "hash": 1164876113235531000, "line_mean": 35.6725663717, "line_max": 87, "alpha_frac": 0.555019305, "autogenerated": false, "ratio": 3.476510067114094, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.4531529372114094, "avg_score": null, "num_lines": null }
from functools import partial

from django import forms

from us_ignite.challenges.models import Entry


# Factory for the per-question text fields: a ``CharField`` rendered as a
# ``Textarea``; ``label`` and ``required`` are supplied per question.
question_field = partial(forms.CharField, widget=forms.Textarea)


def get_entry_choices():
    """Return the ``Entry`` status choices limited to draft/submitted."""
    allowed = [Entry.DRAFT, Entry.SUBMITTED]
    return [choice for choice in Entry.STATUS_CHOICES
            if choice[0] in allowed]


def get_field_name(question_id):
    """Derive the form-field name used for a given question id."""
    return u'question_{0}'.format(question_id)


def get_challenge_form(challenge):
    """Generate a dynamic form from the ``Questions`` in a ``Challenge``

    The name of the field is a variation of the ``question.id``.
    """
    attrs = {
        get_field_name(question.id): question_field(
            label=question.question, required=question.is_required)
        for question in challenge.question_set.all()
    }
    attrs['status'] = forms.ChoiceField(choices=get_entry_choices())
    # Build the form class dynamically so each challenge gets exactly the
    # fields its questions define.
    return type('ChallengeForm', (forms.Form, ), attrs)


def get_challenge_initial_data(entry):
    """Generates the entry initial data for the ``ChallengeForm``

    The data is bind to each dynamic field by the question id which is
    unique and replicable between challenges and entries."""
    initial = {
        get_field_name(answer.question_id): answer.answer
        for answer in entry.entryanswer_set.all()
    }
    initial['status'] = entry.status
    return initial
{ "repo_name": "us-ignite/us_ignite", "path": "us_ignite/challenges/forms.py", "copies": "1", "size": "1444", "license": "bsd-3-clause", "hash": -410391114126424400, "line_mean": 29.0833333333, "line_max": 72, "alpha_frac": 0.6849030471, "autogenerated": false, "ratio": 3.82010582010582, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 1, "avg_score": 0, "num_lines": 48 }
from functools import partial

from django import http

import commonware
import requests
import waffle
from rest_framework.exceptions import ParseError
from rest_framework.generics import ListAPIView
from rest_framework.response import Response
from rest_framework.views import APIView

from lib.metrics import get_monolith_client

import amo
from stats.models import Contribution

from mkt.api.authentication import (RestOAuthAuthentication,
                                    RestSharedSecretAuthentication)
from mkt.api.authorization import (AllowAppOwnerOrPermission,
                                   PermissionAuthorization)
from mkt.api.base import CORSMixin, SlugOrIdMixin
# NOTE(review): ``NotImplemented`` here shadows the Python built-in constant;
# it is the project exception class from mkt.api.exceptions.
from mkt.api.exceptions import NotImplemented, ServiceUnavailable
from mkt.webapps.models import Webapp

from .forms import StatsForm


log = commonware.log.getLogger('z.stats')

# Map of URL metric name to monolith metric name.
#
# The 'dimensions' key is optional query string arguments with defaults that is
# passed to the monolith client and used in the facet filters. If the default
# is `None`, the dimension is excluded unless specified via the API.
#
# The 'lines' key is optional and used for multi-line charts. The format is:
#     {'<name>': {'<dimension-key>': '<dimension-value>'}}
# where <name> is what's returned in the JSON output and the dimension
# key/value is what's sent to Monolith similar to the 'dimensions' above.
#
# The 'coerce' key is optional and used to coerce data types returned from
# monolith to other types. Provide the name of the key in the data you want to
# coerce with a callback for how you want the data coerced. E.g.:
#     {'count': str}

# Helper: build a 'lines' mapping {value: {name: value}} for each value.
lines = lambda name, vals: dict((val, {name: val}) for val in vals)

STATS = {
    'apps_added_by_package': {
        'metric': 'apps_added_package_count',
        'dimensions': {'region': 'us'},
        'lines': lines('package_type', amo.ADDON_WEBAPP_TYPES.values()),
    },
    'apps_added_by_premium': {
        'metric': 'apps_added_premium_count',
        'dimensions': {'region': 'us'},
        'lines': lines('premium_type', amo.ADDON_PREMIUM_API.values()),
    },
    'apps_available_by_package': {
        'metric': 'apps_available_package_count',
        'dimensions': {'region': 'us'},
        'lines': lines('package_type', amo.ADDON_WEBAPP_TYPES.values()),
    },
    'apps_available_by_premium': {
        'metric': 'apps_available_premium_count',
        'dimensions': {'region': 'us'},
        'lines': lines('premium_type', amo.ADDON_PREMIUM_API.values()),
    },
    'apps_installed': {
        'metric': 'app_installs',
        'dimensions': {'region': None},
    },
    'total_developers': {
        'metric': 'total_dev_count'
    },
    'total_visits': {
        'metric': 'visits'
    },
    'revenue': {
        'metric': 'gross_revenue',
        # Counts are floats. Let's convert them to strings with 2 decimals.
        'coerce': {'count': lambda d: '{0:.2f}'.format(d)},
    },
}

# Per-app metrics (same structure as STATS); the app id is injected as the
# 'app-id' dimension by AppStats.get().
APP_STATS = {
    'installs': {
        'metric': 'app_installs',
        'dimensions': {'region': None},
    },
    'visits': {
        'metric': 'app_visits',
    },
    'revenue': {
        'metric': 'gross_revenue',
        # Counts are floats. Let's convert them to strings with 2 decimals.
        'coerce': {'count': lambda d: '{0:.2f}'.format(d)},
    },
}


def _get_monolith_data(stat, start, end, interval, dimensions):
    # Query monolith for `stat` over [start, end] at `interval`, applying
    # `dimensions` as facet filters, and shape the result for the API
    # response. Raises ServiceUnavailable when monolith is unreachable and
    # ParseError when monolith rejects the metric.
    #
    # If stat has a 'lines' attribute, it's a multi-line graph. Do a
    # request for each item in 'lines' and compose them in a single
    # response.
    try:
        client = get_monolith_client()
    except requests.ConnectionError as e:
        log.info('Monolith connection error: {0}'.format(e))
        raise ServiceUnavailable

    def _coerce(data):
        # Apply the stat's 'coerce' callbacks in place; falsy values (0,
        # None, '') are intentionally left untouched by the `if` guard.
        for key, coerce in stat.get('coerce', {}).items():
            if data.get(key):
                data[key] = coerce(data[key])
        return data

    try:
        data = {}
        if 'lines' in stat:
            for line_name, line_dimension in stat['lines'].items():
                # NOTE(review): `dimensions` is mutated across iterations;
                # each line's filters accumulate onto the shared dict.
                dimensions.update(line_dimension)
                # NOTE(review): map() here produces a list on Python 2 —
                # confirm before porting to Python 3 (would be a lazy
                # iterator there).
                data[line_name] = map(_coerce, client(stat['metric'], start,
                                                      end, interval,
                                                      **dimensions))
        else:
            data['objects'] = map(_coerce, client(stat['metric'], start, end,
                                                  interval, **dimensions))
    except ValueError as e:
        # This occurs if monolith doesn't have our metric and we get an
        # elasticsearch SearchPhaseExecutionException error.
        log.info('Monolith ValueError for metric {0}: {1}'.format(
            stat['metric'], e))
        raise ParseError('Invalid metric at this time. Try again later.')

    return data


class GlobalStats(CORSMixin, APIView):
    """Read-only endpoint exposing the site-wide metrics in STATS."""

    authentication_classes = (RestOAuthAuthentication,
                              RestSharedSecretAuthentication)
    cors_allowed_methods = ['get']
    permission_classes = (partial(PermissionAuthorization, 'Stats', 'View'),)

    def get(self, request, metric):
        """Return monolith data for one global `metric`.

        404 for unknown metrics, NotImplemented (project exception) while
        the 'stats-api' waffle switch is off.
        """
        if metric not in STATS:
            raise http.Http404('No metric by that name.')

        if not waffle.switch_is_active('stats-api'):
            raise NotImplemented('Stats not enabled for this host.')

        stat = STATS[metric]

        # Perform form validation.
        form = StatsForm(request.GET)
        if not form.is_valid():
            raise ParseError(dict(form.errors.items()))

        qs = form.cleaned_data

        dimensions = {}

        if 'dimensions' in stat:
            for key, default in stat['dimensions'].items():
                val = request.GET.get(key, default)
                if val is not None:
                    # Avoid passing kwargs to the monolith client when the
                    # dimension is None to avoid facet filters being applied.
                    dimensions[key] = request.GET.get(key, default)

        return Response(_get_monolith_data(stat, qs.get('start'),
                                           qs.get('end'), qs.get('interval'),
                                           dimensions))


class AppStats(CORSMixin, SlugOrIdMixin, ListAPIView):
    """Per-app metrics endpoint; looks the app up by pk or app slug."""

    authentication_classes = (RestOAuthAuthentication,
                              RestSharedSecretAuthentication)
    cors_allowed_methods = ['get']
    permission_classes = (AllowAppOwnerOrPermission('Stats', 'View'),)
    queryset = Webapp.objects.all()
    slug_field = 'app_slug'

    def get(self, request, pk, metric):
        """Return monolith data for one APP_STATS `metric` of one app."""
        if metric not in APP_STATS:
            raise http.Http404('No metric by that name.')

        if not waffle.switch_is_active('stats-api'):
            raise NotImplemented('Stats not enabled for this host.')

        app = self.get_object()

        stat = APP_STATS[metric]

        # Perform form validation.
        form = StatsForm(request.GET)
        if not form.is_valid():
            raise ParseError(dict(form.errors.items()))

        qs = form.cleaned_data

        # The app id is always sent as a facet filter on top of any
        # metric-specific dimensions.
        dimensions = {'app-id': app.id}

        if 'dimensions' in stat:
            for key, default in stat['dimensions'].items():
                val = request.GET.get(key, default)
                if val is not None:
                    # Avoid passing kwargs to the monolith client when the
                    # dimension is None to avoid facet filters being applied.
                    dimensions[key] = request.GET.get(key, default)

        return Response(_get_monolith_data(stat, qs.get('start'),
                                           qs.get('end'), qs.get('interval'),
                                           dimensions))


class TransactionAPI(CORSMixin, APIView):
    """
    API to query by transaction ID.

    Note: This is intended for Monolith to be able to associate a Solitude
    transaction with an app and price tier amount in USD.
    """
    authentication_classes = (RestOAuthAuthentication,
                              RestSharedSecretAuthentication)
    cors_allowed_methods = ['get']
    permission_classes = (partial(PermissionAuthorization,
                                  'RevenueStats', 'View'),)

    def get(self, request, transaction_id):
        """Return id/app/amount/type details for one contribution."""
        try:
            contrib = (Contribution.objects.select_related('price_tier').
                       get(transaction_id=transaction_id))
        except Contribution.DoesNotExist:
            raise http.Http404('No transaction by that ID.')

        data = {
            'id': transaction_id,
            'app_id': contrib.addon_id,
            'amount_USD': contrib.price_tier.price,
            'type': amo.CONTRIB_TYPES[contrib.type],
        }

        return Response(data)
{ "repo_name": "Joergen/zamboni", "path": "mkt/stats/api.py", "copies": "1", "size": "8746", "license": "bsd-3-clause", "hash": 4795984361605883000, "line_mean": 34.4089068826, "line_max": 79, "alpha_frac": 0.5915847244, "autogenerated": false, "ratio": 4.170720076299475, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.5262304800699475, "avg_score": null, "num_lines": null }
from functools import partial

from django.template.loader import render_to_string
from django.core.urlresolvers import reverse
from django import template
import pytz
from django.utils.html import escape
from django.utils.translation import ugettext as _
from couchdbkit.exceptions import ResourceNotFound
from corehq import privileges
from corehq.apps.cloudcare import CLOUDCARE_DEVICE_ID
from corehq.apps.hqwebapp.templatetags.hq_shared_tags import toggle_enabled
from corehq.apps.receiverwrapper.auth import AuthContext
from corehq.apps.hqwebapp.doc_info import get_doc_info_by_id, DocInfo
from corehq.apps.locations.permissions import can_edit_form_location
from corehq.apps.reports.formdetails.readable import get_readable_data_for_submission
from corehq import toggles
from corehq.util.timezones.conversions import ServerTime
from corehq.util.timezones.utils import get_timezone_for_request
from couchforms.models import XFormInstance
from casexml.apps.case.xform import extract_case_blocks
from casexml.apps.case import const
from casexml.apps.case.models import CommCareCase
from casexml.apps.case.templatetags.case_tags import case_inline_display
from corehq.apps.hqwebapp.templatetags.proptable_tags import (
    get_tables_as_columns, get_default_definition)
from django_prbac.utils import has_privilege


register = template.Library()


@register.simple_tag
def render_form_xml(form):
    # Render a form's raw XML as an escaped, pretty-printed <pre> block.
    # Inserting "\n" between adjacent tags gives one tag per line for the
    # prettyprint/linenums styling.
    xml = form.get_xml() or ''
    return '<pre class="fancy-code prettyprint linenums"><code class="language-xml">%s</code></pre>' \
        % escape(xml.replace("><", ">\n<"))


@register.simple_tag
def render_pretty_xml(xml):
    # Same as render_form_xml but for an XML string already in hand.
    return '<pre class="fancy-code prettyprint linenums"><code class="language-xml">%s</code></pre>' \
        % escape(xml.replace("><", ">\n<"))


@register.simple_tag
def form_inline_display(form_id, timezone=pytz.utc):
    # Short "<date>: <xmlns>" label for a form id; falls back to a
    # "missing form" label when the doc can't be fetched from couch.
    if form_id:
        try:
            form = XFormInstance.get(form_id)
            if form:
                return "%s: %s" % (ServerTime(form.received_on).user_time(timezone).done().date(), form.xmlns)
        except ResourceNotFound:
            pass
        return "%s: %s" % (_("missing form"), form_id)
    return _("empty form id found")


def sorted_case_update_keys(keys):
    """Put common @ attributes at the bottom"""
    # Sort key: (is-attribute, name) — False sorts before True, so plain
    # update keys come first, '@'-prefixed attributes last.
    return sorted(keys, key=lambda k: (k[0] == '@', k))


def sorted_form_metadata_keys(keys):
    # Order metadata so timeStart/timeEnd and username/userID stay adjacent
    # and in that relative order; everything else sorts alphabetically.
    def mycmp(x, y):
        foo = ('timeStart', 'timeEnd')
        bar = ('username', 'userID')
        if x in foo and y in foo:
            return -1 if foo.index(x) == 0 else 1
        elif x in foo or y in foo:
            return 0
        if x in bar and y in bar:
            return -1 if bar.index(x) == 0 else 1
        # NOTE(review): this branch is unreachable — it repeats the `and`
        # condition handled just above. By symmetry with the `foo` case it
        # was presumably meant to be `x in bar or y in bar`; confirm
        # intended ordering before changing.
        elif x in bar and y in bar:
            return 0
        return cmp(x, y)
    return sorted(keys, cmp=mycmp)


@register.simple_tag
def render_form(form, domain, options):
    """
    Uses options since Django 1.3 doesn't seem to support templatetag kwargs.
    Change to kwargs when we're on a version of Django that does.

    Renders the full single-form report view: form data, case changes,
    metadata, and the edit/resave controls the requesting user may see.
    Recognized `options` keys: case_id, side_pane, user, request.
    """
    timezone = get_timezone_for_request()
    case_id = options.get('case_id')
    side_pane = options.get('side_pane', False)
    user = options.get('user', None)

    _get_tables_as_columns = partial(get_tables_as_columns, timezone=timezone)

    # Form Data tab
    form_data, question_list_not_found = get_readable_data_for_submission(form)

    # Case Changes tab
    case_blocks = extract_case_blocks(form)
    # Move the block for the currently-viewed case to the front.
    # NOTE(review): iterates a copy but pops from the live list by the
    # copy's index — only safe if at most one block matches case_id;
    # confirm that invariant holds.
    for i, block in enumerate(list(case_blocks)):
        if case_id and block.get(const.CASE_ATTR_ID) == case_id:
            case_blocks.pop(i)
            case_blocks.insert(0, block)
    cases = []
    for b in case_blocks:
        this_case_id = b.get(const.CASE_ATTR_ID)
        try:
            this_case = CommCareCase.get(this_case_id) if this_case_id else None
            valid_case = True
        except ResourceNotFound:
            this_case = None
            valid_case = False

        if this_case and this_case._id:
            url = reverse('case_details', args=[domain, this_case._id])
        else:
            url = "#"

        definition = get_default_definition(
            sorted_case_update_keys(b.keys()),
            # Datetimes are assumed to be in the phone's timezone unless the
            # submission came from cloudcare.
            assume_phonetimes=(not form.metadata or
                               (form.metadata.deviceID != CLOUDCARE_DEVICE_ID)),
        )
        cases.append({
            "is_current_case": case_id and this_case_id == case_id,
            "name": case_inline_display(this_case),
            "table": _get_tables_as_columns(b, definition),
            "url": url,
            "valid_case": valid_case
        })

    # Form Metadata tab
    meta = form.top_level_tags().get('meta', None) or {}

    definition = get_default_definition(sorted_form_metadata_keys(meta.keys()))
    form_meta_data = _get_tables_as_columns(meta, definition)
    if 'auth_context' in form:
        auth_context = AuthContext(form.auth_context)
        auth_context_user_id = auth_context.user_id
        auth_user_info = get_doc_info_by_id(domain, auth_context_user_id)
    else:
        # No auth context on the doc: synthesize an unauthenticated one.
        auth_user_info = get_doc_info_by_id(domain, None)
        auth_context = AuthContext(
            user_id=None,
            authenticated=False,
            domain=domain,
        )
    meta_userID = meta.get('userID')
    meta_username = meta.get('username')
    # Special-case the two built-in users that have no user doc to link to.
    if meta_userID == 'demo_user':
        user_info = DocInfo(
            domain=domain,
            display='demo_user',
        )
    elif meta_username == 'admin':
        user_info = DocInfo(
            domain=domain,
            display='admin',
        )
    else:
        user_info = get_doc_info_by_id(domain, meta_userID)

    request = options.get('request', None)

    # Edit controls require a request, a user with data-edit rights (or a
    # commcare user), plus per-control checks below.
    user_can_edit = (
        request and user and request.domain
        and (user.can_edit_data() or user.is_commcare_user())
    )
    show_edit_options = (
        user_can_edit
        and can_edit_form_location(domain, user, form)
    )
    show_edit_submission = (
        user_can_edit
        and has_privilege(request, privileges.CLOUDCARE)
        and toggle_enabled(request, toggles.EDIT_SUBMISSIONS)
        and form.doc_type != 'XFormDeprecated'
    )
    show_resave = (
        user_can_edit and toggle_enabled(request, toggles.SUPPORT)
    )

    def _get_edit_info(instance):
        # Summarize the form's position in an edit chain: whether it was
        # superseded (XFormDeprecated) and/or is itself an edit.
        info = {
            'was_edited': False,
            'is_edit': False,
        }
        if instance.doc_type == "XFormDeprecated":
            info.update({
                'was_edited': True,
                'latest_version': instance.orig_id,
            })
        if getattr(instance, 'edited_on', None):
            info.update({
                'is_edit': True,
                'edited_on': instance.edited_on,
                'previous_version': instance.deprecated_form_id
            })
        return info

    return render_to_string("reports/form/partials/single_form.html", {
        "context_case_id": case_id,
        "instance": form,
        "is_archived": form.doc_type == "XFormArchived",
        "edit_info": _get_edit_info(form),
        "domain": domain,
        'question_list_not_found': question_list_not_found,
        "form_data": form_data,
        "cases": cases,
        "form_table_options": {
            # todo: wells if display config has more than one column
            "put_loners_in_wells": False
        },
        "form_meta_data": form_meta_data,
        "auth_context": auth_context,
        "auth_user_info": auth_user_info,
        "user_info": user_info,
        "side_pane": side_pane,
        "show_edit_options": show_edit_options,
        "show_edit_submission": show_edit_submission,
        "show_resave": show_resave,
    })
{ "repo_name": "puttarajubr/commcare-hq", "path": "corehq/apps/reports/templatetags/xform_tags.py", "copies": "1", "size": "7692", "license": "bsd-3-clause", "hash": 6067610274751295000, "line_mean": 33.0353982301, "line_max": 110, "alpha_frac": 0.615574623, "autogenerated": false, "ratio": 3.611267605633803, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.4726842228633803, "avg_score": null, "num_lines": null }
# Unit tests for the field/relation comparison classes in
# wagtail.admin.compare (the page-revision "compare" view machinery).
from functools import partial

from django.test import TestCase
from django.utils.safestring import SafeString

from wagtail.admin import compare
from wagtail.core.blocks import StreamValue
from wagtail.images import get_image_model
from wagtail.images.tests.utils import get_test_image_file
from wagtail.tests.testapp.models import (
    AdvertWithCustomPrimaryKey, EventCategory, EventPage, EventPageSpeaker,
    HeadCountRelatedModelUsingPK, SimplePage,
    SnippetChooserModelWithCustomPrimaryKey, StreamPage, TaggedPage)


class TestFieldComparison(TestCase):
    """Base behaviour: whole-value diffs, HTML-escaped, marked safe."""

    comparison_class = compare.FieldComparison

    def test_hasnt_changed(self):
        comparison = self.comparison_class(
            SimplePage._meta.get_field('content'),
            SimplePage(content="Content"),
            SimplePage(content="Content"),
        )

        self.assertTrue(comparison.is_field)
        self.assertFalse(comparison.is_child_relation)

        self.assertEqual(comparison.field_label(), "Content")
        self.assertEqual(comparison.htmldiff(), 'Content')

        self.assertIsInstance(comparison.htmldiff(), SafeString)
        self.assertFalse(comparison.has_changed())

    def test_has_changed(self):
        comparison = self.comparison_class(
            SimplePage._meta.get_field('content'),
            SimplePage(content="Original content"),
            SimplePage(content="Modified content"),
        )

        self.assertEqual(comparison.htmldiff(), '<span class="deletion">Original content</span><span class="addition">Modified content</span>')
        self.assertIsInstance(comparison.htmldiff(), SafeString)
        self.assertTrue(comparison.has_changed())

    def test_htmldiff_escapes_value(self):
        # Field values must be escaped so markup in content can't inject HTML.
        comparison = self.comparison_class(
            SimplePage._meta.get_field('content'),
            SimplePage(content='Original content'),
            SimplePage(content='<script type="text/javascript">doSomethingBad();</script>'),
        )

        self.assertEqual(comparison.htmldiff(), '<span class="deletion">Original content</span><span class="addition">&lt;script type=&quot;text/javascript&quot;&gt;doSomethingBad();&lt;/script&gt;</span>')
        self.assertIsInstance(comparison.htmldiff(), SafeString)


class TestTextFieldComparison(TestFieldComparison):
    comparison_class = compare.TextFieldComparison

    # Only change from FieldComparison is the HTML diff is performed on words
    # instead of the whole field value.
    def test_has_changed(self):
        comparison = self.comparison_class(
            SimplePage._meta.get_field('content'),
            SimplePage(content="Original content"),
            SimplePage(content="Modified content"),
        )

        self.assertEqual(comparison.htmldiff(), '<span class="deletion">Original</span><span class="addition">Modified</span> content')
        self.assertIsInstance(comparison.htmldiff(), SafeString)
        self.assertTrue(comparison.has_changed())

    def test_from_none_to_value_only_shows_addition(self):
        comparison = self.comparison_class(
            SimplePage._meta.get_field('content'),
            SimplePage(content=None),
            SimplePage(content="Added content")
        )

        self.assertEqual(comparison.htmldiff(), '<span class="addition">Added content</span>')
        self.assertIsInstance(comparison.htmldiff(), SafeString)
        self.assertTrue(comparison.has_changed())

    def test_from_value_to_none_only_shows_deletion(self):
        comparison = self.comparison_class(
            SimplePage._meta.get_field('content'),
            SimplePage(content="Removed content"),
            SimplePage(content=None)
        )

        self.assertEqual(comparison.htmldiff(), '<span class="deletion">Removed content</span>')
        self.assertIsInstance(comparison.htmldiff(), SafeString)
        self.assertTrue(comparison.has_changed())


class TestRichTextFieldComparison(TestFieldComparison):
    comparison_class = compare.RichTextFieldComparison

    # Only change from FieldComparison is the HTML diff is performed on words
    # instead of the whole field value.
    def test_has_changed(self):
        comparison = self.comparison_class(
            SimplePage._meta.get_field('content'),
            SimplePage(content="Original content"),
            SimplePage(content="Modified content"),
        )

        self.assertEqual(comparison.htmldiff(), '<span class="deletion">Original</span><span class="addition">Modified</span> content')
        self.assertIsInstance(comparison.htmldiff(), SafeString)
        self.assertTrue(comparison.has_changed())

    # Only change from FieldComparison is that this comparison disregards HTML tags
    def test_has_changed_html(self):
        comparison = self.comparison_class(
            SimplePage._meta.get_field('content'),
            SimplePage(content="<b>Original</b> content"),
            SimplePage(content="Modified <i>content</i>"),
        )

        self.assertEqual(comparison.htmldiff(), '<span class="deletion">Original</span><span class="addition">Modified</span> content')
        self.assertIsInstance(comparison.htmldiff(), SafeString)
        self.assertTrue(comparison.has_changed())

    def test_htmldiff_escapes_value(self):
        # Need to override this one as the HTML tags are stripped by RichTextFieldComparison
        comparison = self.comparison_class(
            SimplePage._meta.get_field('content'),
            SimplePage(content='Original content'),
            SimplePage(content='<script type="text/javascript">doSomethingBad();</script>'),
        )

        self.assertEqual(comparison.htmldiff(), '<span class="deletion">Original content</span><span class="addition">doSomethingBad();</span>')
        self.assertIsInstance(comparison.htmldiff(), SafeString)


class TestStreamFieldComparison(TestCase):
    """Per-block diffs for StreamField values, matched by block id."""

    comparison_class = compare.StreamFieldComparison

    def test_hasnt_changed(self):
        field = StreamPage._meta.get_field('body')

        comparison = self.comparison_class(
            field,
            StreamPage(body=StreamValue(field.stream_block, [
                ('text', "Content", '1'),
            ])),
            StreamPage(body=StreamValue(field.stream_block, [
                ('text', "Content", '1'),
            ])),
        )

        self.assertTrue(comparison.is_field)
        self.assertFalse(comparison.is_child_relation)

        self.assertEqual(comparison.field_label(), "Body")
        self.assertEqual(comparison.htmldiff(), '<div class="comparison__child-object">Content</div>')

        self.assertIsInstance(comparison.htmldiff(), SafeString)
        self.assertFalse(comparison.has_changed())

    def test_has_changed(self):
        field = StreamPage._meta.get_field('body')

        comparison = self.comparison_class(
            field,
            StreamPage(body=StreamValue(field.stream_block, [
                ('text', "Original content", '1'),
            ])),
            StreamPage(body=StreamValue(field.stream_block, [
                ('text', "Modified content", '1'),
            ])),
        )

        self.assertEqual(comparison.htmldiff(), '<div class="comparison__child-object"><span class="deletion">Original</span><span class="addition">Modified</span> content</div>')
        self.assertIsInstance(comparison.htmldiff(), SafeString)
        self.assertTrue(comparison.has_changed())

    def test_add_block(self):
        field = StreamPage._meta.get_field('body')

        comparison = self.comparison_class(
            field,
            StreamPage(body=StreamValue(field.stream_block, [
                ('text', "Content", '1'),
            ])),
            StreamPage(body=StreamValue(field.stream_block, [
                ('text', "Content", '1'),
                ('text', "New Content", '2'),
            ])),
        )

        self.assertEqual(comparison.htmldiff(), '<div class="comparison__child-object">Content</div>\n<div class="comparison__child-object addition">New Content</div>')
        self.assertIsInstance(comparison.htmldiff(), SafeString)
        self.assertTrue(comparison.has_changed())

    def test_delete_block(self):
        field = StreamPage._meta.get_field('body')

        comparison = self.comparison_class(
            field,
            StreamPage(body=StreamValue(field.stream_block, [
                ('text', "Content", '1'),
                ('text', "Content Foo", '2'),
                ('text', "Content Bar", '3'),
            ])),
            StreamPage(body=StreamValue(field.stream_block, [
                ('text', "Content", '1'),
                ('text', "Content Bar", '3'),
            ])),
        )

        self.assertEqual(comparison.htmldiff(), '<div class="comparison__child-object">Content</div>\n<div class="comparison__child-object deletion">Content Foo</div>\n<div class="comparison__child-object">Content Bar</div>')
        self.assertIsInstance(comparison.htmldiff(), SafeString)
        self.assertTrue(comparison.has_changed())

    def test_edit_block(self):
        field = StreamPage._meta.get_field('body')

        comparison = self.comparison_class(
            field,
            StreamPage(body=StreamValue(field.stream_block, [
                ('text', "Content", '1'),
                ('text', "Content Foo", '2'),
                ('text', "Content Bar", '3'),
            ])),
            StreamPage(body=StreamValue(field.stream_block, [
                ('text', "Content", '1'),
                ('text', "Content Baz", '2'),
                ('text', "Content Bar", '3'),
            ])),
        )

        self.assertEqual(comparison.htmldiff(), '<div class="comparison__child-object">Content</div>\n<div class="comparison__child-object">Content <span class="deletion">Foo</span><span class="addition">Baz</span></div>\n<div class="comparison__child-object">Content Bar</div>')
        self.assertIsInstance(comparison.htmldiff(), SafeString)
        self.assertTrue(comparison.has_changed())

    def test_has_changed_richtext(self):
        field = StreamPage._meta.get_field('body')

        comparison = self.comparison_class(
            field,
            StreamPage(body=StreamValue(field.stream_block, [
                ('rich_text', "<b>Original</b> content", '1'),
            ])),
            StreamPage(body=StreamValue(field.stream_block, [
                ('rich_text', "Modified <i>content</i>", '1'),
            ])),
        )

        self.assertEqual(comparison.htmldiff(), '<div class="comparison__child-object"><span class="deletion">Original</span><span class="addition">Modified</span> content</div>')
        self.assertIsInstance(comparison.htmldiff(), SafeString)
        self.assertTrue(comparison.has_changed())

    def test_htmldiff_escapes_value(self):
        field = StreamPage._meta.get_field('body')

        comparison = self.comparison_class(
            field,
            StreamPage(body=StreamValue(field.stream_block, [
                ('text', "Original content", '1'),
            ])),
            StreamPage(body=StreamValue(field.stream_block, [
                ('text', '<script type="text/javascript">doSomethingBad();</script>', '1'),
            ])),
        )

        self.assertEqual(comparison.htmldiff(), '<div class="comparison__child-object"><span class="deletion">Original content</span><span class="addition">&lt;script type=&quot;text/javascript&quot;&gt;doSomethingBad();&lt;/script&gt;</span></div>')
        self.assertIsInstance(comparison.htmldiff(), SafeString)

    def test_htmldiff_escapes_value_richtext(self):
        # Rich-text blocks strip tags rather than escape them.
        field = StreamPage._meta.get_field('body')

        comparison = self.comparison_class(
            field,
            StreamPage(body=StreamValue(field.stream_block, [
                ('rich_text', "Original content", '1'),
            ])),
            StreamPage(body=StreamValue(field.stream_block, [
                ('rich_text', '<script type="text/javascript">doSomethingBad();</script>', '1'),
            ])),
        )

        self.assertEqual(comparison.htmldiff(), '<div class="comparison__child-object"><span class="deletion">Original content</span><span class="addition">doSomethingBad();</span></div>')
        self.assertIsInstance(comparison.htmldiff(), SafeString)

    def test_compare_structblock(self):
        field = StreamPage._meta.get_field('body')

        comparison = self.comparison_class(
            field,
            StreamPage(body=StreamValue(field.stream_block, [
                ('product', {'name': 'a packet of rolos', 'price': '75p'}, '1'),
            ])),
            StreamPage(body=StreamValue(field.stream_block, [
                ('product', {'name': 'a packet of rolos', 'price': '85p'}, '1'),
            ])),
        )

        expected = """
            <div class="comparison__child-object"><dl>
                <dt>Name</dt>
                <dd>a packet of rolos</dd>
                <dt>Price</dt>
                <dd><span class="deletion">75p</span><span class="addition">85p</span></dd>
            </dl></div>
        """
        self.assertHTMLEqual(comparison.htmldiff(), expected)
        self.assertIsInstance(comparison.htmldiff(), SafeString)
        self.assertTrue(comparison.has_changed())

    def test_compare_imagechooserblock(self):
        image_model = get_image_model()
        test_image_1 = image_model.objects.create(
            title="Test image 1",
            file=get_test_image_file(),
        )
        test_image_2 = image_model.objects.create(
            title="Test image 2",
            file=get_test_image_file(),
        )

        field = StreamPage._meta.get_field('body')

        comparison = self.comparison_class(
            field,
            StreamPage(body=StreamValue(field.stream_block, [
                ('image', test_image_1, '1'),
            ])),
            StreamPage(body=StreamValue(field.stream_block, [
                ('image', test_image_2, '1'),
            ])),
        )

        result = comparison.htmldiff()
        self.assertIn('<div class="preview-image deletion">', result)
        self.assertIn('alt="Test image 1"', result)
        self.assertIn('<div class="preview-image addition">', result)
        self.assertIn('alt="Test image 2"', result)

        self.assertIsInstance(result, SafeString)
        self.assertTrue(comparison.has_changed())


class TestChoiceFieldComparison(TestCase):
    """Choice fields diff by display label, not stored value."""

    comparison_class = compare.ChoiceFieldComparison

    def test_hasnt_changed(self):
        comparison = self.comparison_class(
            EventPage._meta.get_field('audience'),
            EventPage(audience="public"),
            EventPage(audience="public"),
        )

        self.assertTrue(comparison.is_field)
        self.assertFalse(comparison.is_child_relation)

        self.assertEqual(comparison.field_label(), "Audience")
        self.assertEqual(comparison.htmldiff(), 'Public')

        self.assertIsInstance(comparison.htmldiff(), SafeString)
        self.assertFalse(comparison.has_changed())

    def test_has_changed(self):
        comparison = self.comparison_class(
            EventPage._meta.get_field('audience'),
            EventPage(audience="public"),
            EventPage(audience="private"),
        )

        self.assertEqual(comparison.htmldiff(), '<span class="deletion">Public</span><span class="addition">Private</span>')
        self.assertIsInstance(comparison.htmldiff(), SafeString)
        self.assertTrue(comparison.has_changed())

    def test_from_none_to_value_only_shows_addition(self):
        comparison = self.comparison_class(
            EventPage._meta.get_field('audience'),
            EventPage(audience=None),
            EventPage(audience="private"),
        )

        self.assertEqual(comparison.htmldiff(), '<span class="addition">Private</span>')
        self.assertIsInstance(comparison.htmldiff(), SafeString)
        self.assertTrue(comparison.has_changed())

    def test_from_value_to_none_only_shows_deletion(self):
        comparison = self.comparison_class(
            EventPage._meta.get_field('audience'),
            EventPage(audience="public"),
            EventPage(audience=None),
        )

        self.assertEqual(comparison.htmldiff(), '<span class="deletion">Public</span>')
        self.assertIsInstance(comparison.htmldiff(), SafeString)
        self.assertTrue(comparison.has_changed())


class TestTagsFieldComparison(TestCase):
    comparison_class = compare.TagsFieldComparison

    def test_hasnt_changed(self):
        a = TaggedPage()
        a.tags.add('wagtail')
        a.tags.add('bird')

        b = TaggedPage()
        b.tags.add('wagtail')
        b.tags.add('bird')

        comparison = self.comparison_class(TaggedPage._meta.get_field('tags'), a, b)

        self.assertTrue(comparison.is_field)
        self.assertFalse(comparison.is_child_relation)

        self.assertEqual(comparison.field_label(), "Tags")
        self.assertEqual(comparison.htmldiff(), 'wagtail, bird')

        self.assertIsInstance(comparison.htmldiff(), SafeString)
        self.assertFalse(comparison.has_changed())

    def test_has_changed(self):
        a = TaggedPage()
        a.tags.add('wagtail')
        a.tags.add('bird')

        b = TaggedPage()
        b.tags.add('wagtail')
        b.tags.add('motacilla')

        comparison = self.comparison_class(TaggedPage._meta.get_field('tags'), a, b)

        self.assertEqual(comparison.htmldiff(), 'wagtail, <span class="deletion">bird</span>, <span class="addition">motacilla</span>')
        self.assertIsInstance(comparison.htmldiff(), SafeString)
        self.assertTrue(comparison.has_changed())


class TestM2MFieldComparison(TestCase):
    fixtures = ['test.json']
    comparison_class = compare.M2MFieldComparison

    def setUp(self):
        # Three categories to move between the two event pages.
        self.meetings_category = EventCategory.objects.create(name='Meetings')
        self.parties_category = EventCategory.objects.create(name='Parties')
        self.holidays_category = EventCategory.objects.create(name='Holidays')

    def test_hasnt_changed(self):
        christmas_event = EventPage.objects.get(url_path='/home/events/christmas/')
        saint_patrick_event = EventPage.objects.get(url_path='/home/events/saint-patrick/')

        christmas_event.categories = [self.meetings_category, self.parties_category]
        saint_patrick_event.categories = [self.meetings_category, self.parties_category]

        comparison = self.comparison_class(
            EventPage._meta.get_field('categories'), christmas_event, saint_patrick_event
        )

        self.assertTrue(comparison.is_field)
        self.assertFalse(comparison.is_child_relation)

        self.assertEqual(comparison.field_label(), "Categories")
        self.assertFalse(comparison.has_changed())

        self.assertEqual(comparison.htmldiff(), 'Meetings, Parties')
        self.assertIsInstance(comparison.htmldiff(), SafeString)

    def test_has_changed(self):
        christmas_event = EventPage.objects.get(url_path='/home/events/christmas/')
        saint_patrick_event = EventPage.objects.get(url_path='/home/events/saint-patrick/')

        christmas_event.categories = [self.meetings_category, self.parties_category]
        saint_patrick_event.categories = [self.meetings_category, self.holidays_category]

        comparison = self.comparison_class(
            EventPage._meta.get_field('categories'), christmas_event, saint_patrick_event
        )

        self.assertTrue(comparison.has_changed())
        self.assertEqual(comparison.htmldiff(), 'Meetings, <span class="deletion">Parties</span>, <span class="addition">Holidays</span>')
        self.assertIsInstance(comparison.htmldiff(), SafeString)


class TestForeignObjectComparison(TestCase):
    comparison_class = compare.ForeignObjectComparison

    @classmethod
    def setUpTestData(cls):
        image_model = get_image_model()
        cls.test_image_1 = image_model.objects.create(
            title="Test image 1",
            file=get_test_image_file(),
        )
        cls.test_image_2 = image_model.objects.create(
            title="Test image 2",
            file=get_test_image_file(),
        )

    def test_hasnt_changed(self):
        comparison = self.comparison_class(
            EventPage._meta.get_field('feed_image'),
            EventPage(feed_image=self.test_image_1),
            EventPage(feed_image=self.test_image_1),
        )

        self.assertTrue(comparison.is_field)
        self.assertFalse(comparison.is_child_relation)

        self.assertEqual(comparison.field_label(), "Feed image")
        self.assertEqual(comparison.htmldiff(), 'Test image 1')

        self.assertIsInstance(comparison.htmldiff(), SafeString)
        self.assertFalse(comparison.has_changed())

    def test_has_changed(self):
        comparison = self.comparison_class(
            EventPage._meta.get_field('feed_image'),
            EventPage(feed_image=self.test_image_1),
            EventPage(feed_image=self.test_image_2),
        )

        self.assertEqual(comparison.htmldiff(), '<span class="deletion">Test image 1</span><span class="addition">Test image 2</span>')
        self.assertIsInstance(comparison.htmldiff(), SafeString)
        self.assertTrue(comparison.has_changed())


class TestForeignObjectComparisonWithCustomPK(TestCase):
    """ForeignObjectComparison works with models declaring a custom primary key field"""

    comparison_class = compare.ForeignObjectComparison

    @classmethod
    def setUpTestData(cls):
        ad1 = AdvertWithCustomPrimaryKey.objects.create(
            advert_id='ad1',
            text='Advert 1'
        )
        ad2 = AdvertWithCustomPrimaryKey.objects.create(
            advert_id='ad2',
            text='Advert 2'
        )
        cls.test_obj_1 = SnippetChooserModelWithCustomPrimaryKey.objects.create(
            advertwithcustomprimarykey=ad1
        )
        cls.test_obj_2 = SnippetChooserModelWithCustomPrimaryKey.objects.create(
            advertwithcustomprimarykey=ad2
        )

    def test_hasnt_changed(self):
        comparison = self.comparison_class(
            SnippetChooserModelWithCustomPrimaryKey._meta.get_field('advertwithcustomprimarykey'),
            self.test_obj_1,
            self.test_obj_1,
        )

        self.assertTrue(comparison.is_field)
        self.assertFalse(comparison.is_child_relation)

        self.assertEqual(comparison.field_label(), 'Advertwithcustomprimarykey')
        self.assertEqual(comparison.htmldiff(), 'Advert 1')

        self.assertIsInstance(comparison.htmldiff(), SafeString)
        self.assertFalse(comparison.has_changed())

    def test_has_changed(self):
        comparison = self.comparison_class(
            SnippetChooserModelWithCustomPrimaryKey._meta.get_field('advertwithcustomprimarykey'),
            self.test_obj_1,
            self.test_obj_2,
        )

        self.assertEqual(comparison.htmldiff(), '<span class="deletion">Advert 1</span><span class="addition">Advert 2</span>')
        self.assertIsInstance(comparison.htmldiff(), SafeString)
        self.assertTrue(comparison.has_changed())


class TestChildRelationComparison(TestCase):
    field_comparison_class = compare.FieldComparison
    comparison_class = compare.ChildRelationComparison

    def test_hasnt_changed(self):
        # Two event pages with speaker called "Father Christmas". Neither of
        # the speaker objects have an ID so this tests that the code can match
        # the two together by field content.
        event_page = EventPage(title="Event page", slug="event")
        event_page.speakers.add(EventPageSpeaker(
            first_name="Father",
            last_name="Christmas",
        ))

        modified_event_page = EventPage(title="Event page", slug="event")
        modified_event_page.speakers.add(EventPageSpeaker(
            first_name="Father",
            last_name="Christmas",
        ))

        comparison = self.comparison_class(
            EventPage._meta.get_field('speaker'),
            [
                partial(self.field_comparison_class, EventPageSpeaker._meta.get_field('first_name')),
                partial(self.field_comparison_class, EventPageSpeaker._meta.get_field('last_name')),
            ],
            event_page,
            modified_event_page,
        )

        self.assertFalse(comparison.is_field)
        self.assertTrue(comparison.is_child_relation)

        self.assertEqual(comparison.field_label(), "Speaker")
        self.assertFalse(comparison.has_changed())

        # Check mapping
        objs_a = list(comparison.val_a.all())
        objs_b = list(comparison.val_b.all())
        map_forwards, map_backwards, added, deleted = comparison.get_mapping(objs_a, objs_b)
        self.assertEqual(map_forwards, {0: 0})
        self.assertEqual(map_backwards, {0: 0})
        self.assertEqual(added, [])
        self.assertEqual(deleted, [])

    def test_has_changed(self):
        # Father Christmas renamed to Santa Claus. And Father Ted added.
        # Father Christmas should be mapped to Father Ted because they
        # are most alike.
Santa claus should be displayed as "new" event_page = EventPage(title="Event page", slug="event") event_page.speakers.add(EventPageSpeaker( first_name="Father", last_name="Christmas", sort_order=0, )) modified_event_page = EventPage(title="Event page", slug="event") modified_event_page.speakers.add(EventPageSpeaker( first_name="Santa", last_name="Claus", sort_order=0, )) modified_event_page.speakers.add(EventPageSpeaker( first_name="Father", last_name="Ted", sort_order=1, )) comparison = self.comparison_class( EventPage._meta.get_field('speaker'), [ partial(self.field_comparison_class, EventPageSpeaker._meta.get_field('first_name')), partial(self.field_comparison_class, EventPageSpeaker._meta.get_field('last_name')), ], event_page, modified_event_page, ) self.assertFalse(comparison.is_field) self.assertTrue(comparison.is_child_relation) self.assertEqual(comparison.field_label(), "Speaker") self.assertTrue(comparison.has_changed()) # Check mapping objs_a = list(comparison.val_a.all()) objs_b = list(comparison.val_b.all()) map_forwards, map_backwards, added, deleted = comparison.get_mapping(objs_a, objs_b) self.assertEqual(map_forwards, {0: 1}) # Map Father Christmas to Father Ted self.assertEqual(map_backwards, {1: 0}) # Map Father Ted ot Father Christmas self.assertEqual(added, [0]) # Add Santa Claus self.assertEqual(deleted, []) def test_has_changed_with_same_id(self): # Father Christmas renamed to Santa Claus, but this time the ID of the # child object remained the same. 
It should now be detected as the same # object event_page = EventPage(title="Event page", slug="event") event_page.speakers.add(EventPageSpeaker( id=1, first_name="Father", last_name="Christmas", sort_order=0, )) modified_event_page = EventPage(title="Event page", slug="event") modified_event_page.speakers.add(EventPageSpeaker( id=1, first_name="Santa", last_name="Claus", sort_order=0, )) modified_event_page.speakers.add(EventPageSpeaker( first_name="Father", last_name="Ted", sort_order=1, )) comparison = self.comparison_class( EventPage._meta.get_field('speaker'), [ partial(self.field_comparison_class, EventPageSpeaker._meta.get_field('first_name')), partial(self.field_comparison_class, EventPageSpeaker._meta.get_field('last_name')), ], event_page, modified_event_page, ) self.assertFalse(comparison.is_field) self.assertTrue(comparison.is_child_relation) self.assertEqual(comparison.field_label(), "Speaker") self.assertTrue(comparison.has_changed()) # Check mapping objs_a = list(comparison.val_a.all()) objs_b = list(comparison.val_b.all()) map_forwards, map_backwards, added, deleted = comparison.get_mapping(objs_a, objs_b) self.assertEqual(map_forwards, {0: 0}) # Map Father Christmas to Santa Claus self.assertEqual(map_backwards, {0: 0}) # Map Santa Claus to Father Christmas self.assertEqual(added, [1]) # Add Father Ted self.assertEqual(deleted, []) def test_hasnt_changed_with_different_id(self): # Both of the child objects have the same field content but have a # different ID so they should be detected as separate objects event_page = EventPage(title="Event page", slug="event") event_page.speakers.add(EventPageSpeaker( id=1, first_name="Father", last_name="Christmas", )) modified_event_page = EventPage(title="Event page", slug="event") modified_event_page.speakers.add(EventPageSpeaker( id=2, first_name="Father", last_name="Christmas", )) comparison = self.comparison_class( EventPage._meta.get_field('speaker'), [ partial(self.field_comparison_class, 
EventPageSpeaker._meta.get_field('first_name')), partial(self.field_comparison_class, EventPageSpeaker._meta.get_field('last_name')), ], event_page, modified_event_page, ) self.assertFalse(comparison.is_field) self.assertTrue(comparison.is_child_relation) self.assertEqual(comparison.field_label(), "Speaker") self.assertTrue(comparison.has_changed()) # Check mapping objs_a = list(comparison.val_a.all()) objs_b = list(comparison.val_b.all()) map_forwards, map_backwards, added, deleted = comparison.get_mapping(objs_a, objs_b) self.assertEqual(map_forwards, {}) self.assertEqual(map_backwards, {}) self.assertEqual(added, [0]) # Add new Father Christmas self.assertEqual(deleted, [0]) # Delete old Father Christmas class TestChildObjectComparison(TestCase): field_comparison_class = compare.FieldComparison comparison_class = compare.ChildObjectComparison def test_same_object(self): obj_a = EventPageSpeaker( first_name="Father", last_name="Christmas", ) obj_b = EventPageSpeaker( first_name="Father", last_name="Christmas", ) comparison = self.comparison_class( EventPageSpeaker, [ partial(self.field_comparison_class, EventPageSpeaker._meta.get_field('first_name')), partial(self.field_comparison_class, EventPageSpeaker._meta.get_field('last_name')), ], obj_a, obj_b, ) self.assertFalse(comparison.is_addition()) self.assertFalse(comparison.is_deletion()) self.assertFalse(comparison.has_changed()) self.assertEqual(comparison.get_position_change(), 0) self.assertEqual(comparison.get_num_differences(), 0) def test_different_object(self): obj_a = EventPageSpeaker( first_name="Father", last_name="Christmas", ) obj_b = EventPageSpeaker( first_name="Santa", last_name="Claus", ) comparison = self.comparison_class( EventPageSpeaker, [ partial(self.field_comparison_class, EventPageSpeaker._meta.get_field('first_name')), partial(self.field_comparison_class, EventPageSpeaker._meta.get_field('last_name')), ], obj_a, obj_b, ) self.assertFalse(comparison.is_addition()) 
self.assertFalse(comparison.is_deletion()) self.assertTrue(comparison.has_changed()) self.assertEqual(comparison.get_position_change(), 0) self.assertEqual(comparison.get_num_differences(), 2) def test_moved_object(self): obj_a = EventPageSpeaker( first_name="Father", last_name="Christmas", sort_order=1, ) obj_b = EventPageSpeaker( first_name="Father", last_name="Christmas", sort_order=5, ) comparison = self.comparison_class( EventPageSpeaker, [ partial(self.field_comparison_class, EventPageSpeaker._meta.get_field('first_name')), partial(self.field_comparison_class, EventPageSpeaker._meta.get_field('last_name')), ], obj_a, obj_b, ) self.assertFalse(comparison.is_addition()) self.assertFalse(comparison.is_deletion()) self.assertFalse(comparison.has_changed()) self.assertEqual(comparison.get_position_change(), 4) self.assertEqual(comparison.get_num_differences(), 0) def test_addition(self): obj = EventPageSpeaker( first_name="Father", last_name="Christmas", ) comparison = self.comparison_class( EventPageSpeaker, [ partial(self.field_comparison_class, EventPageSpeaker._meta.get_field('first_name')), partial(self.field_comparison_class, EventPageSpeaker._meta.get_field('last_name')), ], None, obj, ) self.assertTrue(comparison.is_addition()) self.assertFalse(comparison.is_deletion()) self.assertFalse(comparison.has_changed()) self.assertIsNone(comparison.get_position_change(), 0) self.assertEqual(comparison.get_num_differences(), 0) def test_deletion(self): obj = EventPageSpeaker( first_name="Father", last_name="Christmas", ) comparison = self.comparison_class( EventPageSpeaker, [ partial(self.field_comparison_class, EventPageSpeaker._meta.get_field('first_name')), partial(self.field_comparison_class, EventPageSpeaker._meta.get_field('last_name')), ], obj, None, ) self.assertFalse(comparison.is_addition()) self.assertTrue(comparison.is_deletion()) self.assertFalse(comparison.has_changed()) self.assertIsNone(comparison.get_position_change()) 
self.assertEqual(comparison.get_num_differences(), 0) class TestChildRelationComparisonUsingPK(TestCase): """Test related objects can be compred if they do not use id for primary key""" field_comparison_class = compare.FieldComparison comparison_class = compare.ChildRelationComparison def test_has_changed_with_same_id(self): # Head Count was changed but the PK of the child object remained the same. # It should be detected as the same object event_page = EventPage(title="Semi Finals", slug="semi-finals-2018") event_page.head_counts.add(HeadCountRelatedModelUsingPK( custom_id=1, head_count=22, )) modified_event_page = EventPage(title="Semi Finals", slug="semi-finals-2018") modified_event_page.head_counts.add(HeadCountRelatedModelUsingPK( custom_id=1, head_count=23, )) modified_event_page.head_counts.add(HeadCountRelatedModelUsingPK( head_count=25, )) comparison = self.comparison_class( EventPage._meta.get_field('head_counts'), [partial(self.field_comparison_class, HeadCountRelatedModelUsingPK._meta.get_field('head_count'))], event_page, modified_event_page, ) self.assertFalse(comparison.is_field) self.assertTrue(comparison.is_child_relation) self.assertEqual(comparison.field_label(), 'Head counts') self.assertTrue(comparison.has_changed()) # Check mapping objs_a = list(comparison.val_a.all()) objs_b = list(comparison.val_b.all()) map_forwards, map_backwards, added, deleted = comparison.get_mapping(objs_a, objs_b) self.assertEqual(map_forwards, {0: 0}) # map head count 22 to 23 self.assertEqual(map_backwards, {0: 0}) # map head count 23 to 22 self.assertEqual(added, [1]) # add second head count self.assertEqual(deleted, []) def test_hasnt_changed_with_different_id(self): # Both of the child objects have the same field content but have a # different PK (ID) so they should be detected as separate objects event_page = EventPage(title="Finals", slug="finals-event-abc") event_page.head_counts.add(HeadCountRelatedModelUsingPK( custom_id=1, head_count=220 )) 
modified_event_page = EventPage(title="Finals", slug="finals-event-abc") modified_event_page.head_counts.add(HeadCountRelatedModelUsingPK( custom_id=2, head_count=220 )) comparison = self.comparison_class( EventPage._meta.get_field('head_counts'), [partial(self.field_comparison_class, HeadCountRelatedModelUsingPK._meta.get_field('head_count'))], event_page, modified_event_page, ) self.assertFalse(comparison.is_field) self.assertTrue(comparison.is_child_relation) self.assertEqual(comparison.field_label(), "Head counts") self.assertTrue(comparison.has_changed()) # Check mapping objs_a = list(comparison.val_a.all()) objs_b = list(comparison.val_b.all()) map_forwards, map_backwards, added, deleted = comparison.get_mapping(objs_a, objs_b) self.assertEqual(map_forwards, {}) self.assertEqual(map_backwards, {}) self.assertEqual(added, [0]) # Add new head count self.assertEqual(deleted, [0]) # Delete old head count
{ "repo_name": "nimasmi/wagtail", "path": "wagtail/admin/tests/test_compare.py", "copies": "3", "size": "38339", "license": "bsd-3-clause", "hash": -5496666906605017000, "line_mean": 39.1455497382, "line_max": 279, "alpha_frac": 0.6222906179, "autogenerated": false, "ratio": 4.130022621997199, "config_test": true, "has_no_keywords": false, "few_assignments": false, "quality_score": 1, "avg_score": 0.0008977225332592987, "num_lines": 955 }
from functools import partial from django.test import TestCase from django.utils.safestring import SafeText from wagtail.admin import compare from wagtail.core.blocks import StreamValue from wagtail.images import get_image_model from wagtail.images.tests.utils import get_test_image_file from wagtail.tests.testapp.models import ( EventCategory, EventPage, EventPageSpeaker, HeadCountRelatedModelUsingPK, SimplePage, StreamPage, TaggedPage) class TestFieldComparison(TestCase): comparison_class = compare.FieldComparison def test_hasnt_changed(self): comparison = self.comparison_class( SimplePage._meta.get_field('content'), SimplePage(content="Content"), SimplePage(content="Content"), ) self.assertTrue(comparison.is_field) self.assertFalse(comparison.is_child_relation) self.assertEqual(comparison.field_label(), "Content") self.assertEqual(comparison.htmldiff(), 'Content') self.assertIsInstance(comparison.htmldiff(), SafeText) self.assertFalse(comparison.has_changed()) def test_has_changed(self): comparison = self.comparison_class( SimplePage._meta.get_field('content'), SimplePage(content="Original content"), SimplePage(content="Modified content"), ) self.assertEqual(comparison.htmldiff(), '<span class="deletion">Original content</span><span class="addition">Modified content</span>') self.assertIsInstance(comparison.htmldiff(), SafeText) self.assertTrue(comparison.has_changed()) def test_htmldiff_escapes_value(self): comparison = self.comparison_class( SimplePage._meta.get_field('content'), SimplePage(content='Original content'), SimplePage(content='<script type="text/javascript">doSomethingBad();</script>'), ) self.assertEqual(comparison.htmldiff(), '<span class="deletion">Original content</span><span class="addition">&lt;script type=&quot;text/javascript&quot;&gt;doSomethingBad();&lt;/script&gt;</span>') self.assertIsInstance(comparison.htmldiff(), SafeText) class TestTextFieldComparison(TestFieldComparison): comparison_class = compare.TextFieldComparison # Only change 
from FieldComparison is the HTML diff is performed on words # instead of the whole field value. def test_has_changed(self): comparison = self.comparison_class( SimplePage._meta.get_field('content'), SimplePage(content="Original content"), SimplePage(content="Modified content"), ) self.assertEqual(comparison.htmldiff(), '<span class="deletion">Original</span><span class="addition">Modified</span> content') self.assertIsInstance(comparison.htmldiff(), SafeText) self.assertTrue(comparison.has_changed()) class TestRichTextFieldComparison(TestTextFieldComparison): comparison_class = compare.RichTextFieldComparison # Only change from FieldComparison is that this comparison disregards HTML tags def test_has_changed_html(self): comparison = self.comparison_class( SimplePage._meta.get_field('content'), SimplePage(content="<b>Original</b> content"), SimplePage(content="Modified <i>content</i>"), ) self.assertEqual(comparison.htmldiff(), '<span class="deletion">Original</span><span class="addition">Modified</span> content') self.assertIsInstance(comparison.htmldiff(), SafeText) self.assertTrue(comparison.has_changed()) def test_htmldiff_escapes_value(self): # Need to override this one as the HTML tags are stripped by RichTextFieldComparison comparison = self.comparison_class( SimplePage._meta.get_field('content'), SimplePage(content='Original content'), SimplePage(content='<script type="text/javascript">doSomethingBad();</script>'), ) self.assertEqual(comparison.htmldiff(), '<span class="deletion">Original content</span><span class="addition">doSomethingBad();</span>') self.assertIsInstance(comparison.htmldiff(), SafeText) class TestStreamFieldComparison(TestCase): comparison_class = compare.StreamFieldComparison def test_hasnt_changed(self): field = StreamPage._meta.get_field('body') comparison = self.comparison_class( field, StreamPage(body=StreamValue(field.stream_block, [ ('text', "Content", '1'), ])), StreamPage(body=StreamValue(field.stream_block, [ ('text', "Content", 
'1'), ])), ) self.assertTrue(comparison.is_field) self.assertFalse(comparison.is_child_relation) self.assertEqual(comparison.field_label(), "Body") self.assertEqual(comparison.htmldiff(), '<div class="comparison__child-object">Content</div>') self.assertIsInstance(comparison.htmldiff(), SafeText) self.assertFalse(comparison.has_changed()) def test_has_changed(self): field = StreamPage._meta.get_field('body') comparison = self.comparison_class( field, StreamPage(body=StreamValue(field.stream_block, [ ('text', "Original content", '1'), ])), StreamPage(body=StreamValue(field.stream_block, [ ('text', "Modified content", '1'), ])), ) self.assertEqual(comparison.htmldiff(), '<div class="comparison__child-object"><span class="deletion">Original</span><span class="addition">Modified</span> content</div>') self.assertIsInstance(comparison.htmldiff(), SafeText) self.assertTrue(comparison.has_changed()) def test_add_block(self): field = StreamPage._meta.get_field('body') comparison = self.comparison_class( field, StreamPage(body=StreamValue(field.stream_block, [ ('text', "Content", '1'), ])), StreamPage(body=StreamValue(field.stream_block, [ ('text', "Content", '1'), ('text', "New Content", '2'), ])), ) self.assertEqual(comparison.htmldiff(), '<div class="comparison__child-object">Content</div>\n<div class="comparison__child-object addition">New Content</div>') self.assertIsInstance(comparison.htmldiff(), SafeText) self.assertTrue(comparison.has_changed()) def test_delete_block(self): field = StreamPage._meta.get_field('body') comparison = self.comparison_class( field, StreamPage(body=StreamValue(field.stream_block, [ ('text', "Content", '1'), ('text', "Content Foo", '2'), ('text', "Content Bar", '3'), ])), StreamPage(body=StreamValue(field.stream_block, [ ('text', "Content", '1'), ('text', "Content Bar", '3'), ])), ) self.assertEqual(comparison.htmldiff(), '<div class="comparison__child-object">Content</div>\n<div class="comparison__child-object deletion">Content 
Foo</div>\n<div class="comparison__child-object">Content Bar</div>') self.assertIsInstance(comparison.htmldiff(), SafeText) self.assertTrue(comparison.has_changed()) def test_edit_block(self): field = StreamPage._meta.get_field('body') comparison = self.comparison_class( field, StreamPage(body=StreamValue(field.stream_block, [ ('text', "Content", '1'), ('text', "Content Foo", '2'), ('text', "Content Bar", '3'), ])), StreamPage(body=StreamValue(field.stream_block, [ ('text', "Content", '1'), ('text', "Content Baz", '2'), ('text', "Content Bar", '3'), ])), ) self.assertEqual(comparison.htmldiff(), '<div class="comparison__child-object">Content</div>\n<div class="comparison__child-object">Content <span class="deletion">Foo</span><span class="addition">Baz</span></div>\n<div class="comparison__child-object">Content Bar</div>') self.assertIsInstance(comparison.htmldiff(), SafeText) self.assertTrue(comparison.has_changed()) def test_has_changed_richtext(self): field = StreamPage._meta.get_field('body') comparison = self.comparison_class( field, StreamPage(body=StreamValue(field.stream_block, [ ('rich_text', "<b>Original</b> content", '1'), ])), StreamPage(body=StreamValue(field.stream_block, [ ('rich_text', "Modified <i>content</i>", '1'), ])), ) self.assertEqual(comparison.htmldiff(), '<div class="comparison__child-object"><span class="deletion">Original</span><span class="addition">Modified</span> content</div>') self.assertIsInstance(comparison.htmldiff(), SafeText) self.assertTrue(comparison.has_changed()) def test_htmldiff_escapes_value(self): field = StreamPage._meta.get_field('body') comparison = self.comparison_class( field, StreamPage(body=StreamValue(field.stream_block, [ ('text', "Original content", '1'), ])), StreamPage(body=StreamValue(field.stream_block, [ ('text', '<script type="text/javascript">doSomethingBad();</script>', '1'), ])), ) self.assertEqual(comparison.htmldiff(), '<div class="comparison__child-object"><span class="deletion">Original 
content</span><span class="addition">&lt;script type=&quot;text/javascript&quot;&gt;doSomethingBad();&lt;/script&gt;</span></div>') self.assertIsInstance(comparison.htmldiff(), SafeText) def test_htmldiff_escapes_value_richtext(self): field = StreamPage._meta.get_field('body') comparison = self.comparison_class( field, StreamPage(body=StreamValue(field.stream_block, [ ('rich_text', "Original content", '1'), ])), StreamPage(body=StreamValue(field.stream_block, [ ('rich_text', '<script type="text/javascript">doSomethingBad();</script>', '1'), ])), ) self.assertEqual(comparison.htmldiff(), '<div class="comparison__child-object"><span class="deletion">Original content</span><span class="addition">doSomethingBad();</span></div>') self.assertIsInstance(comparison.htmldiff(), SafeText) def test_compare_structblock(self): field = StreamPage._meta.get_field('body') comparison = self.comparison_class( field, StreamPage(body=StreamValue(field.stream_block, [ ('product', {'name': 'a packet of rolos', 'price': '75p'}, '1'), ])), StreamPage(body=StreamValue(field.stream_block, [ ('product', {'name': 'a packet of rolos', 'price': '85p'}, '1'), ])), ) expected = """ <div class="comparison__child-object"><dl> <dt>Name</dt> <dd>a packet of rolos</dd> <dt>Price</dt> <dd><span class="deletion">75p</span><span class="addition">85p</span></dd> </dl></div> """ self.assertHTMLEqual(comparison.htmldiff(), expected) self.assertIsInstance(comparison.htmldiff(), SafeText) self.assertTrue(comparison.has_changed()) def test_compare_imagechooserblock(self): image_model = get_image_model() test_image_1 = image_model.objects.create( title="Test image 1", file=get_test_image_file(), ) test_image_2 = image_model.objects.create( title="Test image 2", file=get_test_image_file(), ) field = StreamPage._meta.get_field('body') comparison = self.comparison_class( field, StreamPage(body=StreamValue(field.stream_block, [ ('image', test_image_1, '1'), ])), StreamPage(body=StreamValue(field.stream_block, [ 
('image', test_image_2, '1'), ])), ) result = comparison.htmldiff() self.assertIn('<div class="preview-image deletion">', result) self.assertIn('alt="Test image 1"', result) self.assertIn('<div class="preview-image addition">', result) self.assertIn('alt="Test image 2"', result) self.assertIsInstance(result, SafeText) self.assertTrue(comparison.has_changed()) class TestChoiceFieldComparison(TestCase): comparison_class = compare.ChoiceFieldComparison def test_hasnt_changed(self): comparison = self.comparison_class( EventPage._meta.get_field('audience'), EventPage(audience="public"), EventPage(audience="public"), ) self.assertTrue(comparison.is_field) self.assertFalse(comparison.is_child_relation) self.assertEqual(comparison.field_label(), "Audience") self.assertEqual(comparison.htmldiff(), 'Public') self.assertIsInstance(comparison.htmldiff(), SafeText) self.assertFalse(comparison.has_changed()) def test_has_changed(self): comparison = self.comparison_class( EventPage._meta.get_field('audience'), EventPage(audience="public"), EventPage(audience="private"), ) self.assertEqual(comparison.htmldiff(), '<span class="deletion">Public</span><span class="addition">Private</span>') self.assertIsInstance(comparison.htmldiff(), SafeText) self.assertTrue(comparison.has_changed()) class TestTagsFieldComparison(TestCase): comparison_class = compare.TagsFieldComparison def test_hasnt_changed(self): a = TaggedPage() a.tags.add('wagtail') a.tags.add('bird') b = TaggedPage() b.tags.add('wagtail') b.tags.add('bird') comparison = self.comparison_class(TaggedPage._meta.get_field('tags'), a, b) self.assertTrue(comparison.is_field) self.assertFalse(comparison.is_child_relation) self.assertEqual(comparison.field_label(), "Tags") self.assertEqual(comparison.htmldiff(), 'wagtail, bird') self.assertIsInstance(comparison.htmldiff(), SafeText) self.assertFalse(comparison.has_changed()) def test_has_changed(self): a = TaggedPage() a.tags.add('wagtail') a.tags.add('bird') b = TaggedPage() 
b.tags.add('wagtail') b.tags.add('motacilla') comparison = self.comparison_class(TaggedPage._meta.get_field('tags'), a, b) self.assertEqual(comparison.htmldiff(), 'wagtail, <span class="deletion">bird</span>, <span class="addition">motacilla</span>') self.assertIsInstance(comparison.htmldiff(), SafeText) self.assertTrue(comparison.has_changed()) class TestM2MFieldComparison(TestCase): fixtures = ['test.json'] comparison_class = compare.M2MFieldComparison def setUp(self): self.meetings_category = EventCategory.objects.create(name='Meetings') self.parties_category = EventCategory.objects.create(name='Parties') self.holidays_category = EventCategory.objects.create(name='Holidays') def test_hasnt_changed(self): christmas_event = EventPage.objects.get(url_path='/home/events/christmas/') saint_patrick_event = EventPage.objects.get(url_path='/home/events/saint-patrick/') christmas_event.categories = [self.meetings_category, self.parties_category] saint_patrick_event.categories = [self.meetings_category, self.parties_category] comparison = self.comparison_class( EventPage._meta.get_field('categories'), christmas_event, saint_patrick_event ) self.assertTrue(comparison.is_field) self.assertFalse(comparison.is_child_relation) self.assertEqual(comparison.field_label(), "Categories") self.assertFalse(comparison.has_changed()) self.assertEqual(comparison.htmldiff(), 'Meetings, Parties') self.assertIsInstance(comparison.htmldiff(), SafeText) def test_has_changed(self): christmas_event = EventPage.objects.get(url_path='/home/events/christmas/') saint_patrick_event = EventPage.objects.get(url_path='/home/events/saint-patrick/') christmas_event.categories = [self.meetings_category, self.parties_category] saint_patrick_event.categories = [self.meetings_category, self.holidays_category] comparison = self.comparison_class( EventPage._meta.get_field('categories'), christmas_event, saint_patrick_event ) self.assertTrue(comparison.has_changed()) self.assertEqual(comparison.htmldiff(), 
'Meetings, <span class="deletion">Parties</span>, <span class="addition">Holidays</span>') self.assertIsInstance(comparison.htmldiff(), SafeText) class TestForeignObjectComparison(TestCase): comparison_class = compare.ForeignObjectComparison @classmethod def setUpTestData(cls): image_model = get_image_model() cls.test_image_1 = image_model.objects.create( title="Test image 1", file=get_test_image_file(), ) cls.test_image_2 = image_model.objects.create( title="Test image 2", file=get_test_image_file(), ) def test_hasnt_changed(self): comparison = self.comparison_class( EventPage._meta.get_field('feed_image'), EventPage(feed_image=self.test_image_1), EventPage(feed_image=self.test_image_1), ) self.assertTrue(comparison.is_field) self.assertFalse(comparison.is_child_relation) self.assertEqual(comparison.field_label(), "Feed image") self.assertEqual(comparison.htmldiff(), 'Test image 1') self.assertIsInstance(comparison.htmldiff(), SafeText) self.assertFalse(comparison.has_changed()) def test_has_changed(self): comparison = self.comparison_class( EventPage._meta.get_field('feed_image'), EventPage(feed_image=self.test_image_1), EventPage(feed_image=self.test_image_2), ) self.assertEqual(comparison.htmldiff(), '<span class="deletion">Test image 1</span><span class="addition">Test image 2</span>') self.assertIsInstance(comparison.htmldiff(), SafeText) self.assertTrue(comparison.has_changed()) class TestChildRelationComparison(TestCase): field_comparison_class = compare.FieldComparison comparison_class = compare.ChildRelationComparison def test_hasnt_changed(self): # Two event pages with speaker called "Father Christmas". Neither of # the speaker objects have an ID so this tests that the code can match # the two together by field content. 
event_page = EventPage(title="Event page", slug="event") event_page.speakers.add(EventPageSpeaker( first_name="Father", last_name="Christmas", )) modified_event_page = EventPage(title="Event page", slug="event") modified_event_page.speakers.add(EventPageSpeaker( first_name="Father", last_name="Christmas", )) comparison = self.comparison_class( EventPage._meta.get_field('speaker'), [ partial(self.field_comparison_class, EventPageSpeaker._meta.get_field('first_name')), partial(self.field_comparison_class, EventPageSpeaker._meta.get_field('last_name')), ], event_page, modified_event_page, ) self.assertFalse(comparison.is_field) self.assertTrue(comparison.is_child_relation) self.assertEqual(comparison.field_label(), "Speaker") self.assertFalse(comparison.has_changed()) # Check mapping objs_a = list(comparison.val_a.all()) objs_b = list(comparison.val_b.all()) map_forwards, map_backwards, added, deleted = comparison.get_mapping(objs_a, objs_b) self.assertEqual(map_forwards, {0: 0}) self.assertEqual(map_backwards, {0: 0}) self.assertEqual(added, []) self.assertEqual(deleted, []) def test_has_changed(self): # Father Christmas renamed to Santa Claus. And Father Ted added. # Father Christmas should be mapped to Father Ted because they # are most alike. 
Santa claus should be displayed as "new" event_page = EventPage(title="Event page", slug="event") event_page.speakers.add(EventPageSpeaker( first_name="Father", last_name="Christmas", sort_order=0, )) modified_event_page = EventPage(title="Event page", slug="event") modified_event_page.speakers.add(EventPageSpeaker( first_name="Santa", last_name="Claus", sort_order=0, )) modified_event_page.speakers.add(EventPageSpeaker( first_name="Father", last_name="Ted", sort_order=1, )) comparison = self.comparison_class( EventPage._meta.get_field('speaker'), [ partial(self.field_comparison_class, EventPageSpeaker._meta.get_field('first_name')), partial(self.field_comparison_class, EventPageSpeaker._meta.get_field('last_name')), ], event_page, modified_event_page, ) self.assertFalse(comparison.is_field) self.assertTrue(comparison.is_child_relation) self.assertEqual(comparison.field_label(), "Speaker") self.assertTrue(comparison.has_changed()) # Check mapping objs_a = list(comparison.val_a.all()) objs_b = list(comparison.val_b.all()) map_forwards, map_backwards, added, deleted = comparison.get_mapping(objs_a, objs_b) self.assertEqual(map_forwards, {0: 1}) # Map Father Christmas to Father Ted self.assertEqual(map_backwards, {1: 0}) # Map Father Ted ot Father Christmas self.assertEqual(added, [0]) # Add Santa Claus self.assertEqual(deleted, []) def test_has_changed_with_same_id(self): # Father Christmas renamed to Santa Claus, but this time the ID of the # child object remained the same. 
It should now be detected as the same # object event_page = EventPage(title="Event page", slug="event") event_page.speakers.add(EventPageSpeaker( id=1, first_name="Father", last_name="Christmas", sort_order=0, )) modified_event_page = EventPage(title="Event page", slug="event") modified_event_page.speakers.add(EventPageSpeaker( id=1, first_name="Santa", last_name="Claus", sort_order=0, )) modified_event_page.speakers.add(EventPageSpeaker( first_name="Father", last_name="Ted", sort_order=1, )) comparison = self.comparison_class( EventPage._meta.get_field('speaker'), [ partial(self.field_comparison_class, EventPageSpeaker._meta.get_field('first_name')), partial(self.field_comparison_class, EventPageSpeaker._meta.get_field('last_name')), ], event_page, modified_event_page, ) self.assertFalse(comparison.is_field) self.assertTrue(comparison.is_child_relation) self.assertEqual(comparison.field_label(), "Speaker") self.assertTrue(comparison.has_changed()) # Check mapping objs_a = list(comparison.val_a.all()) objs_b = list(comparison.val_b.all()) map_forwards, map_backwards, added, deleted = comparison.get_mapping(objs_a, objs_b) self.assertEqual(map_forwards, {0: 0}) # Map Father Christmas to Santa Claus self.assertEqual(map_backwards, {0: 0}) # Map Santa Claus to Father Christmas self.assertEqual(added, [1]) # Add Father Ted self.assertEqual(deleted, []) def test_hasnt_changed_with_different_id(self): # Both of the child objects have the same field content but have a # different ID so they should be detected as separate objects event_page = EventPage(title="Event page", slug="event") event_page.speakers.add(EventPageSpeaker( id=1, first_name="Father", last_name="Christmas", )) modified_event_page = EventPage(title="Event page", slug="event") modified_event_page.speakers.add(EventPageSpeaker( id=2, first_name="Father", last_name="Christmas", )) comparison = self.comparison_class( EventPage._meta.get_field('speaker'), [ partial(self.field_comparison_class, 
EventPageSpeaker._meta.get_field('first_name')), partial(self.field_comparison_class, EventPageSpeaker._meta.get_field('last_name')), ], event_page, modified_event_page, ) self.assertFalse(comparison.is_field) self.assertTrue(comparison.is_child_relation) self.assertEqual(comparison.field_label(), "Speaker") self.assertTrue(comparison.has_changed()) # Check mapping objs_a = list(comparison.val_a.all()) objs_b = list(comparison.val_b.all()) map_forwards, map_backwards, added, deleted = comparison.get_mapping(objs_a, objs_b) self.assertEqual(map_forwards, {}) self.assertEqual(map_backwards, {}) self.assertEqual(added, [0]) # Add new Father Christmas self.assertEqual(deleted, [0]) # Delete old Father Christmas class TestChildObjectComparison(TestCase): field_comparison_class = compare.FieldComparison comparison_class = compare.ChildObjectComparison def test_same_object(self): obj_a = EventPageSpeaker( first_name="Father", last_name="Christmas", ) obj_b = EventPageSpeaker( first_name="Father", last_name="Christmas", ) comparison = self.comparison_class( EventPageSpeaker, [ partial(self.field_comparison_class, EventPageSpeaker._meta.get_field('first_name')), partial(self.field_comparison_class, EventPageSpeaker._meta.get_field('last_name')), ], obj_a, obj_b, ) self.assertFalse(comparison.is_addition()) self.assertFalse(comparison.is_deletion()) self.assertFalse(comparison.has_changed()) self.assertEqual(comparison.get_position_change(), 0) self.assertEqual(comparison.get_num_differences(), 0) def test_different_object(self): obj_a = EventPageSpeaker( first_name="Father", last_name="Christmas", ) obj_b = EventPageSpeaker( first_name="Santa", last_name="Claus", ) comparison = self.comparison_class( EventPageSpeaker, [ partial(self.field_comparison_class, EventPageSpeaker._meta.get_field('first_name')), partial(self.field_comparison_class, EventPageSpeaker._meta.get_field('last_name')), ], obj_a, obj_b, ) self.assertFalse(comparison.is_addition()) 
self.assertFalse(comparison.is_deletion()) self.assertTrue(comparison.has_changed()) self.assertEqual(comparison.get_position_change(), 0) self.assertEqual(comparison.get_num_differences(), 2) def test_moved_object(self): obj_a = EventPageSpeaker( first_name="Father", last_name="Christmas", sort_order=1, ) obj_b = EventPageSpeaker( first_name="Father", last_name="Christmas", sort_order=5, ) comparison = self.comparison_class( EventPageSpeaker, [ partial(self.field_comparison_class, EventPageSpeaker._meta.get_field('first_name')), partial(self.field_comparison_class, EventPageSpeaker._meta.get_field('last_name')), ], obj_a, obj_b, ) self.assertFalse(comparison.is_addition()) self.assertFalse(comparison.is_deletion()) self.assertFalse(comparison.has_changed()) self.assertEqual(comparison.get_position_change(), 4) self.assertEqual(comparison.get_num_differences(), 0) def test_addition(self): obj = EventPageSpeaker( first_name="Father", last_name="Christmas", ) comparison = self.comparison_class( EventPageSpeaker, [ partial(self.field_comparison_class, EventPageSpeaker._meta.get_field('first_name')), partial(self.field_comparison_class, EventPageSpeaker._meta.get_field('last_name')), ], None, obj, ) self.assertTrue(comparison.is_addition()) self.assertFalse(comparison.is_deletion()) self.assertFalse(comparison.has_changed()) self.assertIsNone(comparison.get_position_change(), 0) self.assertEqual(comparison.get_num_differences(), 0) def test_deletion(self): obj = EventPageSpeaker( first_name="Father", last_name="Christmas", ) comparison = self.comparison_class( EventPageSpeaker, [ partial(self.field_comparison_class, EventPageSpeaker._meta.get_field('first_name')), partial(self.field_comparison_class, EventPageSpeaker._meta.get_field('last_name')), ], obj, None, ) self.assertFalse(comparison.is_addition()) self.assertTrue(comparison.is_deletion()) self.assertFalse(comparison.has_changed()) self.assertIsNone(comparison.get_position_change()) 
self.assertEqual(comparison.get_num_differences(), 0) class TestChildRelationComparisonUsingPK(TestCase): """Test related objects can be compred if they do not use id for primary key""" field_comparison_class = compare.FieldComparison comparison_class = compare.ChildRelationComparison def test_has_changed_with_same_id(self): # Head Count was changed but the PK of the child object remained the same. # It should be detected as the same object event_page = EventPage(title="Semi Finals", slug="semi-finals-2018") event_page.head_counts.add(HeadCountRelatedModelUsingPK( custom_id=1, head_count=22, )) modified_event_page = EventPage(title="Semi Finals", slug="semi-finals-2018") modified_event_page.head_counts.add(HeadCountRelatedModelUsingPK( custom_id=1, head_count=23, )) modified_event_page.head_counts.add(HeadCountRelatedModelUsingPK( head_count=25, )) comparison = self.comparison_class( EventPage._meta.get_field('head_counts'), [partial(self.field_comparison_class, HeadCountRelatedModelUsingPK._meta.get_field('head_count'))], event_page, modified_event_page, ) self.assertFalse(comparison.is_field) self.assertTrue(comparison.is_child_relation) self.assertEqual(comparison.field_label(), 'Head counts') self.assertTrue(comparison.has_changed()) # Check mapping objs_a = list(comparison.val_a.all()) objs_b = list(comparison.val_b.all()) map_forwards, map_backwards, added, deleted = comparison.get_mapping(objs_a, objs_b) self.assertEqual(map_forwards, {0: 0}) # map head count 22 to 23 self.assertEqual(map_backwards, {0: 0}) # map head count 23 to 22 self.assertEqual(added, [1]) # add second head count self.assertEqual(deleted, []) def test_hasnt_changed_with_different_id(self): # Both of the child objects have the same field content but have a # different PK (ID) so they should be detected as separate objects event_page = EventPage(title="Finals", slug="finals-event-abc") event_page.head_counts.add(HeadCountRelatedModelUsingPK( custom_id=1, head_count=220 )) 
modified_event_page = EventPage(title="Finals", slug="finals-event-abc") modified_event_page.head_counts.add(HeadCountRelatedModelUsingPK( custom_id=2, head_count=220 )) comparison = self.comparison_class( EventPage._meta.get_field('head_counts'), [partial(self.field_comparison_class, HeadCountRelatedModelUsingPK._meta.get_field('head_count'))], event_page, modified_event_page, ) self.assertFalse(comparison.is_field) self.assertTrue(comparison.is_child_relation) self.assertEqual(comparison.field_label(), "Head counts") self.assertTrue(comparison.has_changed()) # Check mapping objs_a = list(comparison.val_a.all()) objs_b = list(comparison.val_b.all()) map_forwards, map_backwards, added, deleted = comparison.get_mapping(objs_a, objs_b) self.assertEqual(map_forwards, {}) self.assertEqual(map_backwards, {}) self.assertEqual(added, [0]) # Add new head count self.assertEqual(deleted, [0]) # Delete old head count
{ "repo_name": "nealtodd/wagtail", "path": "wagtail/admin/tests/test_compare.py", "copies": "1", "size": "33922", "license": "bsd-3-clause", "hash": -4811280329643717000, "line_mean": 38.9552414605, "line_max": 279, "alpha_frac": 0.6167383999, "autogenerated": false, "ratio": 4.106779661016949, "config_test": true, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.522351806091695, "avg_score": null, "num_lines": null }
from functools import partial

from django.urls import reverse
from rest_framework import status
from rest_framework.test import APITestCase

from core.tests.factories import UserFactory
from core.tests.mixins import HTTPMethodStatusCodeTestMixin

from bank_accounts.models import Account
from bank_accounts.tests.factories import AccountFactory


class TestAccountAPIViewsAuthorizedUser(HTTPMethodStatusCodeTestMixin, APITestCase):
    """API tests for the bank-account list/detail endpoints as an
    authenticated user: CRUD happy paths, validation errors, ownership
    (404 for other users' accounts) and method-not-allowed checks."""

    def setUp(self):
        # 'account_detail' is a partial so tests can supply kwargs={'pk': ...}
        # per call; 'account_list' is resolved once up front.
        self.urls = {
            'account_list': reverse('bank-accounts-api:account-list'),
            'account_detail': partial(reverse, 'bank-accounts-api:account-detail')
        }
        self.user = UserFactory()
        self.client.force_authenticate(self.user)
        # Field sets the serializer is expected to expose ('read') and
        # accept ('write'); 'id' is read-only.
        self.account_required_fields = {
            'read': [
                'id',
                'first_name',
                'last_name',
                'iban'
            ],
            'write': [
                'first_name',
                'last_name',
                'iban'
            ]
        }
        # 256 chars: presumably one past the model's max_length of 255 —
        # TODO confirm against the Account model.
        invalid_string = 'a' * 256
        self.account_data = {
            'valid': {
                'first_name': 'Tony',
                'last_name': 'Stark',
                'iban': 'DE89370400440532013000'
            },
            'invalid': {
                'first_name': invalid_string,
                'last_name': invalid_string,
                'iban': 'invalid'
            }
        }

    def assertSortedForceListEqual(self, list1, list2, msg=None):
        """Assert two iterables hold the same items, ignoring order."""
        list1 = sorted(list(list1))
        list2 = sorted(list(list2))
        self.assertListEqual(list1, list2, msg)

    def test_account_list_api_view(self):
        """
        Ensure API view will return list of accounts (contains required
        fields) that belongs to authenticated user.
        """
        another_user = UserFactory()
        # NOTE(review): binding is unused; create_batch is called for its
        # DB side effect (accounts that must NOT appear in the response).
        another_account_list = AccountFactory.create_batch(
            5, manager=another_user
        )
        user_account_list = AccountFactory.create_batch(5, manager=self.user)
        user_account_id_list = [a.id for a in user_account_list]

        response = self.client.get(self.urls['account_list'])

        self.assertEqual(response.status_code, status.HTTP_200_OK)
        response_account_id_list = []
        for account in response.data:
            self.assertSortedForceListEqual(
                account.keys(),
                self.account_required_fields['read']
            )
            response_account_id_list.append(account['id'])
        self.assertSortedForceListEqual(user_account_id_list,
                                        response_account_id_list)

    def test_create_account_with_empty_data(self):
        """
        Ensure API view will return appropriate errors if empty data
        has been sent.
        """
        data = {}

        response = self.client.post(self.urls['account_list'], data)

        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
        # Every writable field should be reported as missing.
        self.assertSortedForceListEqual(response.data.keys(),
                                        self.account_required_fields['write'])
        self.assertFalse(Account.objects.exists())

    def test_create_account_with_invalid_data(self):
        """
        Ensure API view will return appropriate errors if invalid data
        has been sent.
        """
        data = self.account_data['invalid']

        response = self.client.post(self.urls['account_list'], data)

        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
        self.assertSortedForceListEqual(response.data.keys(), data.keys())
        self.assertFalse(Account.objects.exists())

    def test_create_account_with_valid_data(self):
        """
        Ensure correct account will be created in DB.
        """
        data = self.account_data['valid']

        response = self.client.post(self.urls['account_list'], data)

        self.assertEqual(response.status_code, status.HTTP_201_CREATED)
        created_account = Account.objects.get(iban=data['iban'])
        for field_name, value in data.items():
            self.assertEqual(getattr(created_account, field_name), value)
        # Manager must be set server-side from the authenticated user.
        self.assertEqual(created_account.manager, self.user)

    def test_account_detail_api_view_owner(self):
        """
        Ensure API view will return account (contains required fields)
        that belongs to authenticated user.
        """
        account = AccountFactory(manager=self.user)
        url = self.urls['account_detail'](kwargs={'pk': account.pk})

        response = self.client.get(url)

        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertSortedForceListEqual(response.data.keys(),
                                        self.account_required_fields['read'])

    def test_account_detail_api_view_not_owner(self):
        """
        Ensure API view will return 404 status code to request with
        methods: GET, PUT, DELETE, if requested account does not belongs
        to authenticated user.
        """
        another_user = UserFactory()
        account = AccountFactory(manager=another_user)

        self.run_method_status_code_check(
            url=self.urls['account_detail'](kwargs={'pk': account.pk}),
            methods=['get', 'put', 'delete'],
            status_code=status.HTTP_404_NOT_FOUND
        )

    def test_update_account_with_empty_data(self):
        """
        Ensure account will not be modified in DB and API view will
        return appropriate errors if empty data has been sent.
        """
        account = AccountFactory(manager=self.user)
        required_fields = self.account_required_fields['write']
        data = {}
        url = self.urls['account_detail'](kwargs={'pk': account.pk})

        response = self.client.put(url, data)

        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
        self.assertSortedForceListEqual(response.data.keys(), required_fields)
        # Re-fetch and verify nothing was persisted.
        account_from_db = Account.objects.get(pk=account.pk)
        for field in required_fields:
            self.assertEqual(getattr(account, field),
                             getattr(account_from_db, field))

    def test_update_account_with_invalid_data(self):
        """
        Ensure account will not be modified in DB and API view will
        return appropriate errors if invalid data has been sent.
        """
        account = AccountFactory(manager=self.user)
        data = self.account_data['invalid']
        url = self.urls['account_detail'](kwargs={'pk': account.pk})

        response = self.client.put(url, data)

        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
        self.assertSortedForceListEqual(response.data.keys(), data.keys())
        account_from_db = Account.objects.get(pk=account.pk)
        for field_name in data.keys():
            self.assertEqual(getattr(account, field_name),
                             getattr(account_from_db, field_name))

    def test_update_account_with_valid_data(self):
        """
        Ensure account will be correctly modified in DB.
        """
        data = self.account_data['valid']
        account = AccountFactory(manager=self.user)
        url = self.urls['account_detail'](kwargs={'pk': account.pk})

        response = self.client.put(url, data)

        self.assertEqual(response.status_code, status.HTTP_200_OK)
        account_from_db = Account.objects.get(pk=account.pk)
        for field_name, value in data.items():
            self.assertEqual(getattr(account_from_db, field_name), value)

    def test_delete_account(self):
        """
        Ensure account will be deleted from DB.
        """
        account = AccountFactory(manager=self.user)
        url = self.urls['account_detail'](kwargs={'pk': account.pk})

        response = self.client.delete(url)

        self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT)
        # NOTE(review): the "as _" capture is unused; kept as-is.
        with self.assertRaises(Account.DoesNotExist) as _:
            Account.objects.get(pk=account.pk)

    def test_account_list_api_view_not_allowed_methods(self):
        """
        Ensure API view will return 405 status code to request with not
        allowed methods: PUT, PATCH, DELETE
        """
        self.run_method_status_code_check(
            url=self.urls['account_list'],
            methods=['put', 'patch', 'delete'],
            status_code=status.HTTP_405_METHOD_NOT_ALLOWED
        )

    def test_account_detail_api_view_not_allowed_methods(self):
        """
        Ensure API view will return 405 status code to request with not
        allowed PATCH method
        """
        account = AccountFactory(manager=self.user)

        self.run_method_status_code_check(
            url=self.urls['account_detail'](kwargs={'pk': account.pk}),
            methods=['patch'],
            status_code=status.HTTP_405_METHOD_NOT_ALLOWED
        )
{ "repo_name": "krayevidi/IBANapp", "path": "src/backend/bank_accounts/tests/api_views_authorized_user_tests.py", "copies": "1", "size": "8885", "license": "mit", "hash": -7002952432038970000, "line_mean": 39.7568807339, "line_max": 87, "alpha_frac": 0.6048396173, "autogenerated": false, "ratio": 4.194995278564684, "config_test": true, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.5299834895864683, "avg_score": null, "num_lines": null }
from functools import partial

from django.utils.translation import ugettext_lazy as _
from django.contrib import admin
from mezzanine.conf import settings
from mezzanine.core.admin import BaseTranslationModelAdmin

from .models import BlockCategory, Block, RichBlock, ImageBlock


class BlockCategoryAdmin(admin.ModelAdmin):
    # Minimal admin for categories: everything keyed off the title.
    ordering = ('title',)
    list_display = ('title',)
    search_fields = ('title',)
    fields = ("title",)


class BlockAdmin(BaseTranslationModelAdmin):
    # Plain-text block admin; translation support comes from the base class.
    ordering = ('title', 'category')
    list_display = ('title', 'category', 'login_required', 'show_title')
    list_editable = ('login_required', 'show_title', 'category')
    search_fields = ('title', 'content')
    fieldsets = (
        (None, {
            "fields": ["title", "content", "category"],
        }),
        (_("Advanced data"), {
            "fields": ['login_required', 'show_title', "slug"],
            "classes": ("collapse-closed",)
        }),
    )


class RichBlockAdmin(BaseTranslationModelAdmin):
    # NOTE(review): identical configuration to BlockAdmin; kept separate,
    # presumably so the two model admins can diverge independently.
    ordering = ('title', 'category')
    list_display = ('title', 'category', 'login_required', 'show_title')
    list_editable = ('login_required', 'show_title', 'category')
    search_fields = ('title', 'content')
    fieldsets = (
        (None, {
            "fields": ["title", "content", "category"],
        }),
        (_("Advanced data"), {
            "fields": ['login_required', 'show_title', "slug"],
            "classes": ("collapse-closed",)
        }),
    )


class ImageBlockAdmin(BaseTranslationModelAdmin):
    # Image block admin: thumbnail shown in the changelist, size/quality
    # editable inline from the list view.
    ordering = ('title', 'category')
    list_display = ('admin_thumb', 'title', 'category', 'height', 'width', 'quality', 'login_required', 'show_title')
    list_display_links = ('admin_thumb', 'title')
    list_editable = ('login_required', 'show_title', 'category', 'height', 'width', 'quality')
    search_fields = ('title', 'description', 'url')
    fieldsets = (
        (None, {
            "fields": ["title", "description", "category", "image", 'url'],
        }),
        (_("Advanced data"), {
            "fields": [('height', 'width', 'quality'), 'login_required', 'show_title', "slug"],
            "classes": ("collapse-closed",)
        }),
    )


def in_menu(app_name):
    """
    Hide from the admin menu unless explicitly set in ``ADMIN_MENU_ORDER``.
    """
    # ADMIN_MENU_ORDER is a sequence of (category, items) pairs; each item
    # is either a model-path string or a tuple/list containing one.
    for category, items in settings.ADMIN_MENU_ORDER:
        for item in items:
            if isinstance(item, (tuple, list)):
                if app_name in item:
                    return True
            elif item == app_name:
                return True
    # Implicitly returns None (falsy) when the model is not listed.

# Fix the app_name argument per admin class via functools.partial; a
# partial stored on the class is not a descriptor, so lookup on an
# instance does not bind self.
BlockCategoryAdmin.in_menu = partial(in_menu, "mezzanine_blocks.BlockCategory")
BlockAdmin.in_menu = partial(in_menu, "mezzanine_blocks.Block")
RichBlockAdmin.in_menu = partial(in_menu, "mezzanine_blocks.RichBlock")
ImageBlockAdmin.in_menu = partial(in_menu, "mezzanine_blocks.ImageBlock")

admin.site.register(BlockCategory, BlockCategoryAdmin)
admin.site.register(Block, BlockAdmin)
admin.site.register(RichBlock, RichBlockAdmin)
admin.site.register(ImageBlock, ImageBlockAdmin)
{ "repo_name": "gradel/mezzanine-blocks", "path": "mezzanine_blocks/admin.py", "copies": "2", "size": "3067", "license": "bsd-2-clause", "hash": -4233607855893234000, "line_mean": 32.7032967033, "line_max": 117, "alpha_frac": 0.6123247473, "autogenerated": false, "ratio": 3.83375, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.54460747473, "avg_score": null, "num_lines": null }
from functools import partial from electrum.i18n import _ from electrum.plugin import hook from electrum.wallet import Standard_Wallet, Abstract_Wallet from ..hw_wallet.qt import QtHandlerBase, QtPluginBase from ..hw_wallet.plugin import only_hook_if_libraries_available from .digitalbitbox import DigitalBitboxPlugin class Plugin(DigitalBitboxPlugin, QtPluginBase): icon_unpaired = "digitalbitbox_unpaired.png" icon_paired = "digitalbitbox.png" def create_handler(self, window): return DigitalBitbox_Handler(window) @only_hook_if_libraries_available @hook def receive_menu(self, menu, addrs, wallet: Abstract_Wallet): if type(wallet) is not Standard_Wallet: return keystore = wallet.get_keystore() if type(keystore) is not self.keystore_class: return if not self.is_mobile_paired(): return if len(addrs) == 1: addr = addrs[0] if wallet.get_txin_type(addr) != 'p2pkh': return def show_address(): keystore.thread.add(partial(self.show_address, wallet, addr, keystore)) menu.addAction(_("Show on {}").format(self.device), show_address) class DigitalBitbox_Handler(QtHandlerBase): def __init__(self, win): super(DigitalBitbox_Handler, self).__init__(win, 'Digital Bitbox')
{ "repo_name": "spesmilo/electrum", "path": "electrum/plugins/digitalbitbox/qt.py", "copies": "1", "size": "1383", "license": "mit", "hash": -8456346065590484000, "line_mean": 29.7333333333, "line_max": 87, "alpha_frac": 0.6608821403, "autogenerated": false, "ratio": 3.809917355371901, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.9966064647187052, "avg_score": 0.0009469696969696971, "num_lines": 45 }
from functools import partial from electrum.i18n import _ from electrum.plugin import hook from electrum.wallet import Standard_Wallet from ..hw_wallet.qt import QtHandlerBase, QtPluginBase from ..hw_wallet.plugin import only_hook_if_libraries_available from .digitalbitbox import DigitalBitboxPlugin class Plugin(DigitalBitboxPlugin, QtPluginBase): icon_unpaired = "digitalbitbox_unpaired.png" icon_paired = "digitalbitbox.png" def create_handler(self, window): return DigitalBitbox_Handler(window) @only_hook_if_libraries_available @hook def receive_menu(self, menu, addrs, wallet): if type(wallet) is not Standard_Wallet: return keystore = wallet.get_keystore() if type(keystore) is not self.keystore_class: return if not self.is_mobile_paired(): return if not keystore.is_p2pkh(): return if len(addrs) == 1: def show_address(): keystore.thread.add(partial(self.show_address, wallet, addrs[0], keystore)) menu.addAction(_("Show on {}").format(self.device), show_address) class DigitalBitbox_Handler(QtHandlerBase): def __init__(self, win): super(DigitalBitbox_Handler, self).__init__(win, 'Digital Bitbox')
{ "repo_name": "neocogent/electrum", "path": "electrum/plugins/digitalbitbox/qt.py", "copies": "2", "size": "1304", "license": "mit", "hash": -2641719993141253000, "line_mean": 27.9777777778, "line_max": 91, "alpha_frac": 0.6671779141, "autogenerated": false, "ratio": 3.835294117647059, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 1, "avg_score": 0.00024154589371980676, "num_lines": 45 }
from functools import partial from electrum_ltc.i18n import _ from electrum_ltc.plugin import hook from electrum_ltc.wallet import Standard_Wallet, Abstract_Wallet from ..hw_wallet.qt import QtHandlerBase, QtPluginBase from ..hw_wallet.plugin import only_hook_if_libraries_available from .digitalbitbox import DigitalBitboxPlugin class Plugin(DigitalBitboxPlugin, QtPluginBase): icon_unpaired = "digitalbitbox_unpaired.png" icon_paired = "digitalbitbox.png" def create_handler(self, window): return DigitalBitbox_Handler(window) @only_hook_if_libraries_available @hook def receive_menu(self, menu, addrs, wallet: Abstract_Wallet): if type(wallet) is not Standard_Wallet: return keystore = wallet.get_keystore() if type(keystore) is not self.keystore_class: return if not self.is_mobile_paired(): return if len(addrs) == 1: addr = addrs[0] if wallet.get_txin_type(addr) != 'p2pkh': return def show_address(): keystore.thread.add(partial(self.show_address, wallet, addr, keystore)) menu.addAction(_("Show on {}").format(self.device), show_address) class DigitalBitbox_Handler(QtHandlerBase): def __init__(self, win): super(DigitalBitbox_Handler, self).__init__(win, 'Digital Bitbox')
{ "repo_name": "pooler/electrum-ltc", "path": "electrum_ltc/plugins/digitalbitbox/qt.py", "copies": "2", "size": "1395", "license": "mit", "hash": 2589778659147113000, "line_mean": 30, "line_max": 87, "alpha_frac": 0.6616487455, "autogenerated": false, "ratio": 3.75, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.54116487455, "avg_score": null, "num_lines": null }
from functools import partial from electrum_mona.i18n import _ from electrum_mona.plugin import hook from electrum_mona.wallet import Standard_Wallet, Abstract_Wallet from ..hw_wallet.qt import QtHandlerBase, QtPluginBase from ..hw_wallet.plugin import only_hook_if_libraries_available from .digitalbitbox import DigitalBitboxPlugin class Plugin(DigitalBitboxPlugin, QtPluginBase): icon_unpaired = "digitalbitbox_unpaired.png" icon_paired = "digitalbitbox.png" def create_handler(self, window): return DigitalBitbox_Handler(window) @only_hook_if_libraries_available @hook def receive_menu(self, menu, addrs, wallet: Abstract_Wallet): if type(wallet) is not Standard_Wallet: return keystore = wallet.get_keystore() if type(keystore) is not self.keystore_class: return if not self.is_mobile_paired(): return if len(addrs) == 1: addr = addrs[0] if wallet.get_txin_type(addr) != 'p2pkh': return def show_address(): keystore.thread.add(partial(self.show_address, wallet, addr, keystore)) menu.addAction(_("Show on {}").format(self.device), show_address) class DigitalBitbox_Handler(QtHandlerBase): def __init__(self, win): super(DigitalBitbox_Handler, self).__init__(win, 'Digital Bitbox')
{ "repo_name": "wakiyamap/electrum-mona", "path": "electrum_mona/plugins/digitalbitbox/qt.py", "copies": "1", "size": "1398", "license": "mit", "hash": -8842686966415837000, "line_mean": 30.0666666667, "line_max": 87, "alpha_frac": 0.6623748212, "autogenerated": false, "ratio": 3.7580645161290325, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.4920439337329033, "avg_score": null, "num_lines": null }
from functools import partial

from ...external.qt import QtGui
from ...external.qt.QtCore import Qt

from ...core import message as msg
from ...clients.histogram_client import HistogramClient
from ..widget_properties import (connect_int_spin,
                                 ButtonProperty,
                                 FloatLineProperty,
                                 ValueProperty)

from ..glue_toolbar import GlueToolbar
from ..mouse_mode import HRangeMode

from .data_viewer import DataViewer
from .mpl_widget import MplWidget, defer_draw
from ..qtutil import pretty_number, load_ui

__all__ = ['HistogramWidget']

# Data sets larger than this trigger a confirmation dialog before plotting.
WARN_SLOW = 10000000


def _hash(x):
    # Stable string key for storing python objects in Qt item data
    # (see the note in _update_attributes on why setData isn't used directly).
    return str(id(x))


class HistogramWidget(DataViewer):
    """Data viewer that renders a 1D histogram of a selected component,
    with Qt controls for bins, limits, log scaling and normalization."""

    LABEL = "Histogram"
    _property_set = DataViewer._property_set + \
        'component xlog ylog normed cumulative autoscale xmin xmax nbins'.split(
        )

    xmin = FloatLineProperty('ui.xmin', 'Minimum value')
    xmax = FloatLineProperty('ui.xmax', 'Maximum value')
    normed = ButtonProperty('ui.normalized_box', 'Normalized?')
    autoscale = ButtonProperty('ui.autoscale_box',
                               'Autoscale view to histogram?')
    cumulative = ButtonProperty('ui.cumulative_box', 'Cumulative?')
    nbins = ValueProperty('ui.binSpinBox', 'Number of bins')
    xlog = ButtonProperty('ui.xlog_box', 'Log-scale the x axis?')
    ylog = ButtonProperty('ui.ylog_box', 'Log-scale the y axis?')

    def __init__(self, session, parent=None):
        super(HistogramWidget, self).__init__(session, parent)

        self.central_widget = MplWidget()
        self.setCentralWidget(self.central_widget)
        self.option_widget = QtGui.QWidget()
        self.ui = load_ui('histogramwidget', self.option_widget)
        self._tweak_geometry()

        self.client = HistogramClient(self._data,
                                      self.central_widget.canvas.fig,
                                      artist_container=self._container)
        self._init_limits()
        self.make_toolbar()
        self._connect()
        # maps _hash(componentID) -> componentID
        self._component_hashes = {}

    def _init_limits(self):
        # Restrict the limit line-edits to floats with up to 7 decimals,
        # then seed them from the client's current x range.
        validator = QtGui.QDoubleValidator(None)
        validator.setDecimals(7)
        self.ui.xmin.setValidator(validator)
        self.ui.xmax.setValidator(validator)
        lo, hi = self.client.xlimits
        self.ui.xmin.setText(str(lo))
        self.ui.xmax.setText(str(hi))

    def _tweak_geometry(self):
        self.central_widget.resize(600, 400)
        self.resize(self.central_widget.size())

    def _connect(self):
        # Wire every UI control to the corresponding client property.
        ui = self.ui
        cl = self.client

        ui.attributeCombo.currentIndexChanged.connect(
            self._set_attribute_from_combo)
        ui.attributeCombo.currentIndexChanged.connect(
            self._update_minmax_labels)
        connect_int_spin(cl, 'nbins', ui.binSpinBox)
        ui.normalized_box.toggled.connect(partial(setattr, cl, 'normed'))
        ui.autoscale_box.toggled.connect(partial(setattr, cl, 'autoscale'))
        ui.cumulative_box.toggled.connect(partial(setattr, cl, 'cumulative'))
        ui.xlog_box.toggled.connect(partial(setattr, cl, 'xlog'))
        ui.ylog_box.toggled.connect(partial(setattr, cl, 'ylog'))
        ui.xmin.editingFinished.connect(self._set_limits)
        ui.xmax.editingFinished.connect(self._set_limits)

    @defer_draw
    def _set_limits(self):
        # Push edited text-box values to the client as the new x range.
        lo = float(self.ui.xmin.text())
        hi = float(self.ui.xmax.text())
        self.client.xlimits = lo, hi

    def _update_minmax_labels(self):
        lo, hi = pretty_number(self.client.xlimits)
        self.ui.xmin.setText(lo)
        self.ui.xmax.setText(hi)

    def make_toolbar(self):
        result = GlueToolbar(self.central_widget.canvas, self,
                             name='Histogram')
        for mode in self._mouse_modes():
            result.add_mode(mode)
        self.addToolBar(result)
        return result

    def _mouse_modes(self):
        # Only a horizontal-range selection mode makes sense for a histogram.
        axes = self.client.axes

        def apply_mode(mode):
            return self.apply_roi(mode.roi())

        rect = HRangeMode(axes, roi_callback=apply_mode)
        return [rect]

    @defer_draw
    def _update_attributes(self):
        """Repopulate the combo box that selects the quantity to plot"""
        combo = self.ui.attributeCombo
        component = self.component
        combo.blockSignals(True)
        combo.clear()

        # implementation note:
        # PySide doesn't robustly store python objects with setData
        # use _hash(x) instead

        model = QtGui.QStandardItemModel()
        data_ids = set(_hash(d) for d in self._data)
        self._component_hashes = dict((_hash(c), c) for d in self._data
                                      for c in d.components)

        for d in self._data:
            if d not in self._container:
                continue
            # Data rows act as disabled group headers ...
            item = QtGui.QStandardItem(d.label)
            item.setData(_hash(d), role=Qt.UserRole)
            assert item.data(Qt.UserRole) == _hash(d)
            item.setFlags(item.flags() & ~Qt.ItemIsEnabled)
            model.appendRow(item)
            # ... followed by that data set's numeric components.
            for c in d.visible_components:
                if not d.get_component(c).numeric:
                    continue
                item = QtGui.QStandardItem(c.label)
                item.setData(_hash(c), role=Qt.UserRole)
                model.appendRow(item)
        combo.setModel(model)

        # separators below data items
        for i in range(combo.count()):
            if combo.itemData(i) in data_ids:
                combo.insertSeparator(i + 1)

        combo.blockSignals(False)

        if component is not None:
            self.component = component
        else:
            combo.setCurrentIndex(2)  # skip first data + separator

        self._set_attribute_from_combo()

    @property
    def component(self):
        # Currently selected ComponentID, or None if nothing resolves.
        combo = self.ui.attributeCombo
        index = combo.currentIndex()
        return self._component_hashes.get(combo.itemData(index), None)

    @component.setter
    def component(self, component):
        combo = self.ui.attributeCombo
        if combo.count() == 0:  # cold start problem, when restoring
            self._update_attributes()

        # combo.findData doesn't seem to work robustly
        for i in range(combo.count()):
            data = combo.itemData(i)
            if data == _hash(component):
                combo.setCurrentIndex(i)
                return
        raise IndexError("Component not present: %s" % component)

    @defer_draw
    def _set_attribute_from_combo(self, *args):
        self.client.set_component(self.component)
        self._update_window_title()

    @defer_draw
    def add_data(self, data):
        """ Add data item to combo box.
            If first addition, also update attributes """
        if self.data_present(data):
            return True

        if data.size > WARN_SLOW and not self._confirm_large_data(data):
            return False

        self.client.add_layer(data)
        self._update_attributes()
        self._update_minmax_labels()

        return True

    def add_subset(self, subset):
        # Subsets are handled by the client; nothing to do at widget level.
        pass

    def _remove_data(self, data):
        """ Remove data item from the combo box """
        # NOTE(review): intentionally a no-op here; hooked up in
        # register_to_hub — confirm whether combo cleanup is still needed.
        pass

    def data_present(self, data):
        return data in self._container

    def register_to_hub(self, hub):
        super(HistogramWidget, self).register_to_hub(hub)
        self.client.register_to_hub(hub)
        hub.subscribe(self,
                      msg.DataCollectionDeleteMessage,
                      handler=lambda x: self._remove_data(x.data))
        hub.subscribe(self,
                      msg.DataUpdateMessage,
                      handler=lambda *args: self._update_labels())
        hub.subscribe(self,
                      msg.ComponentsChangedMessage,
                      handler=lambda x: self._update_attributes())

    def unregister(self, hub):
        self.client.unregister(hub)
        hub.unsubscribe_all(self)

    def _update_window_title(self):
        c = self.client.component
        if c is not None:
            label = str(c.label)
        else:
            label = 'Histogram'
        self.setWindowTitle(label)

    def _update_labels(self):
        self._update_window_title()
        self._update_attributes()

    def __str__(self):
        return "Histogram Widget"

    def options_widget(self):
        return self.option_widget
{ "repo_name": "bsipocz/glue", "path": "glue/qt/widgets/histogram_widget.py", "copies": "1", "size": "8416", "license": "bsd-3-clause", "hash": 5826805330127545000, "line_mean": 32.664, "line_max": 80, "alpha_frac": 0.5980275665, "autogenerated": false, "ratio": 4.026794258373206, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 1, "avg_score": 0.00004938271604938271, "num_lines": 250 }
from functools import partial from ...external.qt import QtGui from ...external.qt.QtCore import Qt from ... import core from ...clients.scatter_client import ScatterClient from ..glue_toolbar import GlueToolbar from ..mouse_mode import (RectangleMode, CircleMode, PolyMode, HRangeMode, VRangeMode) from ...core.callback_property import add_callback from ..ui.scatterwidget import Ui_ScatterWidget from .data_viewer import DataViewer from .mpl_widget import MplWidget from ..widget_properties import ButtonProperty, FloatLineProperty from ..qtutil import pretty_number WARN_SLOW = 1000000 # max number of points which render quickly def connect_bool_button(client, prop, widget): add_callback(client, prop, widget.setChecked) widget.toggled.connect(partial(setattr, client, prop)) def connect_float_edit(client, prop, widget): v = QtGui.QDoubleValidator(None) v.setDecimals(4) widget.setValidator(v) def update_prop(): val = widget.text() try: setattr(client, prop, float(val)) except ValueError: setattr(client, prop, 0) def update_widget(val): widget.setText(pretty_number(val)) add_callback(client, prop, update_widget) widget.editingFinished.connect(update_prop) update_widget(getattr(client, prop)) class ScatterWidget(DataViewer): LABEL = "Scatter Plot" xlog = ButtonProperty('ui.xLogCheckBox') ylog = ButtonProperty('ui.yLogCheckBox') xflip = ButtonProperty('ui.xFlipCheckBox') yflip = ButtonProperty('ui.yFlipCheckBox') xmin = FloatLineProperty('ui.xmin') xmax = FloatLineProperty('ui.xmax') ymin = FloatLineProperty('ui.ymin') ymax = FloatLineProperty('ui.ymax') hidden = ButtonProperty('ui.hidden_attributes') def __init__(self, data, parent=None): super(ScatterWidget, self).__init__(data, parent) self.central_widget = MplWidget() self.option_widget = QtGui.QWidget() self.setCentralWidget(self.central_widget) self.ui = Ui_ScatterWidget() self.ui.setupUi(self.option_widget) self._tweak_geometry() self._collection = data self.client = ScatterClient(self._collection, 
self.central_widget.canvas.fig, artist_container=self._container) self._connect() self.unique_fields = set() self.make_toolbar() self.statusBar().setSizeGripEnabled(False) self.setFocusPolicy(Qt.StrongFocus) def _tweak_geometry(self): self.central_widget.resize(600, 400) self.resize(self.central_widget.size()) def _connect(self): ui = self.ui cl = self.client connect_bool_button(cl, 'xlog', ui.xLogCheckBox) connect_bool_button(cl, 'ylog', ui.yLogCheckBox) connect_bool_button(cl, 'xflip', ui.xFlipCheckBox) connect_bool_button(cl, 'yflip', ui.yFlipCheckBox) ui.xAxisComboBox.currentIndexChanged.connect(self.update_xatt) ui.yAxisComboBox.currentIndexChanged.connect(self.update_yatt) ui.hidden_attributes.toggled.connect(lambda x: self._update_combos()) ui.swapAxes.clicked.connect(self.swap_axes) ui.snapLimits.clicked.connect(cl.snap) connect_float_edit(cl, 'xmin', ui.xmin) connect_float_edit(cl, 'xmax', ui.xmax) connect_float_edit(cl, 'ymin', ui.ymin) connect_float_edit(cl, 'ymax', ui.ymax) def _choose_add_data(self): choices = dict([(d.label, d) for d in self._collection]) dialog = QtGui.QInputDialog() choice, isok = dialog.getItem(self, "Data Chooser | Scatter Plot", "Choose a data set to add", choices.keys()) if not isok: return data = choices[str(choice)] self.add_data(data) def make_toolbar(self): result = GlueToolbar(self.central_widget.canvas, self, name='Scatter Plot') for mode in self._mouse_modes(): result.add_mode(mode) self.addToolBar(result) return result def _mouse_modes(self): axes = self.client.axes rect = RectangleMode(axes, roi_callback=self.apply_roi) xra = HRangeMode(axes, roi_callback=self.apply_roi) yra = VRangeMode(axes, roi_callback=self.apply_roi) circ = CircleMode(axes, roi_callback=self.apply_roi) poly = PolyMode(axes, roi_callback=self.apply_roi) return [rect, xra, yra, circ, poly] def apply_roi(self, mode): roi = mode.roi() self.client.apply_roi(roi) def _update_combos(self): """ Update combo boxes to current attribute fields""" layer_ids = 
[] for l in self.client.data: if not self.client.is_layer_present(l): continue for lid in self.client.plottable_attributes( l, show_hidden=self.hidden): if lid not in layer_ids: layer_ids.append(lid) xcombo = self.ui.xAxisComboBox ycombo = self.ui.yAxisComboBox xcombo.blockSignals(True) ycombo.blockSignals(True) xid = xcombo.itemData(xcombo.currentIndex()) yid = ycombo.itemData(ycombo.currentIndex()) xcombo.clear() ycombo.clear() for lid in layer_ids: xcombo.addItem(lid.label, userData=lid) ycombo.addItem(lid.label, userData=lid) for index, combo in zip([xid, yid], [xcombo, ycombo]): try: combo.setCurrentIndex(layer_ids.index(index)) except ValueError: combo.setCurrentIndex(0) xcombo.blockSignals(False) ycombo.blockSignals(False) def add_data(self, data): """Add a new data set to the widget :rtype: bool Returns True if the addition was expected, False otherwise """ if self.client.is_layer_present(data): return if data.size > WARN_SLOW and not self._confirm_large_data(data): return False first_layer = self.client.layer_count == 0 self.client.add_data(data) self._update_combos() if first_layer: # forces both x and y axes to be rescaled self.update_xatt(None) self.update_yatt(None) self.ui.xAxisComboBox.setCurrentIndex(0) if len(data.visible_components) > 1: self.ui.yAxisComboBox.setCurrentIndex(1) else: self.ui.yAxisComboBox.setCurrentIndex(0) self._update_window_title() return True def add_subset(self, subset): """Add a subset to the widget :rtype: bool: Returns True if the addition was accepted, False otherwise """ if self.client.is_layer_present(subset): return data = subset.data if data.size > WARN_SLOW and not self._confirm_large_data(data): return False first_layer = self.client.layer_count == 0 self.client.add_layer(subset) self._update_combos() if first_layer: # forces both x and y axes to be rescaled self.update_xatt(None) self.update_yatt(None) self.ui.xAxisComboBox.setCurrentIndex(0) if len(data.visible_components) > 1: 
self.ui.yAxisComboBox.setCurrentIndex(1) else: self.ui.yAxisComboBox.setCurrentIndex(0) self._update_window_title() return True def register_to_hub(self, hub): super(ScatterWidget, self).register_to_hub(hub) self.client.register_to_hub(hub) hub.subscribe(self, core.message.DataUpdateMessage, lambda x: self._sync_labels()) hub.subscribe(self, core.message.ComponentsChangedMessage, lambda x: self._update_combos()) def unregister(self, hub): hub.unsubscribe_all(self.client) hub.unsubscribe_all(self) def swap_axes(self): xid = self.ui.xAxisComboBox.currentIndex() yid = self.ui.yAxisComboBox.currentIndex() xlog = self.ui.xLogCheckBox.isChecked() ylog = self.ui.yLogCheckBox.isChecked() xflip = self.ui.xFlipCheckBox.isChecked() yflip = self.ui.yFlipCheckBox.isChecked() self.ui.xAxisComboBox.setCurrentIndex(yid) self.ui.yAxisComboBox.setCurrentIndex(xid) self.ui.xLogCheckBox.setChecked(ylog) self.ui.yLogCheckBox.setChecked(xlog) self.ui.xFlipCheckBox.setChecked(yflip) self.ui.yFlipCheckBox.setChecked(xflip) def update_xatt(self, index): combo = self.ui.xAxisComboBox component_id = combo.itemData(combo.currentIndex()) assert isinstance(component_id, core.data.ComponentID) self.client.xatt = component_id def update_yatt(self, index): combo = self.ui.yAxisComboBox component_id = combo.itemData(combo.currentIndex()) assert isinstance(component_id, core.data.ComponentID) self.client.yatt = component_id def _update_window_title(self): data = self.client.data label = ', '.join([d.label for d in data if self.client.is_visible(d)]) self.setWindowTitle(label) def _sync_labels(self): self._update_window_title() def __str__(self): return "Scatter Widget" def options_widget(self): return self.option_widget
{ "repo_name": "glue-viz/glue-qt", "path": "glue/qt/widgets/scatter_widget.py", "copies": "1", "size": "9598", "license": "bsd-3-clause", "hash": -5413475124257074000, "line_mean": 32.3263888889, "line_max": 77, "alpha_frac": 0.6180454261, "autogenerated": false, "ratio": 3.833067092651757, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.9950616487005726, "avg_score": 0.0000992063492063492, "num_lines": 288 }
from functools import partial from ...external.qt.QtGui import (QWidget, QAction, QToolButton, QIcon, QMessageBox) from ...external.qt.QtCore import Qt import matplotlib.cm as cm from .data_viewer import DataViewer from ... import core from ...clients.image_client import ImageClient from ..mouse_mode import (RectangleMode, CircleMode, PolyMode, ContrastMode, ContourMode) from ..glue_toolbar import GlueToolbar from .mpl_widget import MplWidget from ..ui.imagewidget import Ui_ImageWidget from .. import glue_qt_resources # pylint: disable=W0611 from ..decorators import set_cursor from ..qtutil import cmap2pixmap, select_rgb WARN_THRESH = 10000000 # warn when contouring large images class ImageWidget(DataViewer): LABEL = "Image Viewer" def __init__(self, data, parent=None): super(ImageWidget, self).__init__(data, parent) self.central_widget = MplWidget() self.option_widget = QWidget() self.setCentralWidget(self.central_widget) self.ui = Ui_ImageWidget() self.ui.setupUi(self.option_widget) self.client = ImageClient(data, self.central_widget.canvas.fig, artist_container=self._container) self._tweak_geometry() self._create_actions() self.make_toolbar() self._connect() self._init_widgets() self.set_data(0) self.set_orientation(0) self.statusBar().setSizeGripEnabled(False) self.setFocusPolicy(Qt.StrongFocus) def _tweak_geometry(self): self.central_widget.resize(600, 400) self.resize(self.central_widget.size()) def _create_actions(self): #pylint: disable=E1101 def act(name, cmap): a = QAction(name, self) a.triggered.connect(lambda *args: self.client.set_cmap(cmap)) pm = cmap2pixmap(cmap) a.setIcon(QIcon(pm)) return a self._cmaps = [] self._cmaps.append(act('Gray', cm.gray)) self._cmaps.append(act('Purple-Blue', cm.PuBu)) self._cmaps.append(act('Yellow-Green-Blue', cm.YlGnBu)) self._cmaps.append(act('Yellow-Orange-Red', cm.YlOrRd)) self._cmaps.append(act('Red-Purple', cm.RdPu)) self._cmaps.append(act('Blue-Green', cm.BuGn)) self._cmaps.append(act('Hot', cm.hot)) 
self._cmaps.append(act('Red-Blue', cm.RdBu)) self._cmaps.append(act('Red-Yellow-Blue', cm.RdYlBu)) self._cmaps.append(act('Purple-Orange', cm.PuOr)) self._cmaps.append(act('Purple-Green', cm.PRGn)) self._rgb_add = QAction('RGB', self) self._rgb_add.triggered.connect(self._add_rgb) def _add_rgb(self): drgb = select_rgb(self._data, default=self.current_data) if drgb is not None: self.client.add_rgb_layer(*drgb) def make_toolbar(self): result = GlueToolbar(self.central_widget.canvas, self, name='Image') for mode in self._mouse_modes(): result.add_mode(mode) tb = QToolButton() tb.setWhatsThis("Set color scale") tb.setToolTip("Set color scale") icon = QIcon(":icons/glue_rainbow.png") tb.setIcon(icon) tb.setPopupMode(QToolButton.InstantPopup) tb.addActions(self._cmaps) result.addWidget(tb) result.addAction(self._rgb_add) #connect viewport update buttons to client commands to #allow resampling cl = self.client result.buttons['HOME'].triggered.connect(cl.check_update) result.buttons['FORWARD'].triggered.connect(cl.check_update) result.buttons['BACK'].triggered.connect(cl.check_update) self.addToolBar(result) return result @set_cursor(Qt.WaitCursor) def apply_roi(self, mode): roi = mode.roi() self.client.apply_roi(roi) def _mouse_modes(self): axes = self.client.axes rect = RectangleMode(axes, roi_callback=self.apply_roi) circ = CircleMode(axes, roi_callback=self.apply_roi) poly = PolyMode(axes, roi_callback=self.apply_roi) contrast = ContrastMode(axes, move_callback=self._set_norm) contour = ContourMode(axes, release_callback=self._contour_roi) return [rect, circ, poly, contour, contrast] def _init_widgets(self): self.ui.imageSlider.hide() self.ui.sliceComboBox.hide() self.ui.sliceComboBox.addItems(["xy", "xz", "yz"]) def add_data(self, data): """Private method to ingest new data into widget""" self.client.add_layer(data) self.add_data_to_combo(data) self.set_data(self._data_index(data)) return True def add_subset(self, subset): self.client.add_scatter_layer(subset) assert 
subset in self.client.artists def _data_index(self, data): combo = self.ui.displayDataCombo for i in range(combo.count()): if combo.itemData(i) is data: return i return None def add_data_to_combo(self, data): """ Add a data object to the combo box, if not already present """ if not self.client.can_image_data(data): return combo = self.ui.displayDataCombo label = data.label pos = combo.findText(label) if pos == -1: combo.addItem(label, userData=data) assert combo.findText(label) >= 0 @property def current_data(self): if self.ui.displayDataCombo.count() == 0: return index = self.ui.displayDataCombo.currentIndex() return self.ui.displayDataCombo.itemData(index) def set_data(self, index): if index is None: return if self.ui.displayDataCombo.count() == 0: return data = self.ui.displayDataCombo.itemData(index) self.client.set_data(data) self.ui.displayDataCombo.setCurrentIndex(index) self.set_attribute_combo(data) if not self.client.is_3D: self.ui.imageSlider.hide() self.ui.sliceComboBox.hide() self.ui.orientationLabel.hide() else: self.ui.imageSlider.show() self.ui.sliceComboBox.show() self.ui.orientationLabel.show() self.set_slider_range() self._update_window_title() def set_attribute(self, index): combo = self.ui.attributeComboBox component_id = combo.itemData(index) self.client.set_attribute(component_id) self.ui.attributeComboBox.setCurrentIndex(index) self._update_window_title() def set_attribute_combo(self, data): """ Update attribute combo box to reflect components in data""" combo = self.ui.attributeComboBox combo.blockSignals(True) combo.clear() fields = data.visible_components index = 0 for i, f in enumerate(fields): combo.addItem(f.label, userData=f) if f == self.client.display_attribute: index = i combo.blockSignals(False) combo.setCurrentIndex(index) self.set_attribute(index) def set_slider(self, index): self.client.slice_ind = index self.ui.imageSlider.setValue(index) def set_orientation(self, ori): # ignore for 2D data (sometimes gets triggered when 
widgets # switch state) if not self.client.is_3D: return self.client.set_slice_ori(ori) self.ui.sliceComboBox.setCurrentIndex(ori) self.set_slider_range() def set_slider_range(self): self.ui.imageSlider.setRange(*self.client.slice_bounds()) def _connect(self): ui = self.ui ui.displayDataCombo.currentIndexChanged.connect(self.set_data) ui.attributeComboBox.currentIndexChanged.connect(self.set_attribute) ui.sliceComboBox.currentIndexChanged.connect(self.set_orientation) ui.imageSlider.sliderMoved.connect(self.set_slider) def register_to_hub(self, hub): super(ImageWidget, self).register_to_hub(hub) self.client.register_to_hub(hub) dc_filt = lambda x: x.sender is self.client._data layer_present_filter = lambda x: x.data in self.client.artists hub.subscribe(self, core.message.DataCollectionAddMessage, handler=lambda x: self.add_data_to_combo(x.data), filter=dc_filt) hub.subscribe(self, core.message.DataCollectionDeleteMessage, handler=lambda x: self.remove_data_from_combo(x.data), filter=dc_filt) hub.subscribe(self, core.message.DataUpdateMessage, handler=lambda x: self._sync_data_labels() ) hub.subscribe(self, core.message.ComponentsChangedMessage, handler=lambda x: self.set_attribute_combo(x.data), filter=layer_present_filter) def unregister(self, hub): for obj in [self, self.client]: hub.unsubscribe_all(obj) def remove_data_from_combo(self, data): """ Remvoe a data object from the combo box, if present """ combo = self.ui.displayDataCombo pos = combo.findText(data.label) if pos >= 0: combo.removeItem(pos) def _set_norm(self, mode): """ Use the `ContrastMouseMode` to adjust the transfer function """ clip_lo, clip_hi = mode.get_clip_percentile() stretch = mode.stretch return self.client.set_norm(clip_lo=clip_lo, clip_hi=clip_hi, stretch=stretch, bias=mode.bias, contrast=mode.contrast) @set_cursor(Qt.WaitCursor) def _contour_roi(self, mode): """ Callback for ContourMode. 
Set edit_subset as new ROI """ im = self.client.display_data att = self.client.display_attribute if im is None or att is None: return if im.size > WARN_THRESH and not self._confirm_large_image(im): return roi = mode.roi(im[att]) if roi: self.client.apply_roi(roi) def _update_window_title(self): if self.client.display_data is None: title = '' else: title = "%s - %s" % (self.client.display_data.label, self.client.display_attribute.label) self.setWindowTitle(title) def _update_data_combo(self): combo = self.ui.displayDataCombo for i in range(combo.count()): combo.setItemText(i, combo.itemData(i).label) def _sync_data_labels(self): self._update_window_title() self._update_data_combo() def __str__(self): return "Image Widget" def _confirm_large_image(self, data): """Ask user to confirm expensive contour operations :rtype: bool. Whether the user wishes to continue """ warn_msg = ("WARNING: Image has %i pixels, and may render slowly." " Continue?" % data.size) title = "Contour large image?" ok = QMessageBox.Ok cancel = QMessageBox.Cancel buttons = ok | cancel result = QMessageBox.question(self, title, warn_msg, buttons=buttons, defaultButton=cancel) return result == ok def options_widget(self): return self.option_widget
{ "repo_name": "glue-viz/glue-qt", "path": "glue/qt/widgets/image_widget.py", "copies": "1", "size": "11658", "license": "bsd-3-clause", "hash": -2915125001636152300, "line_mean": 33.8, "line_max": 76, "alpha_frac": 0.5979584834, "autogenerated": false, "ratio": 3.8487949818421923, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.9944993487552591, "avg_score": 0.00035199553792024396, "num_lines": 335 }
from functools import partial from flask import ( abort, flash, jsonify, redirect, render_template, request, url_for, ) from flask_login import current_user from notifications_python_client.errors import HTTPError from notifications_utils import LETTER_MAX_PAGE_COUNT, SMS_CHAR_COUNT_LIMIT from notifications_utils.pdf import is_letter_too_long from app import ( current_service, format_delta, nl2br, service_api_client, template_folder_api_client, template_statistics_client, ) from app.formatters import character_count, message_count from app.main import main, no_cookie from app.main.forms import ( BroadcastTemplateForm, EmailTemplateForm, LetterTemplateForm, LetterTemplatePostageForm, SearchTemplatesForm, SetTemplateSenderForm, SMSTemplateForm, TemplateAndFoldersSelectionForm, TemplateFolderForm, ) from app.main.views.send import get_sender_details from app.models.service import Service from app.models.template_list import TemplateList, TemplateLists from app.template_previews import TemplatePreview, get_page_count_for_letter from app.utils import NOTIFICATION_TYPES, should_skip_template_page from app.utils.templates import get_template from app.utils.user import user_has_permissions, user_is_platform_admin form_objects = { 'email': EmailTemplateForm, 'sms': SMSTemplateForm, 'letter': LetterTemplateForm, 'broadcast': BroadcastTemplateForm, } @main.route("/services/<uuid:service_id>/templates/<uuid:template_id>") @user_has_permissions() def view_template(service_id, template_id): template = current_service.get_template(template_id) template_folder = current_service.get_template_folder(template['folder']) user_has_template_permission = current_user.has_template_folder_permission(template_folder) if should_skip_template_page(template['template_type']): return redirect(url_for( '.set_sender', service_id=service_id, template_id=template_id )) page_count = get_page_count_for_letter(template) return render_template( 'views/templates/template.html', template=get_template( 
template, current_service, letter_preview_url=url_for( 'no_cookie.view_letter_template_preview', service_id=service_id, template_id=template_id, filetype='png', ), show_recipient=True, page_count=get_page_count_for_letter(template), ), template_postage=template["postage"], user_has_template_permission=user_has_template_permission, letter_too_long=is_letter_too_long(page_count), letter_max_pages=LETTER_MAX_PAGE_COUNT, page_count=page_count ) @main.route("/services/<uuid:service_id>/templates/all", methods=['GET', 'POST']) @main.route("/services/<uuid:service_id>/templates", methods=['GET', 'POST']) @main.route("/services/<uuid:service_id>/templates/folders/<uuid:template_folder_id>", methods=['GET', 'POST']) @main.route("/services/<uuid:service_id>/templates/<template_type:template_type>", methods=['GET', 'POST']) @main.route("/services/<uuid:service_id>/templates/all/folders/<uuid:template_folder_id>", methods=['GET', 'POST']) @main.route( "/services/<uuid:service_id>/templates/<template_type:template_type>/folders/<uuid:template_folder_id>", methods=['GET', 'POST'] ) @user_has_permissions() def choose_template(service_id, template_type='all', template_folder_id=None): template_folder = current_service.get_template_folder(template_folder_id) user_has_template_folder_permission = current_user.has_template_folder_permission(template_folder) template_list = TemplateList(current_service, template_type, template_folder_id, current_user) templates_and_folders_form = TemplateAndFoldersSelectionForm( all_template_folders=current_service.get_user_template_folders(current_user), template_list=template_list, template_type=template_type, available_template_types=current_service.available_template_types, allow_adding_copy_of_template=( current_service.all_templates or len(current_user.service_ids) > 1 ), ) option_hints = {template_folder_id: 'current folder'} single_notification_channel = None notification_channels = 
list(set(current_service.permissions).intersection(NOTIFICATION_TYPES)) if len(notification_channels) == 1: single_notification_channel = notification_channels[0] if request.method == 'POST' and templates_and_folders_form.validate_on_submit(): if not current_user.has_permissions('manage_templates'): abort(403) try: return process_folder_management_form(templates_and_folders_form, template_folder_id) except HTTPError as e: flash(e.message) elif templates_and_folders_form.trying_to_add_unavailable_template_type: return redirect(url_for( '.action_blocked', service_id=current_service.id, notification_type=templates_and_folders_form.add_template_by_template_type.data, return_to='add_new_template', )) if 'templates_and_folders' in templates_and_folders_form.errors: flash('Select at least one template or folder') initial_state = request.args.get('initial_state') if request.method == 'GET' and initial_state: templates_and_folders_form.op = initial_state return render_template( 'views/templates/choose.html', current_template_folder_id=template_folder_id, template_folder_path=current_service.get_template_folder_path(template_folder_id), template_list=template_list, show_search_box=current_service.count_of_templates_and_folders > 7, show_template_nav=( current_service.has_multiple_template_types and (len(current_service.all_templates) > 2) ), template_nav_items=get_template_nav_items(template_folder_id), template_type=template_type, search_form=SearchTemplatesForm(current_service.api_keys), templates_and_folders_form=templates_and_folders_form, move_to_children=templates_and_folders_form.move_to.children(), user_has_template_folder_permission=user_has_template_folder_permission, single_notification_channel=single_notification_channel, option_hints=option_hints ) def process_folder_management_form(form, current_folder_id): current_service.get_template_folder_with_user_permission_or_403(current_folder_id, current_user) new_folder_id = None if form.is_add_template_op: return 
_add_template_by_type( form.add_template_by_template_type.data, current_folder_id, ) if form.is_add_folder_op: new_folder_id = template_folder_api_client.create_template_folder( current_service.id, name=form.get_folder_name(), parent_id=current_folder_id ) if form.is_move_op: # if we've just made a folder, we also want to move there move_to_id = new_folder_id or form.move_to.data current_service.move_to_folder( ids_to_move=form.templates_and_folders.data, move_to=move_to_id ) return redirect(request.url) def get_template_nav_label(value): return { 'all': 'All', 'sms': 'Text message', 'email': 'Email', 'letter': 'Letter', 'broadcast': 'Broadcast', }[value] def get_template_nav_items(template_folder_id): return [ ( get_template_nav_label(key), key, url_for( '.choose_template', service_id=current_service.id, template_type=key, template_folder_id=template_folder_id ), '' ) for key in ['all'] + current_service.available_template_types ] @no_cookie.route("/services/<uuid:service_id>/templates/<uuid:template_id>.<filetype>") @user_has_permissions() def view_letter_template_preview(service_id, template_id, filetype): if filetype not in ('pdf', 'png'): abort(404) db_template = current_service.get_template(template_id) return TemplatePreview.from_database_object(db_template, filetype, page=request.args.get('page')) @no_cookie.route("/templates/letter-preview-image/<filename>") @user_is_platform_admin def letter_branding_preview_image(filename): template = { 'subject': 'An example letter', 'content': ( 'Lorem Ipsum is simply dummy text of the printing and typesetting ' 'industry.\n\nLorem Ipsum has been the industry’s standard dummy ' 'text ever since the 1500s, when an unknown printer took a galley ' 'of type and scrambled it to make a type specimen book.\n\n' '# History\n\nIt has survived not only\n\n' '* five centuries\n' '* but also the leap into electronic typesetting\n\n' 'It was popularised in the 1960s with the release of Letraset ' 'sheets containing Lorem Ipsum 
passages, and more recently with ' 'desktop publishing software like Aldus PageMaker including ' 'versions of Lorem Ipsum.\n\n' 'The point of using Lorem Ipsum is that it has a more-or-less ' 'normal distribution of letters, as opposed to using ‘Content ' 'here, content here’, making it look like readable English.' ), 'template_type': 'letter', } filename = None if filename == 'no-branding' else filename return TemplatePreview.from_example_template(template, filename) def _view_template_version(service_id, template_id, version, letters_as_pdf=False): return dict(template=get_template( current_service.get_template(template_id, version=version), current_service, letter_preview_url=url_for( 'no_cookie.view_template_version_preview', service_id=service_id, template_id=template_id, version=version, filetype='png', ) if not letters_as_pdf else None )) @main.route("/services/<uuid:service_id>/templates/<uuid:template_id>/version/<int:version>") @user_has_permissions() def view_template_version(service_id, template_id, version): return render_template( 'views/templates/template_history.html', **_view_template_version(service_id=service_id, template_id=template_id, version=version) ) @no_cookie.route("/services/<uuid:service_id>/templates/<uuid:template_id>/version/<int:version>.<filetype>") @user_has_permissions() def view_template_version_preview(service_id, template_id, version, filetype): db_template = current_service.get_template(template_id, version=version) return TemplatePreview.from_database_object(db_template, filetype) def _add_template_by_type(template_type, template_folder_id): if template_type == 'copy-existing': return redirect(url_for( '.choose_template_to_copy', service_id=current_service.id, )) if template_type == 'letter': blank_letter = service_api_client.create_service_template( 'New letter template', 'letter', 'Body', current_service.id, 'Main heading', 'normal', template_folder_id ) return redirect(url_for( '.view_template', 
service_id=current_service.id, template_id=blank_letter['data']['id'], )) return redirect(url_for( '.add_service_template', service_id=current_service.id, template_type=template_type, template_folder_id=template_folder_id, )) @main.route("/services/<uuid:service_id>/templates/copy") @main.route("/services/<uuid:service_id>/templates/copy/from-folder/<uuid:from_folder>") @main.route("/services/<uuid:service_id>/templates/copy/from-service/<uuid:from_service>") @main.route( "/services/<uuid:service_id>/templates/copy/from-service/<uuid:from_service>/from-folder/<uuid:from_folder>" ) @user_has_permissions('manage_templates') def choose_template_to_copy( service_id, from_service=None, from_folder=None, ): if from_service: current_user.belongs_to_service_or_403(from_service) service = Service( service_api_client.get_service(from_service)['data'] ) return render_template( 'views/templates/copy.html', services_templates_and_folders=TemplateList( service, template_folder_id=from_folder, user=current_user ), template_folder_path=service.get_template_folder_path(from_folder), from_service=service, search_form=SearchTemplatesForm(current_service.api_keys), ) else: return render_template( 'views/templates/copy.html', services_templates_and_folders=TemplateLists(current_user), search_form=SearchTemplatesForm(current_service.api_keys), ) @main.route("/services/<uuid:service_id>/templates/copy/<uuid:template_id>", methods=['GET', 'POST']) @user_has_permissions('manage_templates') def copy_template(service_id, template_id): from_service = request.args.get('from_service') current_user.belongs_to_service_or_403(from_service) template = service_api_client.get_service_template(from_service, template_id)['data'] template_folder = template_folder_api_client.get_template_folder(from_service, template['folder']) if not current_user.has_template_folder_permission(template_folder): abort(403) if request.method == 'POST': return add_service_template(service_id, template['template_type']) 
template['template_content'] = template['content'] template['name'] = _get_template_copy_name(template, current_service.all_templates) form = form_objects[template['template_type']](**template) return render_template( 'views/edit-{}-template.html'.format(template['template_type']), form=form, template=template, heading_action='Add', services=current_user.service_ids, ) def _get_template_copy_name(template, existing_templates): template_names = [existing['name'] for existing in existing_templates] for index in reversed(range(1, 10)): if '{} (copy {})'.format(template['name'], index) in template_names: return '{} (copy {})'.format(template['name'], index + 1) if '{} (copy)'.format(template['name']) in template_names: return '{} (copy 2)'.format(template['name']) return '{} (copy)'.format(template['name']) @main.route(( '/services/<uuid:service_id>/templates/action-blocked/' '<template_type:notification_type>/<return_to>' )) @main.route(( '/services/<uuid:service_id>/templates/action-blocked/' '<template_type:notification_type>/<return_to>/<uuid:template_id>' )) @user_has_permissions('manage_templates') def action_blocked(service_id, notification_type, return_to, template_id=None): back_link = { 'add_new_template': partial( url_for, '.choose_template', service_id=current_service.id ), 'templates': partial( url_for, '.choose_template', service_id=current_service.id ), 'view_template': partial( url_for, '.view_template', service_id=current_service.id, template_id=template_id ), }.get(return_to) return render_template( 'views/templates/action_blocked.html', service_id=service_id, notification_type=notification_type, back_link=back_link(), ), 403 @main.route("/services/<uuid:service_id>/templates/folders/<uuid:template_folder_id>/manage", methods=['GET', 'POST']) @user_has_permissions('manage_templates') def manage_template_folder(service_id, template_folder_id): template_folder = current_service.get_template_folder_with_user_permission_or_403(template_folder_id, 
current_user) form = TemplateFolderForm( name=template_folder['name'], users_with_permission=template_folder.get('users_with_permission', None), all_service_users=[user for user in current_service.active_users if user.id != current_user.id] ) if form.validate_on_submit(): if current_user.has_permissions("manage_service") and form.users_with_permission.all_service_users: users_with_permission = form.users_with_permission.data + [current_user.id] else: users_with_permission = None template_folder_api_client.update_template_folder( current_service.id, template_folder_id, name=form.name.data, users_with_permission=users_with_permission ) return redirect( url_for('.choose_template', service_id=service_id, template_folder_id=template_folder_id) ) return render_template( 'views/templates/manage-template-folder.html', form=form, template_folder_path=current_service.get_template_folder_path(template_folder_id), current_service_id=current_service.id, template_folder_id=template_folder_id, template_type="all", ) @main.route("/services/<uuid:service_id>/templates/folders/<uuid:template_folder_id>/delete", methods=['GET', 'POST']) @user_has_permissions('manage_templates') def delete_template_folder(service_id, template_folder_id): template_folder = current_service.get_template_folder_with_user_permission_or_403(template_folder_id, current_user) if len(current_service.get_template_folders_and_templates( template_type="all", template_folder_id=template_folder_id )) > 0: flash("You must empty this folder before you can delete it", 'info') return redirect( url_for( '.choose_template', service_id=service_id, template_type="all", template_folder_id=template_folder_id ) ) if request.method == 'POST': try: template_folder_api_client.delete_template_folder(current_service.id, template_folder_id) return redirect( url_for('.choose_template', service_id=service_id, template_folder_id=template_folder['parent_id']) ) except HTTPError as e: msg = "Folder is not empty" if e.status_code == 400 
and msg in e.message: flash("You must empty this folder before you can delete it", 'info') return redirect( url_for( '.choose_template', service_id=service_id, template_type="all", template_folder_id=template_folder_id ) ) else: abort(500, e) else: flash("Are you sure you want to delete the ‘{}’ folder?".format(template_folder['name']), 'delete') return manage_template_folder(service_id, template_folder_id) @main.route( "/services/<uuid:service_id>/templates/add-<template_type:template_type>", methods=['GET', 'POST'], ) @main.route( "/services/<uuid:service_id>/templates/folders/<uuid:template_folder_id>/add-<template_type:template_type>", methods=['GET', 'POST'], ) @user_has_permissions('manage_templates') def add_service_template(service_id, template_type, template_folder_id=None): if template_type not in current_service.available_template_types: return redirect(url_for( '.action_blocked', service_id=service_id, notification_type=template_type, template_folder_id=template_folder_id, return_to='templates', )) form = form_objects[template_type]() if form.validate_on_submit(): if form.process_type.data == 'priority': abort_403_if_not_admin_user() try: new_template = service_api_client.create_service_template( form.name.data, template_type, form.template_content.data, service_id, form.subject.data if hasattr(form, 'subject') else None, form.process_type.data, template_folder_id ) except HTTPError as e: if ( e.status_code == 400 and 'content' in e.message and any(['character count greater than' in x for x in e.message['content']]) ): form.template_content.errors.extend(e.message['content']) else: raise e else: return redirect( url_for('.view_template', service_id=service_id, template_id=new_template['data']['id']) ) return render_template( 'views/edit-{}-template.html'.format(template_type), form=form, template_type=template_type, template_folder_id=template_folder_id, heading_action='New', ) def abort_403_if_not_admin_user(): if not current_user.platform_admin: 
abort(403) @main.route("/services/<uuid:service_id>/templates/<uuid:template_id>/edit", methods=['GET', 'POST']) @user_has_permissions('manage_templates') def edit_service_template(service_id, template_id): template = current_service.get_template_with_user_permission_or_403(template_id, current_user) template['template_content'] = template['content'] form = form_objects[template['template_type']](**template) if form.validate_on_submit(): if form.process_type.data != template['process_type']: abort_403_if_not_admin_user() subject = form.subject.data if hasattr(form, 'subject') else None new_template_data = { 'name': form.name.data, 'content': form.template_content.data, 'subject': subject, 'template_type': template['template_type'], 'id': template['id'], 'process_type': form.process_type.data, 'reply_to_text': template['reply_to_text'], } new_template = get_template(new_template_data, current_service) template_change = get_template(template, current_service).compare_to(new_template) if ( template_change.placeholders_added and not request.form.get('confirm') and current_service.api_keys ): return render_template( 'views/templates/breaking-change.html', template_change=template_change, new_template=new_template, form=form, ) try: service_api_client.update_service_template( template_id, form.name.data, template['template_type'], form.template_content.data, service_id, subject, form.process_type.data, ) except HTTPError as e: if e.status_code == 400: if 'content' in e.message and any(['character count greater than' in x for x in e.message['content']]): form.template_content.errors.extend(e.message['content']) else: raise e else: raise e else: return redirect(url_for( '.view_template', service_id=service_id, template_id=template_id )) if template['template_type'] not in current_service.available_template_types: return redirect(url_for( '.action_blocked', service_id=service_id, notification_type=template['template_type'], return_to='view_template', template_id=template_id 
)) else: return render_template( 'views/edit-{}-template.html'.format(template['template_type']), form=form, template=template, heading_action='Edit', ) @main.route( "/services/<uuid:service_id>/templates/count-<template_type:template_type>-length", methods=['POST'], ) @user_has_permissions() def count_content_length(service_id, template_type): if template_type not in {'sms', 'broadcast'}: abort(404) error, message = _get_content_count_error_and_message_for_template( get_template({ 'template_type': template_type, 'content': request.form.get('template_content', ''), }, current_service) ) return jsonify({ 'html': render_template( 'partials/templates/content-count-message.html', error=error, message=message, ) }) def _get_content_count_error_and_message_for_template(template): if template.template_type == 'sms': if template.is_message_too_long(): return True, ( f'You have ' f'{character_count(template.content_count_without_prefix - SMS_CHAR_COUNT_LIMIT)} ' f'too many' ) if template.placeholders: return False, ( f'Will be charged as {message_count(template.fragment_count, template.template_type)} ' f'(not including personalisation)' ) return False, ( f'Will be charged as {message_count(template.fragment_count, template.template_type)} ' ) if template.template_type == 'broadcast': if template.content_too_long: return True, ( f'You have ' f'{character_count(template.encoded_content_count - template.max_content_count)} ' f'too many' ) else: return False, ( f'You have ' f'{character_count(template.max_content_count - template.encoded_content_count)} ' f'remaining' ) @main.route("/services/<uuid:service_id>/templates/<uuid:template_id>/delete", methods=['GET', 'POST']) @user_has_permissions('manage_templates') def delete_service_template(service_id, template_id): template = current_service.get_template_with_user_permission_or_403(template_id, current_user) if request.method == 'POST': service_api_client.delete_service_template(service_id, template_id) return 
redirect(url_for( '.choose_template', service_id=service_id, template_folder_id=template['folder'], )) try: last_used_notification = template_statistics_client.get_last_used_date_for_template( service_id, template['id'] ) message = 'This template has never been used.' if not last_used_notification else \ 'This template was last used {}.'.format(format_delta(last_used_notification)) except HTTPError as e: if e.status_code == 404: message = None else: raise e flash(["Are you sure you want to delete ‘{}’?".format(template['name']), message, template['name']], 'delete') return render_template( 'views/templates/template.html', template=get_template( template, current_service, letter_preview_url=url_for( 'no_cookie.view_letter_template_preview', service_id=service_id, template_id=template['id'], filetype='png', ), show_recipient=True, ), user_has_template_permission=True, ) @main.route("/services/<uuid:service_id>/templates/<uuid:template_id>/redact", methods=['GET']) @user_has_permissions('manage_templates') def confirm_redact_template(service_id, template_id): template = current_service.get_template_with_user_permission_or_403(template_id, current_user) return render_template( 'views/templates/template.html', template=get_template( template, current_service, letter_preview_url=url_for( 'no_cookie.view_letter_template_preview', service_id=service_id, template_id=template_id, filetype='png', ), show_recipient=True, ), user_has_template_permission=True, show_redaction_message=True, ) @main.route("/services/<uuid:service_id>/templates/<uuid:template_id>/redact", methods=['POST']) @user_has_permissions('manage_templates') def redact_template(service_id, template_id): service_api_client.redact_service_template(service_id, template_id) flash( 'Personalised content will be hidden for messages sent with this template', 'default_with_tick' ) return redirect(url_for( '.view_template', service_id=service_id, template_id=template_id, )) 
@main.route('/services/<uuid:service_id>/templates/<uuid:template_id>/versions') @user_has_permissions('view_activity') def view_template_versions(service_id, template_id): return render_template( 'views/templates/choose_history.html', versions=[ get_template( template, current_service, letter_preview_url=url_for( 'no_cookie.view_template_version_preview', service_id=service_id, template_id=template_id, version=template['version'], filetype='png', ) ) for template in service_api_client.get_service_template_versions(service_id, template_id)['data'] ] ) @main.route('/services/<uuid:service_id>/templates/<uuid:template_id>/set-template-sender', methods=['GET', 'POST']) @user_has_permissions('manage_templates') def set_template_sender(service_id, template_id): template = current_service.get_template_with_user_permission_or_403(template_id, current_user) sender_details = get_template_sender_form_dict(service_id, template) no_senders = sender_details.get('no_senders', False) form = SetTemplateSenderForm( sender=sender_details['current_choice'], sender_choices=sender_details['value_and_label'], ) form.sender.param_extensions = {'items': []} for item_value, _item_label in sender_details['value_and_label']: if item_value == sender_details['default_sender']: extensions = {'hint': {'text': '(Default)'}} else: extensions = {} # if no extensions needed, send an empty dict to preserve order of items form.sender.param_extensions['items'].append(extensions) if form.validate_on_submit(): service_api_client.update_service_template_sender( service_id, template_id, form.sender.data if form.sender.data else None, ) return redirect(url_for('.view_template', service_id=service_id, template_id=template_id)) return render_template( 'views/templates/set-template-sender.html', form=form, template_id=template_id, no_senders=no_senders ) @main.route('/services/<uuid:service_id>/templates/<uuid:template_id>/edit-postage', methods=['GET', 'POST']) @user_has_permissions('manage_templates') def 
edit_template_postage(service_id, template_id): template = current_service.get_template_with_user_permission_or_403(template_id, current_user) if template["template_type"] != "letter": abort(404) form = LetterTemplatePostageForm(**template) if form.validate_on_submit(): postage = form.postage.data service_api_client.update_service_template_postage(service_id, template_id, postage) return redirect(url_for('.view_template', service_id=service_id, template_id=template_id)) return render_template( 'views/templates/edit-template-postage.html', form=form, service_id=service_id, template_id=template_id, template_postage=template["postage"] ) def get_template_sender_form_dict(service_id, template): context = { 'email': { 'field_name': 'email_address' }, 'letter': { 'field_name': 'contact_block' }, 'sms': { 'field_name': 'sms_sender' } }[template['template_type']] sender_format = context['field_name'] service_senders = get_sender_details(service_id, template['template_type']) context['default_sender'] = next( (x['id'] for x in service_senders if x['is_default']), "Not set" ) if not service_senders: context['no_senders'] = True context['value_and_label'] = [(sender['id'], nl2br(sender[sender_format])) for sender in service_senders] context['value_and_label'].insert(0, ('', 'Blank')) # Add blank option to start of list context['current_choice'] = template['service_letter_contact'] if template['service_letter_contact'] else '' return context
{ "repo_name": "alphagov/notifications-admin", "path": "app/main/views/templates.py", "copies": "1", "size": "33330", "license": "mit", "hash": -5713354100618648000, "line_mean": 36.0177777778, "line_max": 119, "alpha_frac": 0.6263657102, "autogenerated": false, "ratio": 3.8943308007013444, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 1, "avg_score": 0.0009239233093790476, "num_lines": 900 }
from functools import partial from flooder import Flooder from grid_func import GridFunc def _point_callback(gf, point_spec): if 'coords' in point_spec: coords = point_spec['coords'] if 'min' in point_spec and point_spec['min'] is True: return gf.minimize(coords) else: return gf.map_nearest(coords) if 'range' in point_spec: return gf.g_minimize(*point_spec['range']) raise RuntimeError('Unknown point_spec') def _evaluate_path(gf, points): flooder = Flooder(gf) pstart, pend = points[0](gf), points[1](gf) path = flooder.flood(pstart, pend) for i in range(2, len(points)): pstart = pend pend = points[i](gf) del path[-1] path.extend(flooder.flood(pstart, pend)) return path # surface: /path/to/surface-file # smooth: # - sigma: 4.5 # cval: 0 # save: true # - sigma: 3.4 # save: false # - sigma: 20.0 # points: # - coords: [2.3, 4.5] # min: true # - range: [[null,4], [3.5,null]] def main(config): points = [] unsmoothed_gf = GridFunc.from_file(config['surface']) surfaces = [unsmoothed_gf] for point_spec in config['points']: points.append(partial(_point_callback, point_spec=point_spec)) for smooth_spec in config['smooth']: sigma = smooth_spec['sigma'] cval = smooth_spec['cval'] if 'cval' in smooth_spec else 0. smoothed_gf = unsmoothed_gf.smooth(sigma, cval, copy=True) surfaces.append(smoothed_gf) if 'save' in smooth_spec and smooth_spec['save'] is True: pass # TODO build filename from smooth_spec pathes = [_evaluate_path(surface, points) for surface in surfaces] return pathes # Tests if __name__ == '__main__': config = {'points': [{'range': [None, [None, 0.5]]}, {'range': [None, [0.5, None]]}, {'range': [None, [None, 0.5]]}], 'smooth': [{'cval': 0, 'save': True, 'sigma': 1.8}, {'sigma': [3.2, 3.8]}], 'surface': '../data/surface.txt'} pathes = main(config) for path in pathes: print path print pathes[-1].coords_idx
{ "repo_name": "tzelleke/MinEnergyPath", "path": "mepfinder/main.py", "copies": "1", "size": "2203", "license": "mit", "hash": -3291529251176905000, "line_mean": 30.0281690141, "line_max": 70, "alpha_frac": 0.5687698593, "autogenerated": false, "ratio": 3.216058394160584, "config_test": true, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.9284828253460584, "avg_score": 0, "num_lines": 71 }
from functools import partial from gouda.engines import (AccusoftEngine, DataSymbolEngine, DTKEngine, InliteEngine, LibDMTXEngine, StecosEngine, SoftekEngine, ZbarEngine, ZxingEngine) def engine_options(): """Returns a dict mapping textual descriptions to functions that return an engine. """ options = { 'libdmtx': LibDMTXEngine, 'zbar': ZbarEngine, 'zxing': ZxingEngine, } options = {k: v for k, v in options.items() if v.available()} if AccusoftEngine.available(): options.update({ 'accusoft-1d': partial(AccusoftEngine, datamatrix=False), 'accusoft-dm': partial(AccusoftEngine, datamatrix=True), }) if DataSymbolEngine.available(): options.update({ 'datasymbol-1d': partial(DataSymbolEngine, datamatrix=False), 'datasymbol-dm': partial(DataSymbolEngine, datamatrix=True), }) if DTKEngine.available(): options.update({ 'dtk-1d': partial(DTKEngine, datamatrix=False), 'dtk-dm': partial(DTKEngine, datamatrix=True), }) if InliteEngine.available(): options.update({ 'inlite-1d': partial(InliteEngine, format='1d'), 'inlite-dm': partial(InliteEngine, format='datamatrix'), 'inlite-pdf417': partial(InliteEngine, format='pdf417'), 'inlite-qrcode': partial(InliteEngine, format='qrcode'), }) if StecosEngine.available(): options.update({ 'stecos-1d': partial(StecosEngine, datamatrix=False), 'stecos-dm': partial(StecosEngine, datamatrix=True), }) if SoftekEngine.available(): options.update({ 'softek-1d': partial(SoftekEngine, datamatrix=False), 'softek-dm': partial(SoftekEngine, datamatrix=True), }) return options
{ "repo_name": "NaturalHistoryMuseum/gouda", "path": "gouda/engines/options.py", "copies": "1", "size": "1921", "license": "mit", "hash": -3079302707807447600, "line_mean": 32.1206896552, "line_max": 78, "alpha_frac": 0.6038521603, "autogenerated": false, "ratio": 3.751953125, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.48558052853, "avg_score": null, "num_lines": null }
from functools import partial from graceful.resources.base import BaseResource from graceful.resources.mixins import ( RetrieveMixin, ListMixin, UpdateMixin, CreateMixin, DeleteMixin, PaginatedMixin, CreateBulkMixin ) class Resource(RetrieveMixin, BaseResource): """Basic retrieval of resource instance lists without serialization. This resource class is intended for endpoints that do not require automatic representation serialization and extensive field descriptions but still gives support for defining parameters as resource class attributes. Example usage: .. code-block: from graceful.resources.generic import Resource from graceful.parameters import StringParam class SampleResource(Resource): dummy = StringParam("some example dummy parameter") def retrieve(self, params, meta): return {"sample": "resource"} """ class ListResource(ListMixin, BaseResource): """Basic retrieval of resource instance lists without serialization. This resource class is intended for endpoints that do not require automatic representation serialization and extensive field descriptions but still gives support for defining parameters as resource class attributes. Example usage: .. code-block: from graceful.resources.generic import ListResource from graceful.parameters import StringParam class SampleResource(ListResource): some_filter = StringParam("some example filter parameter") def list(self, params, meta): return [{"sample": "resource"}] """ class RetrieveAPI(RetrieveMixin, BaseResource): """Generic Retrieve API with resource serialization. Generic resource that uses serializer for resource description, serialization and validation. 
Allowed methods: * GET: retrieve resource representation (handled with ``.retrieve()`` method handler) """ serializer = None def describe(self, req=None, resp=None, **kwargs): """Extend default endpoint description with serializer description.""" return super().describe( req, resp, type='object', fields=self.serializer.describe() if self.serializer else None, **kwargs ) def _retrieve(self, params, meta, **kwargs): return self.serializer.to_representation( self.retrieve(params, meta, **kwargs) ) def on_get(self, req, resp, **kwargs): """Respond on GET requests using ``self.retrieve()`` handler.""" return super().on_get( req, resp, handler=self._retrieve, **kwargs ) class RetrieveUpdateAPI(UpdateMixin, RetrieveAPI): """Generic Retrieve/Update API with resource serialization. Generic resource that uses serializer for resource description, serialization and validation. Allowed methods: * GET: retrieve resource representation handled with ``.retrieve()`` method handler * PUT: update resource with representation provided in request body (handled with ``.update()`` method handler) """ def _update(self, params, meta, **kwargs): return self.serializer.to_representation( self.update(params, meta, **kwargs) ) def on_put(self, req, resp, **kwargs): """Respond on PUT requests using ``self.update()`` handler.""" validated = self.require_validated(req) return super().on_put( req, resp, handler=partial(self._update, validated=validated), **kwargs ) class RetrieveUpdateDeleteAPI(DeleteMixin, RetrieveUpdateAPI): """Generic Retrieve/Update/Delete API with resource serialization. Generic resource that uses serializer for resource description, serialization and validation. 
Allowed methods: * GET: retrieve resource representation (handled with ``.retrieve()`` method handler) * PUT: update resource with representation provided in request body (handled with ``.update()`` method handler) * DELETE: delete resource (handled with ``.delete()`` method handler) """ class ListAPI(ListMixin, BaseResource): """Generic List API with resource serialization. Generic resource that uses serializer for resource description, serialization and validation. Allowed methods: * GET: list multiple resource instances representations (handled with ``.list()`` method handler) """ def _list(self, params, meta, **kwargs): return [ self.serializer.to_representation(obj) for obj in self.list(params, meta, **kwargs) ] def describe(self, req=None, resp=None, **kwargs): """Extend default endpoint description with serializer description.""" return super().describe( req, resp, type='list', fields=self.serializer.describe() if self.serializer else None, **kwargs ) def on_get(self, req, resp, **kwargs): """Respond on GET requests using ``self.list()`` handler.""" return super().on_get(req, resp, handler=self._list, **kwargs) class ListCreateAPI(CreateMixin, CreateBulkMixin, ListAPI): """Generic List/Create API with resource serialization. Generic resource that uses serializer for resource description, serialization and validation. Allowed methods: * GET: list multiple resource instances representations (handled with ``.list()`` method handler) * POST: create new resource from representation provided in request body (handled with ``.create()`` method handler) * PATCH: create multiple resources from list of representations provided in request body (handled with ``.create_bulk()`` method handler. 
""" def _create(self, params, meta, **kwargs): return self.serializer.to_representation( self.create(params, meta, **kwargs) ) def _create_bulk(self, params, meta, **kwargs): return [ self.serializer.to_representation(obj) for obj in self.create_bulk(params, meta, **kwargs) ] def create_bulk(self, params, meta, **kwargs): """Create items in bulk by reusing existing ``.create()`` handler. .. note:: This is default create_bulk implementation that may not be safe to use in production environment depending on your implementation of ``.create()`` method handler. """ validated = kwargs.pop('validated') return [ self.create(params, meta, validated=item) for item in validated ] def on_post(self, req, resp, **kwargs): """Respond on POST requests using ``self.create()`` handler.""" validated = self.require_validated(req) return super().on_post( req, resp, handler=partial(self._create, validated=validated), **kwargs ) def on_patch(self, req, resp, **kwargs): """Respond on PATCH requests using ``self.create_bulk()`` handler.""" validated = self.require_validated(req, bulk=True) return super().on_patch( req, resp, handler=partial(self._create_bulk, validated=validated), **kwargs ) class PaginatedListAPI(PaginatedMixin, ListAPI): """Generic List API with resource serialization and pagination. Generic resource that uses serializer for resource description, serialization and validation. Adds simple pagination to list of resources. Allowed methods: * GET: list multiple resource instances representations (handled with ``.list()`` method handler) """ def _list(self, params, meta, **kwargs): objects = super()._list(params, meta, **kwargs) # note: we need to populate meta after objects are retrieved self.add_pagination_meta(params, meta) return objects class PaginatedListCreateAPI(PaginatedMixin, ListCreateAPI): """Generic List/Create API with resource serialization and pagination. Generic resource that uses serializer for resource description, serialization and validation. 
Adds simple pagination to list of resources. Allowed methods: * GET: list multiple resource instances representations (handled with ``.list()`` method handler) * POST: create new resource from representation provided in request body (handled with ``.create()`` method handler) """ def _list(self, params, meta, **kwargs): objects = super()._list( params, meta, **kwargs ) # note: we need to populate meta after objects are retrieved self.add_pagination_meta(params, meta) return objects
{ "repo_name": "swistakm/graceful", "path": "src/graceful/resources/generic.py", "copies": "1", "size": "8892", "license": "bsd-3-clause", "hash": 4266670444306438000, "line_mean": 30.2, "line_max": 79, "alpha_frac": 0.6553081422, "autogenerated": false, "ratio": 4.953760445682451, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 1, "avg_score": 0, "num_lines": 285 }
from functools import partial from graphql.language import parse from graphql.type import GraphQLSchema from graphql.utilities import build_schema from graphql.validation import validate, FieldsOnCorrectTypeRule from .harness import assert_validation_errors assert_errors = partial(assert_validation_errors, FieldsOnCorrectTypeRule) assert_valid = partial(assert_errors, errors=[]) def describe_validate_fields_on_correct_type(): def object_field_selection(): assert_valid( """ fragment objectFieldSelection on Dog { __typename name } """ ) def aliased_object_field_selection(): assert_valid( """ fragment aliasedObjectFieldSelection on Dog { tn : __typename otherName : name } """ ) def interface_field_selection(): assert_valid( """ fragment interfaceFieldSelection on Pet { __typename name } """ ) def aliased_interface_field_selection(): assert_valid( """ fragment interfaceFieldSelection on Pet { otherName : name } """ ) def lying_alias_selection(): assert_valid( """ fragment lyingAliasSelection on Dog { name : nickname } """ ) def ignores_fields_on_unknown_type(): assert_valid( """ fragment unknownSelection on UnknownType { unknownField } """ ) def reports_errors_when_type_is_known_again(): assert_errors( """ fragment typeKnownAgain on Pet { unknown_pet_field { ... on Cat { unknown_cat_field } } }, """, [ { "message": "Cannot query field 'unknown_pet_field' on type 'Pet'.", "locations": [(3, 15)], }, { "message": "Cannot query field 'unknown_cat_field' on type 'Cat'.", "locations": [(5, 19)], }, ], ) def field_not_defined_on_fragment(): assert_errors( """ fragment fieldNotDefined on Dog { meowVolume } """, [ { "message": "Cannot query field 'meowVolume' on type 'Dog'." 
" Did you mean 'barkVolume'?", "locations": [(3, 15)], }, ], ) def ignores_deeply_unknown_field(): assert_errors( """ fragment deepFieldNotDefined on Dog { unknown_field { deeper_unknown_field } } """, [ { "message": "Cannot query field 'unknown_field' on type 'Dog'.", "locations": [(3, 15)], }, ], ) def sub_field_not_defined(): assert_errors( """ fragment subFieldNotDefined on Human { pets { unknown_field } } """, [ { "message": "Cannot query field 'unknown_field' on type 'Pet'.", "locations": [(4, 17)], }, ], ) def field_not_defined_on_inline_fragment(): assert_errors( """ fragment fieldNotDefined on Pet { ... on Dog { meowVolume } } """, [ { "message": "Cannot query field 'meowVolume' on type 'Dog'." " Did you mean 'barkVolume'?", "locations": [(4, 17)], }, ], ) def aliased_field_target_not_defined(): assert_errors( """ fragment aliasedFieldTargetNotDefined on Dog { volume : mooVolume } """, [ { "message": "Cannot query field 'mooVolume' on type 'Dog'." " Did you mean 'barkVolume'?", "locations": [(3, 15)], }, ], ) def aliased_lying_field_target_not_defined(): assert_errors( """ fragment aliasedLyingFieldTargetNotDefined on Dog { barkVolume : kawVolume } """, [ { "message": "Cannot query field 'kawVolume' on type 'Dog'." " Did you mean 'barkVolume'?", "locations": [(3, 15)], }, ], ) def not_defined_on_interface(): assert_errors( """ fragment notDefinedOnInterface on Pet { tailLength } """, [ { "message": "Cannot query field 'tailLength' on type 'Pet'.", "locations": [(3, 15)], }, ], ) def defined_on_implementors_but_not_on_interface(): assert_errors( """ fragment definedOnImplementorsButNotInterface on Pet { nickname } """, [ { "message": "Cannot query field 'nickname' on type 'Pet'." 
" Did you mean to use an inline fragment on 'Cat' or 'Dog'?", "locations": [(3, 15)], }, ], ) def meta_field_selection_on_union(): assert_valid( """ fragment directFieldSelectionOnUnion on CatOrDog { __typename } """ ) def direct_field_selection_on_union(): assert_errors( """ fragment directFieldSelectionOnUnion on CatOrDog { directField } """, [ { "message": "Cannot query field 'directField' on type 'CatOrDog'.", "locations": [(3, 15)], }, ], ) def defined_on_implementors_queried_on_union(): assert_errors( """ fragment definedOnImplementorsQueriedOnUnion on CatOrDog { name } """, [ { "message": "Cannot query field 'name' on type 'CatOrDog'." " Did you mean to use an inline fragment" " on 'Being', 'Pet', 'Canine', 'Cat', or 'Dog'?", "locations": [(3, 15)], }, ], ) def valid_field_in_inline_fragment(): assert_valid( """ fragment objectFieldSelection on Pet { ... on Dog { name } ... { name } } """ ) def describe_fields_on_correct_type_error_message(): def _error_message(schema: GraphQLSchema, query_str: str): errors = validate(schema, parse(query_str), [FieldsOnCorrectTypeRule]) assert len(errors) == 1 return errors[0].message def fields_correct_type_no_suggestion(): schema = build_schema( """ type T { fieldWithVeryLongNameThatWillNeverBeSuggested: String } type Query { t: T } """ ) assert _error_message(schema, "{ t { f } }") == ( "Cannot query field 'f' on type 'T'." ) def works_with_no_small_numbers_of_type_suggestion(): schema = build_schema( """ union T = A | B type Query { t: T } type A { f: String } type B { f: String } """ ) assert _error_message(schema, "{ t { f } }") == ( "Cannot query field 'f' on type 'T'." " Did you mean to use an inline fragment on 'A' or 'B'?" ) def works_with_no_small_numbers_of_field_suggestion(): schema = build_schema( """ type T { y: String z: String } type Query { t: T } """ ) assert _error_message(schema, "{ t { f } }") == ( "Cannot query field 'f' on type 'T'. Did you mean 'y' or 'z'?" 
) def only_shows_one_set_of_suggestions_at_a_time_preferring_types(): schema = build_schema( """ interface T { y: String z: String } type Query { t: T } type A implements T { f: String y: String z: String } type B implements T { f: String y: String z: String } """ ) assert _error_message(schema, "{ t { f } }") == ( "Cannot query field 'f' on type 'T'." " Did you mean to use an inline fragment on 'A' or 'B'?" ) def sort_type_suggestions_based_on_inheritance_order(): schema = build_schema( """ interface T { bar: String } type Query { t: T } interface Z implements T { foo: String bar: String } interface Y implements Z & T { foo: String bar: String } type X implements Y & Z & T { foo: String bar: String } """ ) assert _error_message(schema, "{ t { foo } }") == ( "Cannot query field 'foo' on type 'T'." " Did you mean to use an inline fragment on 'Z', 'Y', or 'X'?" ) def limits_lots_of_type_suggestions(): schema = build_schema( """ union T = A | B | C | D | E | F type Query { t: T } type A { f: String } type B { f: String } type C { f: String } type D { f: String } type E { f: String } type F { f: String } """ ) assert _error_message(schema, "{ t { f } }") == ( "Cannot query field 'f' on type 'T'. Did you mean to use" " an inline fragment on 'A', 'B', 'C', 'D', or 'E'?" ) def limits_lots_of_field_suggestions(): schema = build_schema( """ type T { u: String v: String w: String x: String y: String z: String } type Query { t: T } """ ) assert _error_message(schema, "{ t { f } }") == ( "Cannot query field 'f' on type 'T'." " Did you mean 'u', 'v', 'w', 'x', or 'y'?" )
{ "repo_name": "graphql-python/graphql-core", "path": "tests/validation/test_fields_on_correct_type.py", "copies": "1", "size": "11433", "license": "mit", "hash": 5415206647724378000, "line_mean": 26.1567695962, "line_max": 87, "alpha_frac": 0.4051430071, "autogenerated": false, "ratio": 4.757802746566791, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.5662945753666793, "avg_score": null, "num_lines": null }
from functools import partial from graphql.pyutils import Undefined, inspect from graphql.type import ( GraphQLArgument, GraphQLField, GraphQLObjectType, GraphQLSchema, GraphQLScalarType, GraphQLString, ) from graphql.validation import ValuesOfCorrectTypeRule from .harness import assert_validation_errors assert_errors = partial(assert_validation_errors, ValuesOfCorrectTypeRule) assert_valid = partial(assert_errors, errors=[]) def describe_validate_values_of_correct_type(): def describe_valid_values(): def good_int_value(): assert_valid( """ { complicatedArgs { intArgField(intArg: 2) } } """ ) def good_negative_int_value(): assert_valid( """ { complicatedArgs { intArgField(intArg: -2) } } """ ) def good_boolean_value(): assert_valid( """ { complicatedArgs { booleanArgField(intArg: true) } } """ ) def good_string_value(): assert_valid( """ { complicatedArgs { stringArgField(intArg: "foo") } } """ ) def good_float_value(): assert_valid( """ { complicatedArgs { floatArgField(intArg: 1.1) } } """ ) def good_negative_float_value(): assert_valid( """ { complicatedArgs { floatArgField(intArg: -1.1) } } """ ) def int_into_id(): assert_valid( """ { complicatedArgs { idArgField(idArg: 1) } } """ ) def string_into_id(): assert_valid( """ { complicatedArgs { idArgField(idArg: "someIdString") } } """ ) def good_enum_value(): assert_valid( """ { dog { doesKnowCommand(dogCommand: SIT) } } """ ) def enum_with_undefined_value(): assert_valid( """ { complicatedArgs { enumArgField(enumArg: UNKNOWN) } } """ ) def enum_with_null_value(): assert_valid( """ { complicatedArgs { enumArgField(enumArg: NO_FUR) } } """ ) def null_into_nullable_type(): assert_valid( """ { complicatedArgs { intArgField(intArg: null) } } """ ) assert_valid( """ { dog(a: null, b: null, c:{ requiredField: true, intField: null }) { name } } """ ) def describe_invalid_string_values(): def int_into_string(): assert_errors( """ { complicatedArgs { stringArgField(stringArg: 1) } } """, [ { "message": "String cannot 
represent a non string value: 1", "locations": [(4, 47)], }, ], ) def float_into_string(): assert_errors( """ { complicatedArgs { stringArgField(stringArg: 1.0) } } """, [ { "message": "String cannot represent a non string value: 1.0", "locations": [(4, 47)], }, ], ) def boolean_into_string(): assert_errors( """ { complicatedArgs { stringArgField(stringArg: true) } } """, [ { "message": "String cannot represent a non string value: true", "locations": [(4, 47)], }, ], ) def unquoted_string_into_string(): assert_errors( """ { complicatedArgs { stringArgField(stringArg: BAR) } } """, [ { "message": "String cannot represent a non string value: BAR", "locations": [(4, 47)], }, ], ) def describe_invalid_int_values(): def string_into_int(): assert_errors( """ { complicatedArgs { intArgField(intArg: "3") } } """, [ { "message": 'Int cannot represent non-integer value: "3"', "locations": [(4, 41)], }, ], ) def big_int_into_int(): assert_errors( """ { complicatedArgs { intArgField(intArg: 829384293849283498239482938) } } """, [ { "message": "Int cannot represent non 32-bit signed integer" " value: 829384293849283498239482938", "locations": [(4, 41)], }, ], ) def unquoted_string_into_int(): assert_errors( """ { complicatedArgs { intArgField(intArg: FOO) } } """, [ { "message": "Int cannot represent non-integer value: FOO", "locations": [(4, 41)], }, ], ) def simple_float_into_int(): assert_errors( """ { complicatedArgs { intArgField(intArg: 3.0) } } """, [ { "message": "Int cannot represent non-integer value: 3.0", "locations": [(4, 41)], } ], ) def float_into_int(): assert_errors( """ { complicatedArgs { intArgField(intArg: 3.333) } } """, [ { "message": "Int cannot represent non-integer value: 3.333", "locations": [(4, 41)], }, ], ) def describe_invalid_float_values(): def string_into_float(): assert_errors( """ { complicatedArgs { floatArgField(floatArg: "3.333") } } """, [ { "message": 'Float cannot represent non numeric value: "3.333"', "locations": [(4, 45)], }, ], ) 
def boolean_into_float(): assert_errors( """ { complicatedArgs { floatArgField(floatArg: true) } } """, [ { "message": "Float cannot represent non numeric value: true", "locations": [(4, 45)], }, ], ) def unquoted_into_float(): assert_errors( """ { complicatedArgs { floatArgField(floatArg: FOO) } } """, [ { "message": "Float cannot represent non numeric value: FOO", "locations": [(4, 45)], }, ], ) def describe_invalid_boolean_value(): def int_into_boolean(): assert_errors( """ { complicatedArgs { booleanArgField(booleanArg: 2) } } """, [ { "message": "Boolean cannot represent a non boolean value: 2", "locations": [(4, 49)], }, ], ) def float_into_boolean(): assert_errors( """ { complicatedArgs { booleanArgField(booleanArg: 1.0) } } """, [ { "message": "Boolean cannot represent a non boolean value: 1.0", "locations": [(4, 49)], } ], ) def string_into_boolean(): assert_errors( """ { complicatedArgs { booleanArgField(booleanArg: "true") } } """, [ { "message": "Boolean cannot represent a non boolean value:" ' "true"', "locations": [(4, 49)], } ], ) def unquoted_into_boolean(): assert_errors( """ { complicatedArgs { booleanArgField(booleanArg: TRUE) } } """, [ { "message": "Boolean cannot represent a non boolean value: TRUE", "locations": [(4, 49)], }, ], ) def describe_invalid_id_value(): def float_into_id(): assert_errors( """ { complicatedArgs { idArgField(idArg: 1.0) } } """, [ { "message": "ID cannot represent a non-string" " and non-integer value: 1.0", "locations": [(4, 39)], } ], ) def boolean_into_id(): assert_errors( """ { complicatedArgs { idArgField(idArg: true) } } """, [ { "message": "ID cannot represent a non-string" " and non-integer value: true", "locations": [(4, 39)], }, ], ) def unquoted_into_id(): assert_errors( """ { complicatedArgs { idArgField(idArg: SOMETHING) } } """, [ { "message": "ID cannot represent a non-string" " and non-integer value: SOMETHING", "locations": [(4, 39)], }, ], ) def describe_invalid_enum_value(): def int_into_enum(): 
assert_errors( """ { dog { doesKnowCommand(dogCommand: 2) } } """, [ { "message": "Enum 'DogCommand' cannot represent non-enum value:" " 2.", "locations": [(4, 49)], }, ], ) def float_into_enum(): assert_errors( """ { dog { doesKnowCommand(dogCommand: 1.0) } } """, [ { "message": "Enum 'DogCommand' cannot represent non-enum value:" " 1.0.", "locations": [(4, 49)], }, ], ) def string_into_enum(): assert_errors( """ { dog { doesKnowCommand(dogCommand: "SIT") } } """, [ { "message": "Enum 'DogCommand' cannot represent non-enum value:" ' "SIT".' " Did you mean the enum value 'SIT'?", "locations": [(4, 49)], }, ], ) def boolean_into_enum(): assert_errors( """ { dog { doesKnowCommand(dogCommand: true) } } """, [ { "message": "Enum 'DogCommand' cannot represent non-enum value:" " true.", "locations": [(4, 49)], }, ], ) def unknown_enum_value_into_enum(): assert_errors( """ { dog { doesKnowCommand(dogCommand: JUGGLE) } } """, [ { "message": "Value 'JUGGLE'" " does not exist in 'DogCommand' enum.", "locations": [(4, 49)], }, ], ) def different_case_enum_value_into_enum(): assert_errors( """ { dog { doesKnowCommand(dogCommand: sit) } } """, [ { "message": "Value 'sit' does not exist in 'DogCommand' enum." 
" Did you mean the enum value 'SIT'?", "locations": [(4, 49)], }, ], ) def describe_valid_list_value(): def good_list_value(): assert_valid( """ { complicatedArgs { stringListArgField(stringListArg: ["one", null, "two"]) } } """ ) def empty_list_value(): assert_valid( """ { complicatedArgs { stringListArgField(stringListArg: []) } } """ ) def null_value(): assert_valid( """ { complicatedArgs { stringListArgField(stringListArg: null) } } """ ) def single_value_into_list(): assert_valid( """ { complicatedArgs { stringListArgField(stringListArg: "one") } } """ ) def describe_invalid_list_value(): def incorrect_item_type(): assert_errors( """ { complicatedArgs { stringListArgField(stringListArg: ["one", 2]) } } """, [ { "message": "String cannot represent a non string value: 2", "locations": [(4, 63)], }, ], ) def single_value_of_incorrect_type(): assert_errors( """ { complicatedArgs { stringListArgField(stringListArg: 1) } } """, [ { "message": "String cannot represent a non string value: 1", "locations": [(4, 55)], }, ], ) def describe_valid_non_nullable_value(): def arg_on_optional_arg(): assert_valid( """ { dog { isHouseTrained(atOtherHomes: true) } } """ ) def no_arg_on_optional_arg(): assert_valid( """ { dog { isHouseTrained } } """ ) def multiple_args(): assert_valid( """ { complicatedArgs { multipleReqs(req1: 1, req2: 2) } } """ ) def multiple_args_reverse_order(): assert_valid( """ { complicatedArgs { multipleReqs(req2: 2, req1: 1) } } """ ) def no_args_on_multiple_optional(): assert_valid( """ { complicatedArgs { multipleOpts } } """ ) def one_arg_on_multiple_optional(): assert_valid( """ { complicatedArgs { multipleOpts(opt1: 1) } } """ ) def second_arg_on_multiple_optional(): assert_valid( """ { complicatedArgs { multipleOpts(opt2: 1) } } """ ) def multiple_required_args_on_mixed_list(): assert_valid( """ { complicatedArgs { multipleOptAndReq(req1: 3, req2: 4) } } """ ) def multiple_required_and_one_optional_arg_on_mixed_list(): assert_valid( """ { 
complicatedArgs { multipleOptAndReq(req1: 3, req2: 4, opt1: 5) } } """ ) def all_required_and_optional_args_on_mixed_list(): assert_valid( """ { complicatedArgs { multipleOptAndReq(req1: 3, req2: 4, opt1: 5, opt2: 6) } } """ ) def describe_invalid_non_nullable_value(): def incorrect_value_type(): assert_errors( """ { complicatedArgs { multipleReqs(req2: "two", req1: "one") } } """, [ { "message": 'Int cannot represent non-integer value: "two"', "locations": [(4, 40)], }, { "message": 'Int cannot represent non-integer value: "one"', "locations": [(4, 53)], }, ], ) def incorrect_value_and_missing_argument_provided_required_arguments(): assert_errors( """ { complicatedArgs { multipleReqs(req1: "one") } } """, [ { "message": 'Int cannot represent non-integer value: "one"', "locations": [(4, 40)], }, ], ) def null_value(): assert_errors( """ { complicatedArgs { multipleReqs(req1: null) } } """, [ { "message": "Expected value of type 'Int!', found null.", "locations": [(4, 40)], }, ], ) def describe_valid_input_object_value(): def optional_arg_despite_required_field_in_type(): assert_valid( """ { complicatedArgs { complexArgField } } """ ) def partial_object_only_required(): assert_valid( """ { complicatedArgs { complexArgField(complexArg: { requiredField: true }) } } """ ) def partial_object_required_field_can_be_falsy(): assert_valid( """ { complicatedArgs { complexArgField(complexArg: { requiredField: false }) } } """ ) def partial_object_including_required(): assert_valid( """ { complicatedArgs { complexArgField(complexArg: { requiredField: true, intField: 4 }) } } """ ) def full_object(): assert_valid( """ { complicatedArgs { complexArgField(complexArg: { requiredField: true, intField: 4, stringField: "foo", booleanField: false, stringListField: ["one", "two"] }) } } """ ) def full_object_with_fields_in_different_order(): assert_valid( """ { complicatedArgs { complexArgField(complexArg: { stringListField: ["one", "two"], booleanField: false, requiredField: true, 
stringField: "foo", intField: 4, }) } } """ ) def describe_invalid_input_object_value(): def partial_object_missing_required(): assert_errors( """ { complicatedArgs { complexArgField(complexArg: { intField: 4 }) } } """, [ { "message": "Field 'ComplexInput.requiredField'" " of required type 'Boolean!' was not provided.", "locations": [(4, 49)], }, ], ) def partial_object_invalid_field_type(): assert_errors( """ { complicatedArgs { complexArgField(complexArg: { stringListField: ["one", 2], requiredField: true, }) } } """, [ { "message": "String cannot represent a non string value: 2", "locations": [(5, 48)], }, ], ) def partial_object_null_to_non_null_field(): assert_errors( """ { complicatedArgs { complexArgField(complexArg: { requiredField: true, nonNullField: null, }) } } """, [ { "message": "Expected value of type 'Boolean!', found null.", "locations": [(6, 37)], } ], ) def partial_object_unknown_field_arg(): assert_errors( """ { complicatedArgs { complexArgField(complexArg: { requiredField: true, invalidField: "value" }) } } """, [ { "message": "Field 'invalidField'" " is not defined by type 'ComplexInput'." 
" Did you mean 'intField'?", "locations": [(6, 23)], }, ], ) def reports_original_error_for_custom_scalar_which_throws(): def parse_value(value): raise Exception(f"Invalid scalar is always invalid: {inspect(value)}") custom_scalar = GraphQLScalarType("Invalid", parse_value=parse_value) schema = GraphQLSchema( query=GraphQLObjectType( "Query", { "invalidArg": GraphQLField( GraphQLString, {"arg": GraphQLArgument(custom_scalar)} ) }, ) ) errors = assert_errors( "{ invalidArg(arg: 123) }", [ { "message": "Expected value of type 'Invalid', found 123;" " Invalid scalar is always invalid: 123", "locations": [(1, 19)], } ], schema=schema, ) assert str(errors[0].original_error) == ( "Invalid scalar is always invalid: 123" ) def reports_error_for_custom_scalar_that_returns_undefined(): custom_scalar = GraphQLScalarType( "CustomScalar", parse_value=lambda value: Undefined ) schema = GraphQLSchema( GraphQLObjectType( "Query", { "invalidArg": GraphQLField( GraphQLString, args={"arg": GraphQLArgument(custom_scalar)} ) }, ) ) assert_errors( "{ invalidArg(arg: 123) }", [ { "message": "Expected value of type 'CustomScalar', found 123.", "locations": [(1, 19)], }, ], schema=schema, ) def allows_custom_scalar_to_accept_complex_literals(): custom_scalar = GraphQLScalarType("Any") schema = GraphQLSchema( query=GraphQLObjectType( "Query", { "anyArg": GraphQLField( GraphQLString, {"arg": GraphQLArgument(custom_scalar)} ) }, ) ) assert_valid( """ { test1: anyArg(arg: 123) test2: anyArg(arg: "abc") test3: anyArg(arg: [123, "abc"]) test4: anyArg(arg: {deep: [123, "abc"]}) } """, schema=schema, ) def describe_directive_arguments(): def with_directives_of_valid_types(): assert_valid( """ { dog @include(if: true) { name } human @skip(if: false) { name } } """ ) def with_directives_with_incorrect_types(): assert_errors( """ { dog @include(if: "yes") { name @skip(if: ENUM) } } """, [ { "message": "Boolean cannot represent a non boolean value:" ' "yes"', "locations": [(3, 36)], }, { "message": 
"Boolean cannot represent a non boolean value: ENUM", "locations": [(4, 36)], }, ], ) def describe_variable_default_values(): def variables_with_valid_default_values(): assert_valid( """ query WithDefaultValues( $a: Int = 1, $b: String = "ok", $c: ComplexInput = { requiredField: true, intField: 3 } $d: Int! = 123 ) { dog { name } } """ ) def variables_with_valid_default_null_values(): assert_valid( """ query WithDefaultValues( $a: Int = null, $b: String = null, $c: ComplexInput = { requiredField: true, intField: null } ) { dog { name } } """ ) def variables_with_invalid_default_null_values(): assert_errors( """ query WithDefaultValues( $a: Int! = null, $b: String! = null, $c: ComplexInput = { requiredField: null, intField: null } ) { dog { name } } """, [ { "message": "Expected value of type 'Int!', found null.", "locations": [(3, 30)], }, { "message": "Expected value of type 'String!', found null.", "locations": [(4, 33)], }, { "message": "Expected value of type 'Boolean!', found null.", "locations": [(5, 55)], }, ], ) def variables_with_invalid_default_values(): assert_errors( """ query InvalidDefaultValues( $a: Int = "one", $b: String = 4, $c: ComplexInput = "NotVeryComplex" ) { dog { name } } """, [ { "message": 'Int cannot represent non-integer value: "one"', "locations": [(3, 29)], }, { "message": "String cannot represent a non string value: 4", "locations": [(4, 32)], }, { "message": "Expected value of type 'ComplexInput'," ' found "NotVeryComplex".', "locations": [(5, 38)], }, ], ) def variables_with_complex_invalid_default_values(): assert_errors( """ query WithDefaultValues( $a: ComplexInput = { requiredField: 123, intField: "abc" } ) { dog { name } } """, [ { "message": "Boolean cannot represent a non boolean value: 123", "locations": [(3, 55)], }, { "message": 'Int cannot represent non-integer value: "abc"', "locations": [(3, 70)], }, ], ) def complex_variables_missing_required_fields(): assert_errors( """ query MissingRequiredField($a: ComplexInput = 
{intField: 3}) { dog { name } } """, [ { "message": "Field 'ComplexInput.requiredField'" " of required type 'Boolean!' was not provided.", "locations": [(2, 63)], }, ], ) def list_variables_with_invalid_item(): assert_errors( """ query InvalidItem($a: [String] = ["one", 2]) { dog { name } } """, [ { "message": "String cannot represent a non string value: 2", "locations": [(2, 58)], }, ], )
{ "repo_name": "graphql-python/graphql-core", "path": "tests/validation/test_values_of_correct_type.py", "copies": "1", "size": "36133", "license": "mit", "hash": -4194868772202091500, "line_mean": 27.4511811024, "line_max": 88, "alpha_frac": 0.3049290123, "autogenerated": false, "ratio": 5.9912120709666725, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.6796141083266674, "avg_score": null, "num_lines": null }
from functools import partial from graphql.utilities import build_schema from graphql.validation import KnownDirectivesRule from .harness import assert_validation_errors, assert_sdl_validation_errors assert_errors = partial(assert_validation_errors, KnownDirectivesRule) assert_valid = partial(assert_errors, errors=[]) assert_sdl_errors = partial(assert_sdl_validation_errors, KnownDirectivesRule) assert_sdl_valid = partial(assert_sdl_errors, errors=[]) schema_with_sdl_directives = build_schema( """ directive @onSchema on SCHEMA directive @onScalar on SCALAR directive @onObject on OBJECT directive @onFieldDefinition on FIELD_DEFINITION directive @onArgumentDefinition on ARGUMENT_DEFINITION directive @onInterface on INTERFACE directive @onUnion on UNION directive @onEnum on ENUM directive @onEnumValue on ENUM_VALUE directive @onInputObject on INPUT_OBJECT directive @onInputFieldDefinition on INPUT_FIELD_DEFINITION """ ) def describe_known_directives(): def with_no_directives(): assert_valid( """ query Foo { name ...Frag } fragment Frag on Dog { name } """ ) def with_known_directives(): assert_valid( """ { dog @include(if: true) { name } human @skip(if: false) { name } } """ ) def with_unknown_directive(): assert_errors( """ { dog @unknown(directive: "value") { name } } """, [{"message": "Unknown directive '@unknown'.", "locations": [(3, 19)]}], ) def with_many_unknown_directives(): assert_errors( """ { dog @unknown(directive: "value") { name } human @unknown(directive: "value") { name pets @unknown(directive: "value") { name } } } """, [ {"message": "Unknown directive '@unknown'.", "locations": [(3, 19)]}, {"message": "Unknown directive '@unknown'.", "locations": [(6, 21)]}, {"message": "Unknown directive '@unknown'.", "locations": [(8, 22)]}, ], ) def with_well_placed_directives(): assert_valid( """ query ($var: Boolean) @onQuery { name @include(if: $var) ...Frag @include(if: true) skippedField @skip(if: true) ...SkippedFrag @skip(if: true) ... 
@skip(if: true) { skippedField } } mutation @onMutation { someField } subscription @onSubscription { someField } fragment Frag on SomeType @onFragmentDefinition { someField } """ ) def with_well_placed_variable_definition_directive(): assert_valid( """ query Foo($var: Boolean @onVariableDefinition) { name } """ ) def with_misplaced_directives(): assert_errors( """ query Foo($var: Boolean) @include(if: true) { name @onQuery @include(if: $var) ...Frag @onQuery } mutation Bar @onQuery { someField } """, [ { "message": "Directive '@include' may not be used on query.", "locations": [(2, 38)], }, { "message": "Directive '@onQuery' may not be used on field.", "locations": [(3, 20)], }, { "message": "Directive '@onQuery'" " may not be used on fragment spread.", "locations": [(4, 23)], }, { "message": "Directive '@onQuery' may not be used on mutation.", "locations": [(7, 26)], }, ], ) def with_misplaced_variable_definition_directive(): assert_errors( """ query Foo($var: Boolean @onField) { name } """, [ { "message": "Directive '@onField'" " may not be used on variable definition.", "locations": [(2, 37)], }, ], ) def describe_within_sdl(): def with_directive_defined_inside_sdl(): assert_sdl_valid( """ type Query { foo: String @test } directive @test on FIELD_DEFINITION """ ) def with_standard_directive(): assert_sdl_valid( """ type Query { foo: String @deprecated } """ ) def with_overridden_standard_directive(): assert_sdl_valid( """ schema @deprecated { query: Query } directive @deprecated on SCHEMA """ ) def with_directive_defined_in_schema_extension(): schema = build_schema( """ type Query { foo: String } """ ) assert_sdl_valid( """ directive @test on OBJECT extend type Query @test """, schema=schema, ) def with_directive_used_in_schema_extension(): schema = build_schema( """ directive @test on OBJECT type Query { foo: String } """ ) assert_sdl_valid( """ extend type Query @test """, schema=schema, ) def with_unknown_directive_in_schema_extension(): schema = 
build_schema( """ type Query { foo: String } """ ) assert_sdl_errors( """ extend type Query @unknown """, [{"message": "Unknown directive '@unknown'.", "locations": [(2, 35)]}], schema, ) def with_well_placed_directives(): assert_sdl_valid( """ type MyObj implements MyInterface @onObject { myField(myArg: Int @onArgumentDefinition): String @onFieldDefinition } extend type MyObj @onObject scalar MyScalar @onScalar extend scalar MyScalar @onScalar interface MyInterface @onInterface { myField(myArg: Int @onArgumentDefinition): String @onFieldDefinition } extend interface MyInterface @onInterface union MyUnion @onUnion = MyObj | Other extend union MyUnion @onUnion enum MyEnum @onEnum { MY_VALUE @onEnumValue } extend enum MyEnum @onEnum input MyInput @onInputObject { myField: Int @onInputFieldDefinition } extend input MyInput @onInputObject schema @onSchema { query: MyQuery } extend schema @onSchema """, schema=schema_with_sdl_directives, ) def with_misplaced_directives(): assert_sdl_errors( """ type MyObj implements MyInterface @onInterface { myField(myArg: Int @onInputFieldDefinition): String @onInputFieldDefinition } scalar MyScalar @onEnum interface MyInterface @onObject { myField(myArg: Int @onInputFieldDefinition): String @onInputFieldDefinition } union MyUnion @onEnumValue = MyObj | Other enum MyEnum @onScalar { MY_VALUE @onUnion } input MyInput @onEnum { myField: Int @onArgumentDefinition } schema @onObject { query: MyQuery } extend schema @onObject """, # noqa: E501 [ { "message": "Directive '@onInterface'" " may not be used on object.", "locations": [(2, 51)], }, { "message": "Directive '@onInputFieldDefinition'" " may not be used on argument definition.", "locations": [(3, 38)], }, { "message": "Directive '@onInputFieldDefinition'" " may not be used on field definition.", "locations": [(3, 71)], }, { "message": "Directive '@onEnum' may not be used on scalar.", "locations": [(6, 33)], }, { "message": "Directive '@onObject'" " may not be used on interface.", 
"locations": [(8, 39)], }, { "message": "Directive '@onInputFieldDefinition'" " may not be used on argument definition.", "locations": [(9, 38)], }, { "message": "Directive '@onInputFieldDefinition'" " may not be used on field definition.", "locations": [(9, 71)], }, { "message": "Directive '@onEnumValue' may not be used on union.", "locations": [(12, 31)], }, { "message": "Directive '@onScalar' may not be used on enum.", "locations": [(14, 29)], }, { "message": "Directive '@onUnion'" " may not be used on enum value.", "locations": [(15, 28)], }, { "message": "Directive '@onEnum'" " may not be used on input object.", "locations": [(18, 31)], }, { "message": "Directive '@onArgumentDefinition'" " may not be used on input field definition.", "locations": [(19, 32)], }, { "message": "Directive '@onObject' may not be used on schema.", "locations": [(22, 24)], }, { "message": "Directive '@onObject' may not be used on schema.", "locations": [(26, 31)], }, ], schema_with_sdl_directives, )
{ "repo_name": "graphql-python/graphql-core", "path": "tests/validation/test_known_directives.py", "copies": "1", "size": "12147", "license": "mit", "hash": -8385681263942822000, "line_mean": 28.9925925926, "line_max": 93, "alpha_frac": 0.4017452869, "autogenerated": false, "ratio": 5.55672461116194, "config_test": true, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.645846989806194, "avg_score": null, "num_lines": null }
from functools import partial from graphql.utilities import build_schema from graphql.validation import KnownTypeNamesRule from .harness import assert_validation_errors, assert_sdl_validation_errors assert_errors = partial(assert_validation_errors, KnownTypeNamesRule) assert_valid = partial(assert_errors, errors=[]) assert_sdl_errors = partial(assert_sdl_validation_errors, KnownTypeNamesRule) assert_sdl_valid = partial(assert_sdl_errors, errors=[]) def describe_validate_known_type_names(): def known_type_names_are_valid(): assert_valid( """ query Foo( $var: String $required: [Int!]! $introspectionType: __EnumValue ) { user(id: 4) { pets { ... on Pet { name }, ...PetFields, ... { name } } } } fragment PetFields on Pet { name } """ ) def unknown_type_names_are_invalid(): assert_errors( """ query Foo($var: JumbledUpLetters) { user(id: 4) { name pets { ... on Badger { name }, ...PetFields, ... { name } } } } fragment PetFields on Peat { name } """, [ { "message": "Unknown type 'JumbledUpLetters'.", "locations": [(2, 29)], }, {"message": "Unknown type 'Badger'.", "locations": [(5, 31)]}, { "message": "Unknown type 'Peat'. 
Did you mean 'Pet' or 'Cat'?", "locations": [(8, 35)], }, ], ) def references_to_standard_scalars_that_are_missing_in_schema(): schema = build_schema("type Query { foo: String }") query = """ query ($id: ID, $float: Float, $int: Int) { __typename } """ assert_errors( query, [ {"message": "Unknown type 'ID'.", "locations": [(2, 25)]}, {"message": "Unknown type 'Float'.", "locations": [(2, 37)]}, {"message": "Unknown type 'Int'.", "locations": [(2, 50)]}, ], schema, ) def describe_within_sdl(): def use_standard_types(): assert_sdl_valid( """ type Query { string: String int: Int float: Float boolean: Boolean id: ID introspectionType: __EnumValue } """ ) def reference_types_defined_inside_the_same_document(): assert_sdl_valid( """ union SomeUnion = SomeObject | AnotherObject type SomeObject implements SomeInterface { someScalar(arg: SomeInputObject): SomeScalar } type AnotherObject { foo(arg: SomeInputObject): String } type SomeInterface { someScalar(arg: SomeInputObject): SomeScalar } input SomeInputObject { someScalar: SomeScalar } scalar SomeScalar type RootQuery { someInterface: SomeInterface someUnion: SomeUnion someScalar: SomeScalar someObject: SomeObject } schema { query: RootQuery } """ ) def unknown_type_references(): assert_sdl_errors( """ type A type B type SomeObject implements C { e(d: D): E } union SomeUnion = F | G interface SomeInterface { i(h: H): I } input SomeInput { j: J } directive @SomeDirective(k: K) on QUERY schema { query: L mutation: M subscription: N } """, [ { "message": "Unknown type 'C'. Did you mean 'A' or 'B'?", "locations": [(5, 44)], }, { "message": "Unknown type 'D'. Did you mean 'A', 'B', or 'ID'?", "locations": [(6, 24)], }, { "message": "Unknown type 'E'. Did you mean 'A' or 'B'?", "locations": [(6, 28)], }, { "message": "Unknown type 'F'. Did you mean 'A' or 'B'?", "locations": [(9, 35)], }, { "message": "Unknown type 'G'. Did you mean 'A' or 'B'?", "locations": [(9, 39)], }, { "message": "Unknown type 'H'. 
Did you mean 'A' or 'B'?", "locations": [(12, 24)], }, { "message": "Unknown type 'I'. Did you mean 'A', 'B', or 'ID'?", "locations": [(12, 28)], }, { "message": "Unknown type 'J'. Did you mean 'A' or 'B'?", "locations": [(16, 22)], }, { "message": "Unknown type 'K'. Did you mean 'A' or 'B'?", "locations": [(19, 45)], }, { "message": "Unknown type 'L'. Did you mean 'A' or 'B'?", "locations": [(22, 26)], }, { "message": "Unknown type 'M'. Did you mean 'A' or 'B'?", "locations": [(23, 29)], }, { "message": "Unknown type 'N'. Did you mean 'A' or 'B'?", "locations": [(24, 33)], }, ], ) def does_not_consider_non_type_definitions(): assert_sdl_errors( """ query Foo { __typename } fragment Foo on Query { __typename } directive @Foo on QUERY type Query { foo: Foo } """, [{"message": "Unknown type 'Foo'.", "locations": [(7, 24)]}], ) def reference_standard_types_inside_extension_document(): schema = build_schema("type Foo") sdl = """ type SomeType { string: String int: Int float: Float boolean: Boolean id: ID introspectionType: __EnumValue } """ assert_sdl_valid(sdl, schema=schema) def reference_types_inside_extension_document(): schema = build_schema("type Foo") sdl = """ type QueryRoot { foo: Foo bar: Bar } scalar Bar schema { query: QueryRoot } """ assert_sdl_valid(sdl, schema=schema) def unknown_type_references_inside_extension_document(): schema = build_schema("type A") sdl = """ type B type SomeObject implements C { e(d: D): E } union SomeUnion = F | G interface SomeInterface { i(h: H): I } input SomeInput { j: J } directive @SomeDirective(k: K) on QUERY schema { query: L mutation: M subscription: N } """ assert_sdl_errors( sdl, [ { "message": "Unknown type 'C'. Did you mean 'A' or 'B'?", "locations": [(4, 44)], }, { "message": "Unknown type 'D'. Did you mean 'A', 'B', or 'ID'?", "locations": [(5, 24)], }, { "message": "Unknown type 'E'. Did you mean 'A' or 'B'?", "locations": [(5, 28)], }, { "message": "Unknown type 'F'. 
Did you mean 'A' or 'B'?", "locations": [(8, 35)], }, { "message": "Unknown type 'G'. Did you mean 'A' or 'B'?", "locations": [(8, 39)], }, { "message": "Unknown type 'H'. Did you mean 'A' or 'B'?", "locations": [(11, 24)], }, { "message": "Unknown type 'I'. Did you mean 'A', 'B', or 'ID'?", "locations": [(11, 28)], }, { "message": "Unknown type 'J'. Did you mean 'A' or 'B'?", "locations": [(15, 22)], }, { "message": "Unknown type 'K'. Did you mean 'A' or 'B'?", "locations": [(18, 45)], }, { "message": "Unknown type 'L'. Did you mean 'A' or 'B'?", "locations": [(21, 26)], }, { "message": "Unknown type 'M'. Did you mean 'A' or 'B'?", "locations": [(22, 29)], }, { "message": "Unknown type 'N'. Did you mean 'A' or 'B'?", "locations": [(23, 33)], }, ], schema, )
{ "repo_name": "graphql-python/graphql-core", "path": "tests/validation/test_known_type_names.py", "copies": "1", "size": "10753", "license": "mit", "hash": -8069645148541838000, "line_mean": 30.8136094675, "line_max": 87, "alpha_frac": 0.3444620106, "autogenerated": false, "ratio": 5.2300583657587545, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.6074520376358754, "avg_score": null, "num_lines": null }
from functools import partial from graphql.utilities import build_schema from graphql.validation import NoSchemaIntrospectionCustomRule from .harness import assert_validation_errors schema = build_schema( """ type Query { someQuery: SomeType } type SomeType { someField: String introspectionField: __EnumValue } """ ) assert_errors = partial( assert_validation_errors, NoSchemaIntrospectionCustomRule, schema=schema ) assert_valid = partial(assert_errors, errors=[]) def describe_validate_prohibit_introspection_queries(): def ignores_valid_fields_including_typename(): assert_valid( """ { someQuery { __typename someField } } """ ) def ignores_fields_not_in_the_schema(): assert_valid( """ { __introspect } """ ) def reports_error_when_a_field_with_an_introspection_type_is_requested(): assert_errors( """ { __schema { queryType { name } } } """, [ { "message": "GraphQL introspection has been disabled," " but the requested query contained the field '__schema'.", "locations": [(3, 15)], }, { "message": "GraphQL introspection has been disabled," " but the requested query contained the field 'queryType'.", "locations": [(4, 17)], }, ], ) def reports_error_when_a_field_with_introspection_type_is_requested_and_aliased(): assert_errors( """ { s: __schema { queryType { name } } } """, [ { "message": "GraphQL introspection has been disabled," " but the requested query contained the field '__schema'.", "locations": [(3, 15)], }, { "message": "GraphQL introspection has been disabled," " but the requested query contained the field 'queryType'.", "locations": [(4, 17)], }, ], ) def reports_error_when_using_a_fragment_with_a_field_with_an_introspection_type(): assert_errors( """ { ...QueryFragment } fragment QueryFragment on Query { __schema { queryType { name } } } """, [ { "message": "GraphQL introspection has been disabled," " but the requested query contained the field '__schema'.", "locations": [(7, 15)], }, { "message": "GraphQL introspection has been disabled," " but the requested 
query contained the field 'queryType'.", "locations": [(8, 17)], }, ], ) def reports_error_for_non_standard_introspection_fields(): assert_errors( """ { someQuery { introspectionField } } """, [ { "message": "GraphQL introspection has been disabled, but" " the requested query contained the field 'introspectionField'.", "locations": [(4, 17)], }, ], )
{ "repo_name": "graphql-python/graphql-core", "path": "tests/validation/test_no_schema_introspection.py", "copies": "1", "size": "3950", "license": "mit", "hash": -4762846292655392000, "line_mean": 26.2413793103, "line_max": 86, "alpha_frac": 0.4212658228, "autogenerated": false, "ratio": 5.5013927576601676, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.6422658580460168, "avg_score": null, "num_lines": null }
from functools import partial from graphql.utilities import build_schema from graphql.validation import OverlappingFieldsCanBeMergedRule from .harness import assert_validation_errors assert_errors = partial(assert_validation_errors, OverlappingFieldsCanBeMergedRule) assert_valid = partial(assert_errors, errors=[]) def describe_validate_overlapping_fields_can_be_merged(): def unique_fields(): assert_valid( """ fragment uniqueFields on Dog { name nickname } """ ) def identical_fields(): assert_valid( """ fragment mergeIdenticalFields on Dog { name name } """ ) def identical_fields_with_identical_args(): assert_valid( """ fragment mergeIdenticalFieldsWithIdenticalArgs on Dog { doesKnowCommand(dogCommand: SIT) doesKnowCommand(dogCommand: SIT) } """ ) def identical_fields_with_identical_directives(): assert_valid( """ fragment mergeSameFieldsWithSameDirectives on Dog { name @include(if: true) name @include(if: true) } """ ) def different_args_with_different_aliases(): assert_valid( """ fragment differentArgsWithDifferentAliases on Dog { knowsSit: doesKnowCommand(dogCommand: SIT) knowsDown: doesKnowCommand(dogCommand: DOWN) } """ ) def different_directives_with_different_aliases(): assert_valid( """ fragment differentDirectivesWithDifferentAliases on Dog { nameIfTrue: name @include(if: true) nameIfFalse: name @include(if: false) } """ ) def different_skip_or_include_directives_accepted(): # Note: Differing skip/include directives don't create an ambiguous # return value and are acceptable in conditions where differing runtime # values may have the same desired effect of including/skipping a field assert_valid( """ fragment differentDirectivesWithDifferentAliases on Dog { name @include(if: true) name @include(if: false) } """ ) def same_aliases_with_different_field_targets(): assert_errors( """ fragment sameAliasesWithDifferentFieldTargets on Dog { fido: name fido: nickname } """, [ { "message": "Fields 'fido' conflict" " because 'name' and 'nickname' are different 
fields." " Use different aliases on the fields" " to fetch both if this was intentional.", "locations": [(3, 15), (4, 15)], "path": None, } ], ) def same_aliases_allowed_on_non_overlapping_fields(): assert_valid( """ fragment sameAliasesWithDifferentFieldTargets on Pet { ... on Dog { name } ... on Cat { name: nickname } } """ ) def alias_masking_direct_field_access(): assert_errors( """ fragment aliasMaskingDirectFieldAccess on Dog { name: nickname name } """, [ { "message": "Fields 'name' conflict" " because 'nickname' and 'name' are different fields." " Use different aliases on the fields" " to fetch both if this was intentional.", "locations": [(3, 15), (4, 15)], } ], ) def different_args_second_adds_an_argument(): assert_errors( """ fragment conflictingArgs on Dog { doesKnowCommand doesKnowCommand(dogCommand: HEEL) } """, [ { "message": "Fields 'doesKnowCommand' conflict" " because they have differing arguments." " Use different aliases on the fields" " to fetch both if this was intentional.", "locations": [(3, 15), (4, 15)], } ], ) def different_args_second_missing_an_argument(): assert_errors( """ fragment conflictingArgs on Dog { doesKnowCommand(dogCommand: SIT) doesKnowCommand } """, [ { "message": "Fields 'doesKnowCommand' conflict" " because they have differing arguments." " Use different aliases on the fields" " to fetch both if this was intentional.", "locations": [(3, 15), (4, 15)], } ], ) def conflicting_arg_values(): assert_errors( """ fragment conflictingArgs on Dog { doesKnowCommand(dogCommand: SIT) doesKnowCommand(dogCommand: HEEL) } """, [ { "message": "Fields 'doesKnowCommand' conflict" " because they have differing arguments." 
" Use different aliases on the fields" " to fetch both if this was intentional.", "locations": [(3, 15), (4, 15)], } ], ) def conflicting_arg_names(): assert_errors( """ fragment conflictingArgs on Dog { isAtLocation(x: 0) isAtLocation(y: 0) } """, [ { "message": "Fields 'isAtLocation' conflict" " because they have differing arguments." " Use different aliases on the fields" " to fetch both if this was intentional.", "locations": [(3, 15), (4, 15)], } ], ) def allows_different_args_where_no_conflict_is_possible(): # This is valid since no object can be both a "Dog" and a "Cat", thus # these fields can never overlap. assert_valid( """ fragment conflictingArgs on Pet { ... on Dog { name(surname: true) } ... on Cat { name } } """ ) def encounters_conflict_in_fragments(): assert_errors( """ { ...A ...B } fragment A on Type { x: a } fragment B on Type { x: b } """, [ { "message": "Fields 'x' conflict" " because 'a' and 'b' are different fields." " Use different aliases on the fields" " to fetch both if this was intentional.", "locations": [(7, 15), (10, 15)], } ], ) def reports_each_conflict_once(): assert_errors( """ { f1 { ...A ...B } f2 { ...B ...A } f3 { ...A ...B x: c } } fragment A on Type { x: a } fragment B on Type { x: b } """, [ { "message": "Fields 'x' conflict" " because 'a' and 'b' are different fields." " Use different aliases on the fields" " to fetch both if this was intentional.", "locations": [(18, 15), (21, 15)], }, { "message": "Fields 'x' conflict" " because 'c' and 'a' are different fields." " Use different aliases on the fields" " to fetch both if this was intentional.", "locations": [(14, 17), (18, 15)], }, { "message": "Fields 'x' conflict" " because 'c' and 'b' are different fields." 
" Use different aliases on the fields" " to fetch both if this was intentional.", "locations": [(14, 17), (21, 15)], }, ], ) def deep_conflict(): assert_errors( """ { field { x: a }, field { x: b } } """, [ { "message": "Fields 'field' conflict" " because subfields 'x' conflict" " because 'a' and 'b' are different fields." " Use different aliases on the fields" " to fetch both if this was intentional.", "locations": [(3, 15), (4, 17), (6, 15), (7, 17)], } ], ) def deep_conflict_with_multiple_issues(): assert_errors( """ { field { x: a y: c }, field { x: b y: d } } """, [ { "message": "Fields 'field' conflict" " because subfields 'x' conflict" " because 'a' and 'b' are different fields" " and subfields 'y' conflict" " because 'c' and 'd' are different fields." " Use different aliases on the fields" " to fetch both if this was intentional.", "locations": [(3, 15), (4, 17), (5, 17), (7, 15), (8, 17), (9, 17)], "path": None, } ], ) def very_deep_conflict(): assert_errors( """ { field { deepField { x: a } }, field { deepField { x: b } } } """, [ { "message": "Fields 'field' conflict" " because subfields 'deepField' conflict" " because subfields 'x' conflict" " because 'a' and 'b' are different fields." " Use different aliases on the fields" " to fetch both if this was intentional.", "locations": [ (3, 15), (4, 17), (5, 19), (8, 15), (9, 17), (10, 19), ], "path": None, } ], ) def reports_deep_conflict_to_nearest_common_ancestor(): assert_errors( """ { field { deepField { x: a } deepField { x: b } }, field { deepField { y } } } """, [ { "message": "Fields 'deepField' conflict" " because subfields 'x' conflict" " because 'a' and 'b' are different fields." 
" Use different aliases on the fields" " to fetch both if this was intentional.", "locations": [(4, 17), (5, 19), (7, 17), (8, 19)], } ], ) def reports_deep_conflict_to_nearest_common_ancestor_in_fragments(): assert_errors( """ { field { ...F } field { ...F } } fragment F on T { deepField { deeperField { x: a } deeperField { x: b } }, deepField { deeperField { y } } } """, [ { "message": "Fields 'deeperField' conflict" " because subfields 'x' conflict" " because 'a' and 'b' are different fields." " Use different aliases on the fields" " to fetch both if this was intentional.", "locations": [(12, 17), (13, 19), (15, 17), (16, 19)], } ], ) def reports_deep_conflict_in_nested_fragments(): assert_errors( """ { field { ...F }, field { ...I } } fragment F on T { x: a ...G } fragment G on T { y: c } fragment I on T { y: d ...J } fragment J on T { x: b } """, [ { "message": "Fields 'field' conflict" " because subfields 'x' conflict" " because 'a' and 'b' are different fields" " and subfields 'y' conflict" " because 'c' and 'd' are different fields." " Use different aliases on the fields" " to fetch both if this was intentional.", "locations": [ (3, 15), (11, 15), (15, 15), (6, 15), (22, 15), (18, 15), ], "path": None, } ], ) def ignores_unknown_fragments(): assert_valid( """ { field ...Unknown ...Known } fragment Known on T { field ...OtherUnknown } """ ) def describe_return_types_must_be_unambiguous(): schema = build_schema( """ interface SomeBox { deepBox: SomeBox unrelatedField: String } type StringBox implements SomeBox { scalar: String deepBox: StringBox unrelatedField: String listStringBox: [StringBox] stringBox: StringBox intBox: IntBox } type IntBox implements SomeBox { scalar: Int deepBox: IntBox unrelatedField: String listStringBox: [StringBox] stringBox: StringBox intBox: IntBox } interface NonNullStringBox1 { scalar: String! } type NonNullStringBox1Impl implements SomeBox & NonNullStringBox1 { scalar: String! 
unrelatedField: String deepBox: SomeBox } interface NonNullStringBox2 { scalar: String! } type NonNullStringBox2Impl implements SomeBox & NonNullStringBox2 { scalar: String! unrelatedField: String deepBox: SomeBox } type Connection { edges: [Edge] } type Edge { node: Node } type Node { id: ID name: String } type Query { someBox: SomeBox connection: Connection } """ ) def conflicting_return_types_which_potentially_overlap(): # This is invalid since an object could potentially be both the # Object type IntBox and the interface type NonNullStringBox1. # While that condition does not exist in the current schema, the # schema could expand in the future to allow this. assert_errors( """ { someBox { ...on IntBox { scalar } ...on NonNullStringBox1 { scalar } } } """, [ { "message": "Fields 'scalar' conflict because" " they return conflicting types 'Int' and 'String!'." " Use different aliases on the fields" " to fetch both if this was intentional.", "locations": [(5, 23), (8, 23)], } ], schema, ) def compatible_return_shapes_on_different_return_types(): # In this case `deepBox` returns `SomeBox` in the first usage, and # `StringBox` in the second usage. These types are not the same! # However this is valid because the return *shapes* are compatible. assert_valid( """ { someBox { ... on SomeBox { deepBox { unrelatedField } } ... on StringBox { deepBox { unrelatedField } } } } """, schema=schema, ) def disallows_differing_return_types_despite_no_overlap(): assert_errors( """ { someBox { ... on IntBox { scalar } ... on StringBox { scalar } } } """, [ { "message": "Fields 'scalar' conflict because" " they return conflicting types 'Int' and 'String'." " Use different aliases on the fields" " to fetch both if this was intentional.", "locations": [(5, 23), (8, 23)], } ], schema, ) def reports_correctly_when_a_non_exclusive_follows_an_exclusive(): assert_errors( """ { someBox { ... on IntBox { deepBox { ...X } } } someBox { ... 
on StringBox { deepBox { ...Y } } } memoed: someBox { ... on IntBox { deepBox { ...X } } } memoed: someBox { ... on StringBox { deepBox { ...Y } } } other: someBox { ...X } other: someBox { ...Y } } fragment X on SomeBox { scalar } fragment Y on SomeBox { scalar: unrelatedField } """, [ { "message": "Fields 'other' conflict because" " subfields 'scalar' conflict because" " 'scalar' and 'unrelatedField' are different fields." " Use different aliases on the fields" " to fetch both if this was intentional.", "locations": [(31, 19), (39, 19), (34, 19), (42, 19)], "path": None, } ], schema, ) def disallows_differing_return_type_nullability_despite_no_overlap(): assert_errors( """ { someBox { ... on NonNullStringBox1 { scalar } ... on StringBox { scalar } } } """, [ { "message": "Fields 'scalar' conflict because" " they return conflicting types 'String!' and 'String'." " Use different aliases on the fields" " to fetch both if this was intentional.", "locations": [(5, 23), (8, 23)], } ], schema, ) def disallows_differing_return_type_list_despite_no_overlap_1(): assert_errors( """ { someBox { ... on IntBox { box: listStringBox { scalar } } ... on StringBox { box: stringBox { scalar } } } } """, [ { "message": "Fields 'box' conflict because they return" " conflicting types '[StringBox]' and 'StringBox'." " Use different aliases on the fields" " to fetch both if this was intentional.", "locations": [(5, 23), (10, 23)], } ], schema, ) assert_errors( """ { someBox { ... on IntBox { box: stringBox { scalar } } ... on StringBox { box: listStringBox { scalar } } } } """, [ { "message": "Fields 'box' conflict because they return" " conflicting types 'StringBox' and '[StringBox]'." " Use different aliases on the fields" " to fetch both if this was intentional.", "locations": [(5, 23), (10, 23)], } ], schema, ) def disallows_differing_subfields(): assert_errors( """ { someBox { ... on IntBox { box: stringBox { val: scalar val: unrelatedField } } ... 
on StringBox { box: stringBox { val: scalar } } } } """, [ { "message": "Fields 'val' conflict because" " 'scalar' and 'unrelatedField' are different fields." " Use different aliases on the fields" " to fetch both if this was intentional.", "locations": [(6, 25), (7, 25)], } ], schema, ) def disallows_differing_deep_return_types_despite_no_overlap(): assert_errors( """ { someBox { ... on IntBox { box: stringBox { scalar } } ... on StringBox { box: intBox { scalar } } } } """, [ { "message": "Fields 'box' conflict" " because subfields 'scalar' conflict" " because they return conflicting types 'String' and 'Int'." " Use different aliases on the fields" " to fetch both if this was intentional.", "locations": [(5, 23), (6, 25), (10, 23), (11, 25)], "path": None, } ], schema, ) def allows_non_conflicting_overlapping_types(): assert_valid( """ { someBox { ... on IntBox { scalar: unrelatedField } ... on StringBox { scalar } } } """, schema=schema, ) def same_wrapped_scalar_return_types(): assert_valid( """ { someBox { ...on NonNullStringBox1 { scalar } ...on NonNullStringBox2 { scalar } } } """, schema=schema, ) def allows_inline_fragments_without_type_condition(): assert_valid( """ { a ... { a } } """, schema=schema, ) def compares_deep_types_including_list(): assert_errors( """ { connection { ...edgeID edges { node { id: name } } } } fragment edgeID on Connection { edges { node { id } } } """, [ { "message": "Fields 'edges' conflict" " because subfields 'node' conflict" " because subfields 'id' conflict" " because 'name' and 'id' are different fields." 
" Use different aliases on the fields" " to fetch both if this was intentional.", "locations": [ (5, 21), (6, 23), (7, 25), (14, 19), (15, 21), (16, 23), ], "path": None, } ], schema, ) def ignores_unknown_types(): assert_valid( """ { someBox { ...on UnknownType { scalar } ...on NonNullStringBox2 { scalar } } } """, schema=schema, ) def works_for_field_names_that_are_js_keywords(): schema_with_keywords = build_schema( """ type Foo { constructor: String } type Query { foo: Foo } """ ) assert_valid( """ { foo { constructor } } """, schema=schema_with_keywords, ) def works_for_field_names_that_are_python_keywords(): schema_with_keywords = build_schema( """ type Foo { class: String } type Query { foo: Foo } """ ) assert_valid( """ { foo { class } } """, schema=schema_with_keywords, ) def does_not_infinite_loop_on_recursive_fragments(): assert_valid( """ fragment fragA on Human { name, relatives { name, ...fragA } } """ ) def does_not_infinite_loop_on_immediately_recursive_fragments(): assert_valid( """ fragment fragA on Human { name, ...fragA } """ ) def does_not_infinite_loop_on_transitively_recursive_fragments(): assert_valid( """ fragment fragA on Human { name, ...fragB } fragment fragB on Human { name, ...fragC } fragment fragC on Human { name, ...fragA } """ ) def finds_invalid_case_even_with_immediately_recursive_fragment(): assert_errors( """ fragment sameAliasesWithDifferentFieldTargets on Dog { ...sameAliasesWithDifferentFieldTargets fido: name fido: nickname } """, [ { "message": "Fields 'fido' conflict" " because 'name' and 'nickname' are different fields." " Use different aliases on the fields" " to fetch both if this was intentional.", "locations": [(4, 15), (5, 15)], } ], )
{ "repo_name": "graphql-python/graphql-core", "path": "tests/validation/test_overlapping_fields_can_be_merged.py", "copies": "1", "size": "32146", "license": "mit", "hash": -3563562996618906600, "line_mean": 28.6823638042, "line_max": 88, "alpha_frac": 0.3424687364, "autogenerated": false, "ratio": 5.800433056658246, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.6642901793058247, "avg_score": null, "num_lines": null }
from functools import partial from graphql.utilities import build_schema from graphql.validation import ProvidedRequiredArgumentsRule from graphql.validation.rules.provided_required_arguments import ( ProvidedRequiredArgumentsOnDirectivesRule, ) from .harness import assert_validation_errors, assert_sdl_validation_errors assert_errors = partial(assert_validation_errors, ProvidedRequiredArgumentsRule) assert_valid = partial(assert_errors, errors=[]) assert_sdl_errors = partial( assert_sdl_validation_errors, ProvidedRequiredArgumentsOnDirectivesRule ) assert_sdl_valid = partial(assert_sdl_errors, errors=[]) def describe_validate_provided_required_arguments(): def ignores_unknown_arguments(): assert_valid( """ { dog { isHouseTrained(unknownArgument: true) } }""" ) def describe_valid_non_nullable_value(): def arg_on_optional_arg(): assert_valid( """ { dog { isHouseTrained(atOtherHomes: true) } }""" ) def no_arg_on_optional_arg(): assert_valid( """ { dog { isHouseTrained } }""" ) def no_arg_on_non_null_field_with_default(): assert_valid( """ { complicatedArgs { nonNullFieldWithDefault } }""" ) def multiple_args(): assert_valid( """ { complicatedArgs { multipleReqs(req1: 1, req2: 2) } } """ ) def multiple_args_reverse_order(): assert_valid( """ { complicatedArgs { multipleReqs(req2: 2, req1: 1) } } """ ) def no_args_on_multiple_optional(): assert_valid( """ { complicatedArgs { multipleOpts } } """ ) def one_arg_on_multiple_optional(): assert_valid( """ { complicatedArgs { multipleOpts(opt1: 1) } } """ ) def second_arg_on_multiple_optional(): assert_valid( """ { complicatedArgs { multipleOpts(opt2: 1) } } """ ) def multiple_required_args_on_mixed_list(): assert_valid( """ { complicatedArgs { multipleOptAndReq(req1: 3, req2: 4) } } """ ) def multiple_required_and_one_optional_arg_on_mixed_list(): assert_valid( """ { complicatedArgs { multipleOptAndReq(req1: 3, req2: 4, opt1: 5) } } """ ) def all_required_and_optional_args_on_mixed_list(): assert_valid( """ { complicatedArgs 
{ multipleOptAndReq(req1: 3, req2: 4, opt1: 5, opt2: 6) } } """ ) def describe_invalid_non_nullable_value(): def missing_one_non_nullable_argument(): assert_errors( """ { complicatedArgs { multipleReqs(req2: 2) } } """, [ { "message": "Field 'multipleReqs' argument 'req1'" " of type 'Int!' is required, but it was not provided.", "locations": [(4, 21)], }, ], ) def missing_multiple_non_nullable_arguments(): assert_errors( """ { complicatedArgs { multipleReqs } } """, [ { "message": "Field 'multipleReqs' argument 'req1'" " of type 'Int!' is required, but it was not provided.", "locations": [(4, 21)], }, { "message": "Field 'multipleReqs' argument 'req2'" " of type 'Int!' is required, but it was not provided.", "locations": [(4, 21)], }, ], ) def incorrect_value_and_missing_argument(): assert_errors( """ { complicatedArgs { multipleReqs(req1: "one") } } """, [ { "message": "Field 'multipleReqs' argument 'req2'" " of type 'Int!' is required, but it was not provided.", "locations": [(4, 21)], }, ], ) def describe_directive_arguments(): def ignores_unknown_directives(): assert_valid( """ { dog @unknown } """ ) def with_directives_of_valid_type(): assert_valid( """ { dog @include(if: true) { name } human @skip(if: false) { name } } """ ) def with_directive_with_missing_types(): assert_errors( """ { dog @include { name @skip } } """, [ { "message": "Directive '@include' argument 'if' of type" " 'Boolean!' is required, but it was not provided.", "locations": [(3, 23)], }, { "message": "Directive '@skip' argument 'if' of type" " 'Boolean!' is required, but it was not provided.", "locations": [(4, 26)], }, ], ) def describe_within_sdl(): def missing_optional_args_on_directive_defined_inside_sdl(): assert_sdl_valid( """ type Query { foo: String @test } directive @test(arg1: String, arg2: String! = "") on FIELD_DEFINITION """ ) def missing_arg_on_directive_defined_inside_sdl(): assert_sdl_errors( """ type Query { foo: String @test } directive @test(arg: String!) 
on FIELD_DEFINITION """, [ { "message": "Directive '@test' argument 'arg' of type" " 'String!' is required, but it was not provided.", "locations": [(3, 31)], }, ], ) def missing_arg_on_standard_directive(): assert_sdl_errors( """ type Query { foo: String @include } """, [ { "message": "Directive '@include' argument 'if' of type" " 'Boolean!' is required, but it was not provided.", "locations": [(3, 31)], }, ], ) def missing_arg_on_overridden_standard_directive(): assert_sdl_errors( """ type Query { foo: String @deprecated } directive @deprecated(reason: String!) on FIELD """, [ { "message": "Directive '@deprecated' argument 'reason' of type" " 'String!' is required, but it was not provided.", "locations": [(3, 31)], }, ], ) def missing_arg_on_directive_defined_in_schema_extension(): schema = build_schema( """ type Query { foo: String } """ ) assert_sdl_errors( """ directive @test(arg: String!) on OBJECT extend type Query @test """, [ { "message": "Directive '@test' argument 'arg' of type" " 'String!' is required, but it was not provided.", "locations": [(4, 36)], }, ], schema, ) def missing_arg_on_directive_used_in_schema_extension(): schema = build_schema( """ directive @test(arg: String!) on OBJECT type Query { foo: String } """ ) assert_sdl_errors( """ extend type Query @test """, [ { "message": "Directive '@test' argument 'arg' of type" " 'String!' is required, but it was not provided.", "locations": [(2, 36)], }, ], schema, )
{ "repo_name": "graphql-python/graphql-core", "path": "tests/validation/test_provided_required_arguments.py", "copies": "1", "size": "10621", "license": "mit", "hash": -1044376309870730800, "line_mean": 27.8614130435, "line_max": 86, "alpha_frac": 0.3486489031, "autogenerated": false, "ratio": 5.649468085106383, "config_test": true, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.6498116988206383, "avg_score": null, "num_lines": null }
from functools import partial from graphql.utilities import build_schema from graphql.validation.rules.lone_schema_definition import LoneSchemaDefinitionRule from .harness import assert_sdl_validation_errors assert_sdl_errors = partial(assert_sdl_validation_errors, LoneSchemaDefinitionRule) assert_sdl_valid = partial(assert_sdl_errors, errors=[]) def describe_validate_schema_definition_should_be_alone(): def no_schema(): assert_sdl_valid( """ type Query { foo: String } """ ) def one_schema_definition(): assert_sdl_valid( """ schema { query: Foo } type Foo { foo: String } """ ) def multiple_schema_definitions(): assert_sdl_errors( """ schema { query: Foo } type Foo { foo: String } schema { mutation: Foo } schema { subscription: Foo } """, [ { "message": "Must provide only one schema definition.", "locations": [(10, 13)], }, { "message": "Must provide only one schema definition.", "locations": [(14, 13)], }, ], ) def define_schema_in_schema_extension(): schema = build_schema( """ type Foo { foo: String } """ ) assert_sdl_valid( """ schema { query: Foo } """, schema=schema, ) def redefine_schema_in_schema_extension(): schema = build_schema( """ schema { query: Foo } type Foo { foo: String } """ ) assert_sdl_errors( """ schema { mutation: Foo } """, [ { "message": "Cannot define a new schema within a schema extension.", "locations": [(2, 13)], } ], schema, ) def redefine_implicit_schema_in_schema_extension(): schema = build_schema( """ type Query { fooField: Foo } type Foo { foo: String } """ ) assert_sdl_errors( """ schema { mutation: Foo } """, [ { "message": "Cannot define a new schema within a schema extension.", "locations": [(2, 13)], }, ], schema, ) def extend_schema_in_schema_extension(): schema = build_schema( """ type Query { fooField: Foo } type Foo { foo: String } """ ) assert_sdl_valid( """ extend schema { mutation: Foo } """, schema=schema, )
{ "repo_name": "graphql-python/graphql-core", "path": "tests/validation/test_lone_schema_definition.py", "copies": "1", "size": "3468", "license": "mit", "hash": 4968167562528436000, "line_mean": 20.5403726708, "line_max": 87, "alpha_frac": 0.3711072664, "autogenerated": false, "ratio": 5.470031545741325, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 1, "avg_score": 0.0002881785125117241, "num_lines": 161 }
from functools import partial from graphql.utilities import build_schema from graphql.validation.rules.possible_type_extensions import PossibleTypeExtensionsRule from .harness import assert_sdl_validation_errors assert_errors = partial(assert_sdl_validation_errors, PossibleTypeExtensionsRule) assert_valid = partial(assert_errors, errors=[]) def describe_validate_possible_type_extensions(): def no_extensions(): assert_valid( """ scalar FooScalar type FooObject interface FooInterface union FooUnion enum FooEnum input FooInputObject """ ) def one_extension_per_type(): assert_valid( """ scalar FooScalar type FooObject interface FooInterface union FooUnion enum FooEnum input FooInputObject extend scalar FooScalar @dummy extend type FooObject @dummy extend interface FooInterface @dummy extend union FooUnion @dummy extend enum FooEnum @dummy extend input FooInputObject @dummy """ ) def many_extensions_per_type(): assert_valid( """ scalar FooScalar type FooObject interface FooInterface union FooUnion enum FooEnum input FooInputObject extend scalar FooScalar @dummy extend type FooObject @dummy extend interface FooInterface @dummy extend union FooUnion @dummy extend enum FooEnum @dummy extend input FooInputObject @dummy extend scalar FooScalar @dummy extend type FooObject @dummy extend interface FooInterface @dummy extend union FooUnion @dummy extend enum FooEnum @dummy extend input FooInputObject @dummy """ ) def extending_unknown_type(): message = ( "Cannot extend type 'Unknown' because it is not defined." " Did you mean 'Known'?" 
) assert_errors( """ type Known extend scalar Unknown @dummy extend type Unknown @dummy extend interface Unknown @dummy extend union Unknown @dummy extend enum Unknown @dummy extend input Unknown @dummy """, [ {"message": message, "locations": [(4, 27)]}, {"message": message, "locations": [(5, 25)]}, {"message": message, "locations": [(6, 30)]}, {"message": message, "locations": [(7, 26)]}, {"message": message, "locations": [(8, 25)]}, {"message": message, "locations": [(9, 26)]}, ], ) def does_not_consider_non_type_definitions(): message = "Cannot extend type 'Foo' because it is not defined." assert_errors( """ query Foo { __typename } fragment Foo on Query { __typename } directive @Foo on SCHEMA extend scalar Foo @dummy extend type Foo @dummy extend interface Foo @dummy extend union Foo @dummy extend enum Foo @dummy extend input Foo @dummy """, [ {"message": message, "locations": [(6, 27)]}, {"message": message, "locations": [(7, 25)]}, {"message": message, "locations": [(8, 30)]}, {"message": message, "locations": [(9, 26)]}, {"message": message, "locations": [(10, 25)]}, {"message": message, "locations": [(11, 26)]}, ], ) def extending_with_different_kinds(): assert_errors( """ scalar FooScalar type FooObject interface FooInterface union FooUnion enum FooEnum input FooInputObject extend type FooScalar @dummy extend interface FooObject @dummy extend union FooInterface @dummy extend enum FooUnion @dummy extend input FooEnum @dummy extend scalar FooInputObject @dummy """, [ { "message": "Cannot extend non-object type 'FooScalar'.", "locations": [(2, 13), (9, 13)], }, { "message": "Cannot extend non-interface type 'FooObject'.", "locations": [(3, 13), (10, 13)], }, { "message": "Cannot extend non-union type 'FooInterface'.", "locations": [(4, 13), (11, 13)], }, { "message": "Cannot extend non-enum type 'FooUnion'.", "locations": [(5, 13), (12, 13)], }, { "message": "Cannot extend non-input object type 'FooEnum'.", "locations": [(6, 13), (13, 13)], }, { "message": 
"Cannot extend non-scalar type 'FooInputObject'.", "locations": [(7, 13), (14, 13)], }, ], ) def extending_types_within_existing_schema(): schema = build_schema( """ scalar FooScalar type FooObject interface FooInterface union FooUnion enum FooEnum input FooInputObject """ ) sdl = """ extend scalar FooScalar @dummy extend type FooObject @dummy extend interface FooInterface @dummy extend union FooUnion @dummy extend enum FooEnum @dummy extend input FooInputObject @dummy """ assert_valid(sdl, schema=schema) def extending_unknown_types_within_existing_schema(): schema = build_schema("type Known") sdl = """ extend scalar Unknown @dummy extend type Unknown @dummy extend interface Unknown @dummy extend union Unknown @dummy extend enum Unknown @dummy extend input Unknown @dummy """ message = ( "Cannot extend type 'Unknown' because it is not defined." " Did you mean 'Known'?" ) assert_errors( sdl, [ {"message": message, "locations": [(2, 27)]}, {"message": message, "locations": [(3, 25)]}, {"message": message, "locations": [(4, 30)]}, {"message": message, "locations": [(5, 26)]}, {"message": message, "locations": [(6, 25)]}, {"message": message, "locations": [(7, 26)]}, ], schema, ) def extending_types_with_different_kinds_within_existing_schema(): schema = build_schema( """ scalar FooScalar type FooObject interface FooInterface union FooUnion enum FooEnum input FooInputObject """ ) sdl = """ extend type FooScalar @dummy extend interface FooObject @dummy extend union FooInterface @dummy extend enum FooUnion @dummy extend input FooEnum @dummy extend scalar FooInputObject @dummy """ assert_errors( sdl, [ { "message": "Cannot extend non-object type 'FooScalar'.", "locations": [(2, 13)], }, { "message": "Cannot extend non-interface type 'FooObject'.", "locations": [(3, 13)], }, { "message": "Cannot extend non-union type 'FooInterface'.", "locations": [(4, 13)], }, { "message": "Cannot extend non-enum type 'FooUnion'.", "locations": [(5, 13)], }, { "message": "Cannot extend 
non-input object type 'FooEnum'.", "locations": [(6, 13)], }, { "message": "Cannot extend non-scalar type 'FooInputObject'.", "locations": [(7, 13)], }, ], schema, )
{ "repo_name": "graphql-python/graphql-core", "path": "tests/validation/test_possible_type_extensions.py", "copies": "1", "size": "8734", "license": "mit", "hash": -3188730492315333000, "line_mean": 31.5895522388, "line_max": 88, "alpha_frac": 0.4719487062, "autogenerated": false, "ratio": 5.371463714637146, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.6343412420837147, "avg_score": null, "num_lines": null }
from functools import partial from graphql.utilities import build_schema from graphql.validation.rules.unique_enum_value_names import UniqueEnumValueNamesRule from .harness import assert_sdl_validation_errors assert_errors = partial(assert_sdl_validation_errors, UniqueEnumValueNamesRule) assert_valid = partial(assert_errors, errors=[]) def describe_validate_unique_field_definition_names(): def no_values(): assert_valid( """ enum SomeEnum """ ) def one_value(): assert_valid( """ enum SomeEnum { FOO } """ ) def multiple_values(): assert_valid( """ enum SomeEnum { FOO BAR } """ ) def duplicate_values_inside_the_same_enum_definition(): assert_errors( """ enum SomeEnum { FOO BAR FOO } """, [ { "message": "Enum value 'SomeEnum.FOO' can only be defined once.", "locations": [(3, 15), (5, 15)], }, ], ) def extend_enum_with_new_value(): assert_valid( """ enum SomeEnum { FOO } extend enum SomeEnum { BAR } extend enum SomeEnum { BAZ } """ ) def extend_enum_with_duplicate_value(): assert_errors( """ extend enum SomeEnum { FOO } enum SomeEnum { FOO } """, [ { "message": "Enum value 'SomeEnum.FOO' can only be defined once.", "locations": [(3, 15), (6, 15)], }, ], ) def duplicate_value_inside_extension(): assert_errors( """ enum SomeEnum extend enum SomeEnum { FOO BAR FOO } """, [ { "message": "Enum value 'SomeEnum.FOO' can only be defined once.", "locations": [(4, 15), (6, 15)], }, ], ) def duplicate_value_inside_different_extension(): assert_errors( """ enum SomeEnum extend enum SomeEnum { FOO } extend enum SomeEnum { FOO } """, [ { "message": "Enum value 'SomeEnum.FOO' can only be defined once.", "locations": [(4, 15), (7, 15)], }, ], ) def adding_new_value_to_the_enum_inside_existing_schema(): schema = build_schema("enum SomeEnum") sdl = """ extend enum SomeEnum { FOO } """ assert_valid(sdl, schema=schema) def adding_conflicting_value_to_existing_schema_twice(): schema = build_schema( """ enum SomeEnum { FOO } """ ) sdl = """ extend enum SomeEnum { FOO } extend enum SomeEnum { 
FOO } """ assert_errors( sdl, [ { "message": "Enum value 'SomeEnum.FOO' already exists in the schema." " It cannot also be defined in this type extension.", "locations": [(3, 15)], }, { "message": "Enum value 'SomeEnum.FOO' already exists in the schema." " It cannot also be defined in this type extension.", "locations": [(6, 15)], }, ], schema, ) def adding_enum_values_to_existing_schema_twice(): schema = build_schema("enum SomeEnum") sdl = """ extend enum SomeEnum { FOO } extend enum SomeEnum { FOO } """ assert_errors( sdl, [ { "message": "Enum value 'SomeEnum.FOO' can only be defined once.", "locations": [(3, 15), (6, 15)], }, ], schema, )
{ "repo_name": "graphql-python/graphql-core", "path": "tests/validation/test_unique_enum_value_names.py", "copies": "1", "size": "4671", "license": "mit", "hash": -110674244198395920, "line_mean": 23.4554973822, "line_max": 88, "alpha_frac": 0.3868550632, "autogenerated": false, "ratio": 5.178492239467849, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 1, "avg_score": 0.0004829285443603386, "num_lines": 191 }
from functools import partial from graphql.utilities import build_schema from graphql.validation.rules.unique_field_definition_names import ( UniqueFieldDefinitionNamesRule, ) from .harness import assert_sdl_validation_errors assert_errors = partial(assert_sdl_validation_errors, UniqueFieldDefinitionNamesRule) assert_valid = partial(assert_errors, errors=[]) def describe_validate_unique_field_definition_names(): def no_fields(): assert_valid( """ type SomeObject interface SomeInterface input SomeInputObject """ ) def one_field(): assert_valid( """ type SomeObject { foo: String } interface SomeInterface { foo: String } input SomeInputObject { foo: String } """ ) def multiple_fields(): assert_valid( """ type SomeObject { foo: String bar: String } interface SomeInterface { foo: String bar: String } input SomeInputObject { foo: String bar: String } """ ) def duplicate_fields_inside_the_same_type_definition(): assert_errors( """ type SomeObject { foo: String bar: String foo: String } interface SomeInterface { foo: String bar: String foo: String } input SomeInputObject { foo: String bar: String foo: String } """, [ { "message": "Field 'SomeObject.foo' can only be defined once.", "locations": [(3, 15), (5, 15)], }, { "message": "Field 'SomeInterface.foo' can only be defined once.", "locations": [(9, 15), (11, 15)], }, { "message": "Field 'SomeInputObject.foo' can only be defined once.", "locations": [(15, 15), (17, 15)], }, ], ) def extend_type_with_new_field(): assert_valid( """ type SomeObject { foo: String } extend type SomeObject { bar: String } extend type SomeObject { baz: String } interface SomeInterface { foo: String } extend interface SomeInterface { bar: String } extend interface SomeInterface { baz: String } input SomeInputObject { foo: String } extend input SomeInputObject { bar: String } extend input SomeInputObject { baz: String } """ ) def extend_type_with_duplicate_field(): assert_errors( """ extend type SomeObject { foo: String } type SomeObject { foo: 
String } extend interface SomeInterface { foo: String } interface SomeInterface { foo: String } extend input SomeInputObject { foo: String } input SomeInputObject { foo: String } """, [ { "message": "Field 'SomeObject.foo' can only be defined once.", "locations": [(3, 15), (6, 15)], }, { "message": "Field 'SomeInterface.foo' can only be defined once.", "locations": [(10, 15), (13, 15)], }, { "message": "Field 'SomeInputObject.foo' can only be defined once.", "locations": [(17, 15), (20, 15)], }, ], ) def duplicate_field_inside_extension(): assert_errors( """ type SomeObject extend type SomeObject { foo: String bar: String foo: String } interface SomeInterface extend interface SomeInterface { foo: String bar: String foo: String } input SomeInputObject extend input SomeInputObject { foo: String bar: String foo: String } """, [ { "message": "Field 'SomeObject.foo' can only be defined once.", "locations": [(4, 15), (6, 15)], }, { "message": "Field 'SomeInterface.foo' can only be defined once.", "locations": [(11, 15), (13, 15)], }, { "message": "Field 'SomeInputObject.foo' can only be defined once.", "locations": [(18, 15), (20, 15)], }, ], ) def duplicate_field_inside_different_extension(): assert_errors( """ type SomeObject extend type SomeObject { foo: String } extend type SomeObject { foo: String } interface SomeInterface extend interface SomeInterface { foo: String } extend interface SomeInterface { foo: String } input SomeInputObject extend input SomeInputObject { foo: String } extend input SomeInputObject { foo: String } """, [ { "message": "Field 'SomeObject.foo' can only be defined once.", "locations": [(4, 15), (7, 15)], }, { "message": "Field 'SomeInterface.foo' can only be defined once.", "locations": [(12, 15), (15, 15)], }, { "message": "Field 'SomeInputObject.foo' can only be defined once.", "locations": [(20, 15), (23, 15)], }, ], ) def adding_new_field_to_the_type_inside_existing_schema(): schema = build_schema( """ type SomeObject interface 
SomeInterface input SomeInputObject """ ) sdl = """ extend type SomeObject { foo: String } extend interface SomeInterface { foo: String } extend input SomeInputObject { foo: String } """ assert_valid(sdl, schema=schema) def adding_conflicting_fields_to_existing_schema_twice(): schema = build_schema( """ type SomeObject { foo: String } interface SomeInterface { foo: String } input SomeInputObject { foo: String } """ ) sdl = """ extend type SomeObject { foo: String } extend interface SomeInterface { foo: String } extend input SomeInputObject { foo: String } extend type SomeObject { foo: String } extend interface SomeInterface { foo: String } extend input SomeInputObject { foo: String } """ assert_errors( sdl, [ { "message": "Field 'SomeObject.foo' already exists in the schema." " It cannot also be defined in this type extension.", "locations": [(3, 15)], }, { "message": "Field 'SomeInterface.foo' already exists in the schema." " It cannot also be defined in this type extension.", "locations": [(6, 15)], }, { "message": "Field 'SomeInputObject.foo'" " already exists in the schema." " It cannot also be defined in this type extension.", "locations": [(9, 15)], }, { "message": "Field 'SomeObject.foo' already exists in the schema." " It cannot also be defined in this type extension.", "locations": [(13, 15)], }, { "message": "Field 'SomeInterface.foo'" " already exists in the schema." " It cannot also be defined in this type extension.", "locations": [(16, 15)], }, { "message": "Field 'SomeInputObject.foo'" " already exists in the schema." 
" It cannot also be defined in this type extension.", "locations": [(19, 15)], }, ], schema, ) def adding_fields_to_existing_schema_twice(): schema = build_schema( """ type SomeObject interface SomeInterface input SomeInputObject """ ) sdl = """ extend type SomeObject { foo: String } extend type SomeObject { foo: String } extend interface SomeInterface { foo: String } extend interface SomeInterface { foo: String } extend input SomeInputObject { foo: String } extend input SomeInputObject { foo: String } """ assert_errors( sdl, [ { "message": "Field 'SomeObject.foo' can only be defined once.", "locations": [(3, 15), (6, 15)], }, { "message": "Field 'SomeInterface.foo' can only be defined once.", "locations": [(10, 15), (13, 15)], }, { "message": "Field 'SomeInputObject.foo' can only be defined once.", "locations": [(17, 15), (20, 15)], }, ], schema, )
{ "repo_name": "graphql-python/graphql-core", "path": "tests/validation/test_unique_field_definition_names.py", "copies": "1", "size": "11247", "license": "mit", "hash": -6381898493584312000, "line_mean": 26.6339066339, "line_max": 88, "alpha_frac": 0.4017071219, "autogenerated": false, "ratio": 5.629129129129129, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.653083625102913, "avg_score": null, "num_lines": null }
from functools import partial from graphql.utilities import build_schema from graphql.validation.rules.unique_operation_types import UniqueOperationTypesRule from .harness import assert_sdl_validation_errors assert_errors = partial(assert_sdl_validation_errors, UniqueOperationTypesRule) assert_valid = partial(assert_errors, errors=[]) def describe_validate_unique_operation_types(): def no_schema_definition(): assert_valid( """ type Foo """ ) def schema_definition_with_all_types(): assert_valid( """ type Foo schema { query: Foo mutation: Foo subscription: Foo } """ ) def schema_definition_with_single_extension(): assert_valid( """ type Foo schema { query: Foo } extend schema { mutation: Foo subscription: Foo } """ ) def schema_definition_with_separate_extensions(): assert_valid( """ type Foo schema { query: Foo } extend schema { mutation: Foo } extend schema { subscription: Foo } """ ) def extend_schema_before_definition(): assert_valid( """ type Foo extend schema { mutation: Foo } extend schema { subscription: Foo } schema { query: Foo } """ ) def duplicate_operation_types_inside_single_schema_definition(): assert_errors( """ type Foo schema { query: Foo mutation: Foo subscription: Foo query: Foo mutation: Foo subscription: Foo } """, [ { "message": "There can be only one query type in schema.", "locations": [(5, 15), (9, 15)], }, { "message": "There can be only one mutation type in schema.", "locations": [(6, 15), (10, 15)], }, { "message": "There can be only one subscription type in schema.", "locations": [(7, 15), (11, 15)], }, ], ) def duplicate_operation_types_inside_schema_extension(): assert_errors( """ type Foo schema { query: Foo mutation: Foo subscription: Foo } extend schema { query: Foo mutation: Foo subscription: Foo } """, [ { "message": "There can be only one query type in schema.", "locations": [(5, 15), (11, 15)], }, { "message": "There can be only one mutation type in schema.", "locations": [(6, 15), (12, 15)], }, { "message": "There can be only 
one subscription type in schema.", "locations": [(7, 15), (13, 15)], }, ], ) def duplicate_operation_types_inside_schema_extension_twice(): assert_errors( """ type Foo schema { query: Foo mutation: Foo subscription: Foo } extend schema { query: Foo mutation: Foo subscription: Foo } extend schema { query: Foo mutation: Foo subscription: Foo } """, [ { "message": "There can be only one query type in schema.", "locations": [(5, 15), (11, 15)], }, { "message": "There can be only one mutation type in schema.", "locations": [(6, 15), (12, 15)], }, { "message": "There can be only one subscription type in schema.", "locations": [(7, 15), (13, 15)], }, { "message": "There can be only one query type in schema.", "locations": [(5, 15), (17, 15)], }, { "message": "There can be only one mutation type in schema.", "locations": [(6, 15), (18, 15)], }, { "message": "There can be only one subscription type in schema.", "locations": [(7, 15), (19, 15)], }, ], ) def duplicate_operation_types_inside_second_schema_extension(): assert_errors( """ type Foo schema { query: Foo } extend schema { mutation: Foo subscription: Foo } extend schema { query: Foo mutation: Foo subscription: Foo } """, [ { "message": "There can be only one query type in schema.", "locations": [(5, 15), (14, 15)], }, { "message": "There can be only one mutation type in schema.", "locations": [(9, 15), (15, 15)], }, { "message": "There can be only one subscription type in schema.", "locations": [(10, 15), (16, 15)], }, ], ) def define_schema_inside_extension_sdl(): schema = build_schema("type Foo") sdl = """ schema { query: Foo mutation: Foo subscription: Foo } """ assert_valid(sdl, schema=schema) def define_and_extend_schema_inside_extension_sdl(): schema = build_schema("type Foo") sdl = """ schema { query: Foo } extend schema { mutation: Foo } extend schema { subscription: Foo } """ assert_valid(sdl, schema=schema) def adding_new_operation_types_to_existing_schema(): schema = build_schema("type Query") sdl = """ 
extend schema { mutation: Foo } extend schema { subscription: Foo } """ assert_valid(sdl, schema=schema) def adding_conflicting_operation_types_to_existing_schema(): schema = build_schema( """ type Query type Mutation type Subscription type Foo """ ) sdl = """ extend schema { query: Foo mutation: Foo subscription: Foo } """ assert_errors( sdl, [ { "message": "Type for query already defined in the schema." " It cannot be redefined.", "locations": [(3, 15)], }, { "message": "Type for mutation already defined in the schema." " It cannot be redefined.", "locations": [(4, 15)], }, { "message": "Type for subscription already defined in the schema." " It cannot be redefined.", "locations": [(5, 15)], }, ], schema, ) def adding_conflicting_operation_types_to_existing_schema_twice(): schema = build_schema( """ type Query type Mutation type Subscription """ ) sdl = """ extend schema { query: Foo mutation: Foo subscription: Foo } extend schema { query: Foo mutation: Foo subscription: Foo } """ assert_errors( sdl, [ { "message": "Type for query already defined in the schema." " It cannot be redefined.", "locations": [(3, 15)], }, { "message": "Type for mutation already defined in the schema." " It cannot be redefined.", "locations": [(4, 15)], }, { "message": "Type for subscription already defined in the schema." " It cannot be redefined.", "locations": [(5, 15)], }, { "message": "Type for query already defined in the schema." " It cannot be redefined.", "locations": [(9, 15)], }, { "message": "Type for mutation already defined in the schema." " It cannot be redefined.", "locations": [(10, 15)], }, { "message": "Type for subscription already defined in the schema." " It cannot be redefined.", "locations": [(11, 15)], }, ], schema, )
{ "repo_name": "graphql-python/graphql-core", "path": "tests/validation/test_unique_operation_types.py", "copies": "1", "size": "9879", "license": "mit", "hash": 6091515065104204000, "line_mean": 27.0653409091, "line_max": 85, "alpha_frac": 0.396598846, "autogenerated": false, "ratio": 5.546883773161145, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.6443482619161146, "avg_score": null, "num_lines": null }