Dataset schema (one record per source file):

  repo_name : string, lengths 5 to 100
  path      : string, lengths 4 to 294
  copies    : string, 990 distinct values
  size      : string, lengths 4 to 7
  content   : string, lengths 666 to 1M
  license   : string, 15 distinct values
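Each record below carries these six fields. A minimal sketch of consuming such a schema with the Hugging Face datasets library; the dataset id "example/code-corpus" is a hypothetical placeholder, since this corpus's real name is not given here:

# Hedged sketch: "example/code-corpus" is a placeholder id, not this corpus's actual name.
from datasets import load_dataset

ds = load_dataset("example/code-corpus", split="train", streaming=True)
for row in ds:
    # Each row exposes the six columns from the schema above.
    print(row["repo_name"], row["path"], row["license"], len(row["content"]))
    break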
pfnet/chainer
chainer/functions/math/sqrt.py
8
2385
import numpy

from chainer import backend
from chainer.backends import cuda
from chainer import function_node
from chainer import utils
from chainer.utils import type_check
import chainerx


class Sqrt(function_node.FunctionNode):

    @property
    def label(self):
        return 'sqrt'

    def check_type_forward(self, in_types):
        type_check._argname(in_types, ('x',))
        type_check.expect(in_types[0].dtype.kind == 'f')

    def forward_chainerx(self, x):
        return chainerx.sqrt(x[0]),

    def forward(self, x):
        self.retain_outputs((0,))
        xp = backend.get_array_module(*x)
        return utils.force_array(xp.sqrt(x[0], dtype=x[0].dtype)),

    def backward(self, indexes, grad_outputs):
        gx = self.get_retained_outputs()[0]
        gy = grad_outputs[0]
        return gy / (gx * 2.0),


class RsqrtGPU(function_node.FunctionNode):

    @property
    def label(self):
        return 'rsqrt'

    def check_type_forward(self, in_types):
        type_check._argname(in_types, ('x',))
        type_check.expect(in_types[0].dtype.kind == 'f')

    def forward_gpu(self, inputs):
        self.retain_outputs((0,))
        x, = inputs
        out = cuda.cupyx.rsqrt(x, dtype=x.dtype)
        return utils.force_array(out),

    def backward(self, indexes, grad_outputs):
        y, = self.get_retained_outputs()
        gy, = grad_outputs
        return gy * (y ** 3) * -0.5,


def sqrt(x):
    """Elementwise square root function.

    .. math:: y_i = \\sqrt x_i.

    If the value of :math:`x_i` is negative, it returns ``NaN`` for
    :math:`y_i`, in line with the underlying NumPy and CuPy specification.

    Args:
        x (:class:`~chainer.Variable` or :ref:`ndarray`): Input variable.

    Returns:
        ~chainer.Variable: Output variable.
    """
    return Sqrt().apply((x,))[0]


def rsqrt(x):
    """Computes the elementwise reciprocal of the square root of input :math:`x_i`.

    .. math:: y_i = {1 \\over \\sqrt x_i}.

    Args:
        x (:class:`~chainer.Variable` or :ref:`ndarray`): Input variable.

    Returns:
        ~chainer.Variable: Output variable.

    .. seealso:: :func:`~chainer.functions.sqrt`
    """
    xp = backend.get_array_module(x)
    if xp is numpy or xp is chainerx:
        return 1.0 / sqrt(x)
    # CuPy provides `rsqrt`, which is faster than `1.0 / sqrt(x)`.
    return RsqrtGPU().apply((x,))[0]
mit
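A minimal usage sketch for the two public functions defined above, assuming Chainer and NumPy are installed; the commented values follow from y = sqrt(x) and dy/dx = 1/(2*sqrt(x)):

import numpy as np
import chainer.functions as F
from chainer import Variable

x = Variable(np.array([1.0, 4.0, 9.0], dtype=np.float32))

y = F.sqrt(x)    # [1., 2., 3.]
r = F.rsqrt(x)   # [1., 0.5, 0.333...]

# The backward pass uses the retained output: dy/dx = 1 / (2 * sqrt(x)).
y.grad = np.ones_like(y.array)
y.backward()
print(x.grad)    # [0.5, 0.25, 0.1666...]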
markhice/ghost-casper
node_modules_bak/grunt-docker/node_modules/docker/node_modules/pygmentize-bundled/vendor/pygments/build-2.7/pygments/lexers/hdl.py
363
16209
# -*- coding: utf-8 -*-
"""
    pygments.lexers.hdl
    ~~~~~~~~~~~~~~~~~~~

    Lexers for hardware description languages.

    :copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
    :license: BSD, see LICENSE for details.
"""

import re

from pygments.lexer import RegexLexer, bygroups, include, using, this
from pygments.token import \
     Text, Comment, Operator, Keyword, Name, String, Number, Punctuation, \
     Error

__all__ = ['VerilogLexer', 'SystemVerilogLexer', 'VhdlLexer']


class VerilogLexer(RegexLexer):
    """
    For verilog source code with preprocessor directives.

    *New in Pygments 1.4.*
    """
    name = 'verilog'
    aliases = ['verilog', 'v']
    filenames = ['*.v']
    mimetypes = ['text/x-verilog']

    #: optional Comment or Whitespace
    _ws = r'(?:\s|//.*?\n|/[*].*?[*]/)+'

    tokens = {
        'root': [
            (r'^\s*`define', Comment.Preproc, 'macro'),
            (r'\n', Text),
            (r'\s+', Text),
            (r'\\\n', Text),  # line continuation
            (r'/(\\\n)?/(\n|(.|\n)*?[^\\]\n)', Comment.Single),
            (r'/(\\\n)?[*](.|\n)*?[*](\\\n)?/', Comment.Multiline),
            (r'[{}#@]', Punctuation),
            (r'L?"', String, 'string'),
            (r"L?'(\\.|\\[0-7]{1,3}|\\x[a-fA-F0-9]{1,2}|[^\\\'\n])'", String.Char),
            (r'(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+[lL]?', Number.Float),
            (r'(\d+\.\d*|\.\d+|\d+[fF])[fF]?', Number.Float),
            (r'([0-9]+)|(\'h)[0-9a-fA-F]+', Number.Hex),
            (r'([0-9]+)|(\'b)[0-1]+', Number.Hex),  # should be binary
            (r'([0-9]+)|(\'d)[0-9]+', Number.Integer),
            (r'([0-9]+)|(\'o)[0-7]+', Number.Oct),
            (r'\'[01xz]', Number),
            (r'\d+[Ll]?', Number.Integer),
            (r'\*/', Error),
            (r'[~!%^&*+=|?:<>/-]', Operator),
            (r'[()\[\],.;\']', Punctuation),
            (r'`[a-zA-Z_][a-zA-Z0-9_]*', Name.Constant),

            (r'^(\s*)(package)(\s+)', bygroups(Text, Keyword.Namespace, Text)),
            (r'^(\s*)(import)(\s+)', bygroups(Text, Keyword.Namespace, Text),
             'import'),

            (r'(always|always_comb|always_ff|always_latch|and|assign|automatic|'
             r'begin|break|buf|bufif0|bufif1|case|casex|casez|cmos|const|'
             r'continue|deassign|default|defparam|disable|do|edge|else|end|endcase|'
             r'endfunction|endgenerate|endmodule|endpackage|endprimitive|endspecify|'
             r'endtable|endtask|enum|event|final|for|force|forever|fork|function|'
             r'generate|genvar|highz0|highz1|if|initial|inout|input|'
             r'integer|join|large|localparam|macromodule|medium|module|'
             r'nand|negedge|nmos|nor|not|notif0|notif1|or|output|packed|'
             r'parameter|pmos|posedge|primitive|pull0|pull1|pulldown|pullup|rcmos|'
             r'ref|release|repeat|return|rnmos|rpmos|rtran|rtranif0|'
             r'rtranif1|scalared|signed|small|specify|specparam|strength|'
             r'string|strong0|strong1|struct|table|task|'
             r'tran|tranif0|tranif1|type|typedef|'
             r'unsigned|var|vectored|void|wait|weak0|weak1|while|'
             r'xnor|xor)\b', Keyword),

            (r'`(accelerate|autoexpand_vectornets|celldefine|default_nettype|'
             r'else|elsif|endcelldefine|endif|endprotect|endprotected|'
             r'expand_vectornets|ifdef|ifndef|include|noaccelerate|noexpand_vectornets|'
             r'noremove_gatenames|noremove_netnames|nounconnected_drive|'
             r'protect|protected|remove_gatenames|remove_netnames|resetall|'
             r'timescale|unconnected_drive|undef)\b', Comment.Preproc),

            (r'\$(bits|bitstoreal|bitstoshortreal|countdrivers|display|fclose|'
             r'fdisplay|finish|floor|fmonitor|fopen|fstrobe|fwrite|'
             r'getpattern|history|incsave|input|itor|key|list|log|'
             r'monitor|monitoroff|monitoron|nokey|nolog|printtimescale|'
             r'random|readmemb|readmemh|realtime|realtobits|reset|reset_count|'
             r'reset_value|restart|rtoi|save|scale|scope|shortrealtobits|'
             r'showscopes|showvariables|showvars|sreadmemb|sreadmemh|'
             r'stime|stop|strobe|time|timeformat|write)\b', Name.Builtin),

            (r'(byte|shortint|int|longint|integer|time|'
             r'bit|logic|reg|'
             r'supply0|supply1|tri|triand|trior|tri0|tri1|trireg|uwire|wire|wand|wor|'
             r'shortreal|real|realtime)\b', Keyword.Type),
            ('[a-zA-Z_][a-zA-Z0-9_]*:(?!:)', Name.Label),
            ('[a-zA-Z_][a-zA-Z0-9_]*', Name),
        ],
        'string': [
            (r'"', String, '#pop'),
            (r'\\([\\abfnrtv"\']|x[a-fA-F0-9]{2,4}|[0-7]{1,3})', String.Escape),
            (r'[^\\"\n]+', String),  # all other characters
            (r'\\\n', String),  # line continuation
            (r'\\', String),  # stray backslash
        ],
        'macro': [
            (r'[^/\n]+', Comment.Preproc),
            (r'/[*](.|\n)*?[*]/', Comment.Multiline),
            (r'//.*?\n', Comment.Single, '#pop'),
            (r'/', Comment.Preproc),
            (r'(?<=\\)\n', Comment.Preproc),
            (r'\n', Comment.Preproc, '#pop'),
        ],
        'import': [
            (r'[a-zA-Z0-9_:]+\*?', Name.Namespace, '#pop')
        ]
    }

    def get_tokens_unprocessed(self, text):
        for index, token, value in \
            RegexLexer.get_tokens_unprocessed(self, text):
            # Convention: mark all upper case names as constants
            if token is Name:
                if value.isupper():
                    token = Name.Constant
            yield index, token, value


class SystemVerilogLexer(RegexLexer):
    """
    Extends verilog lexer to recognise all SystemVerilog keywords from IEEE
    1800-2009 standard.

    *New in Pygments 1.5.*
    """
    name = 'systemverilog'
    aliases = ['systemverilog', 'sv']
    filenames = ['*.sv', '*.svh']
    mimetypes = ['text/x-systemverilog']

    #: optional Comment or Whitespace
    _ws = r'(?:\s|//.*?\n|/[*].*?[*]/)+'

    tokens = {
        'root': [
            (r'^\s*`define', Comment.Preproc, 'macro'),
            (r'^(\s*)(package)(\s+)', bygroups(Text, Keyword.Namespace, Text)),
            (r'^(\s*)(import)(\s+)', bygroups(Text, Keyword.Namespace, Text),
             'import'),

            (r'\n', Text),
            (r'\s+', Text),
            (r'\\\n', Text),  # line continuation
            (r'/(\\\n)?/(\n|(.|\n)*?[^\\]\n)', Comment.Single),
            (r'/(\\\n)?[*](.|\n)*?[*](\\\n)?/', Comment.Multiline),
            (r'[{}#@]', Punctuation),
            (r'L?"', String, 'string'),
            (r"L?'(\\.|\\[0-7]{1,3}|\\x[a-fA-F0-9]{1,2}|[^\\\'\n])'", String.Char),

            (r'(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+[lL]?', Number.Float),
            (r'(\d+\.\d*|\.\d+|\d+[fF])[fF]?', Number.Float),

            (r'([0-9]+)|(\'h)[0-9a-fA-F]+', Number.Hex),
            (r'([0-9]+)|(\'b)[0-1]+', Number.Hex),  # should be binary
            (r'([0-9]+)|(\'d)[0-9]+', Number.Integer),
            (r'([0-9]+)|(\'o)[0-7]+', Number.Oct),
            (r'\'[01xz]', Number),
            (r'\d+[Ll]?', Number.Integer),

            (r'\*/', Error),

            (r'[~!%^&*+=|?:<>/-]', Operator),
            (r'[()\[\],.;\']', Punctuation),
            (r'`[a-zA-Z_][a-zA-Z0-9_]*', Name.Constant),

            (r'(accept_on|alias|always|always_comb|always_ff|always_latch|'
             r'and|assert|assign|assume|automatic|before|begin|bind|bins|'
             r'binsof|bit|break|buf|bufif0|bufif1|byte|case|casex|casez|'
             r'cell|chandle|checker|class|clocking|cmos|config|const|constraint|'
             r'context|continue|cover|covergroup|coverpoint|cross|deassign|'
             r'default|defparam|design|disable|dist|do|edge|else|end|endcase|'
             r'endchecker|endclass|endclocking|endconfig|endfunction|endgenerate|'
             r'endgroup|endinterface|endmodule|endpackage|endprimitive|'
             r'endprogram|endproperty|endsequence|endspecify|endtable|'
             r'endtask|enum|event|eventually|expect|export|extends|extern|'
             r'final|first_match|for|force|foreach|forever|fork|forkjoin|'
             r'function|generate|genvar|global|highz0|highz1|if|iff|ifnone|'
             r'ignore_bins|illegal_bins|implies|import|incdir|include|'
             r'initial|inout|input|inside|instance|int|integer|interface|'
             r'intersect|join|join_any|join_none|large|let|liblist|library|'
             r'local|localparam|logic|longint|macromodule|matches|medium|'
             r'modport|module|nand|negedge|new|nexttime|nmos|nor|noshowcancelled|'
             r'not|notif0|notif1|null|or|output|package|packed|parameter|'
             r'pmos|posedge|primitive|priority|program|property|protected|'
             r'pull0|pull1|pulldown|pullup|pulsestyle_ondetect|pulsestyle_onevent|'
             r'pure|rand|randc|randcase|randsequence|rcmos|real|realtime|'
             r'ref|reg|reject_on|release|repeat|restrict|return|rnmos|'
             r'rpmos|rtran|rtranif0|rtranif1|s_always|s_eventually|s_nexttime|'
             r's_until|s_until_with|scalared|sequence|shortint|shortreal|'
             r'showcancelled|signed|small|solve|specify|specparam|static|'
             r'string|strong|strong0|strong1|struct|super|supply0|supply1|'
             r'sync_accept_on|sync_reject_on|table|tagged|task|this|throughout|'
             r'time|timeprecision|timeunit|tran|tranif0|tranif1|tri|tri0|'
             r'tri1|triand|trior|trireg|type|typedef|union|unique|unique0|'
             r'unsigned|until|until_with|untyped|use|uwire|var|vectored|'
             r'virtual|void|wait|wait_order|wand|weak|weak0|weak1|while|'
             r'wildcard|wire|with|within|wor|xnor|xor)\b', Keyword),

            (r'(`__FILE__|`__LINE__|`begin_keywords|`celldefine|`default_nettype|'
             r'`define|`else|`elsif|`end_keywords|`endcelldefine|`endif|'
             r'`ifdef|`ifndef|`include|`line|`nounconnected_drive|`pragma|'
             r'`resetall|`timescale|`unconnected_drive|`undef|`undefineall)\b',
             Comment.Preproc),

            (r'(\$display|\$displayb|\$displayh|\$displayo|\$dumpall|\$dumpfile|'
             r'\$dumpflush|\$dumplimit|\$dumpoff|\$dumpon|\$dumpports|'
             r'\$dumpportsall|\$dumpportsflush|\$dumpportslimit|\$dumpportsoff|'
             r'\$dumpportson|\$dumpvars|\$fclose|\$fdisplay|\$fdisplayb|'
             r'\$fdisplayh|\$fdisplayo|\$feof|\$ferror|\$fflush|\$fgetc|'
             r'\$fgets|\$fmonitor|\$fmonitorb|\$fmonitorh|\$fmonitoro|'
             r'\$fopen|\$fread|\$fscanf|\$fseek|\$fstrobe|\$fstrobeb|\$fstrobeh|'
             r'\$fstrobeo|\$ftell|\$fwrite|\$fwriteb|\$fwriteh|\$fwriteo|'
             r'\$monitor|\$monitorb|\$monitorh|\$monitoro|\$monitoroff|'
             r'\$monitoron|\$plusargs|\$readmemb|\$readmemh|\$rewind|\$sformat|'
             r'\$sformatf|\$sscanf|\$strobe|\$strobeb|\$strobeh|\$strobeo|'
             r'\$swrite|\$swriteb|\$swriteh|\$swriteo|\$test|\$ungetc|'
             r'\$value\$plusargs|\$write|\$writeb|\$writeh|\$writememb|'
             r'\$writememh|\$writeo)\b', Name.Builtin),

            (r'(class)(\s+)', bygroups(Keyword, Text), 'classname'),

            (r'(byte|shortint|int|longint|integer|time|'
             r'bit|logic|reg|'
             r'supply0|supply1|tri|triand|trior|tri0|tri1|trireg|uwire|wire|wand|wor|'
             r'shortreal|real|realtime)\b', Keyword.Type),

            ('[a-zA-Z_][a-zA-Z0-9_]*:(?!:)', Name.Label),
            ('[a-zA-Z_][a-zA-Z0-9_]*', Name),
        ],
        'classname': [
            (r'[a-zA-Z_][a-zA-Z0-9_]*', Name.Class, '#pop'),
        ],
        'string': [
            (r'"', String, '#pop'),
            (r'\\([\\abfnrtv"\']|x[a-fA-F0-9]{2,4}|[0-7]{1,3})', String.Escape),
            (r'[^\\"\n]+', String),  # all other characters
            (r'\\\n', String),  # line continuation
            (r'\\', String),  # stray backslash
        ],
        'macro': [
            (r'[^/\n]+', Comment.Preproc),
            (r'/[*](.|\n)*?[*]/', Comment.Multiline),
            (r'//.*?\n', Comment.Single, '#pop'),
            (r'/', Comment.Preproc),
            (r'(?<=\\)\n', Comment.Preproc),
            (r'\n', Comment.Preproc, '#pop'),
        ],
        'import': [
            (r'[a-zA-Z0-9_:]+\*?', Name.Namespace, '#pop')
        ]
    }

    def get_tokens_unprocessed(self, text):
        for index, token, value in \
            RegexLexer.get_tokens_unprocessed(self, text):
            # Convention: mark all upper case names as constants
            if token is Name:
                if value.isupper():
                    token = Name.Constant
            yield index, token, value

    def analyse_text(text):
        if text.startswith('//') or text.startswith('/*'):
            return 0.5


class VhdlLexer(RegexLexer):
    """
    For VHDL source code.

    *New in Pygments 1.5.*
    """
    name = 'vhdl'
    aliases = ['vhdl']
    filenames = ['*.vhdl', '*.vhd']
    mimetypes = ['text/x-vhdl']
    flags = re.MULTILINE | re.IGNORECASE

    tokens = {
        'root': [
            (r'\n', Text),
            (r'\s+', Text),
            (r'\\\n', Text),  # line continuation
            (r'--(?![!#$%&*+./<=>?@\^|_~]).*?$', Comment.Single),
            (r"'(U|X|0|1|Z|W|L|H|-)'", String.Char),
            (r'[~!%^&*+=|?:<>/-]', Operator),
            (r"'[a-zA-Z_][a-zA-Z0-9_]*", Name.Attribute),
            (r'[()\[\],.;\']', Punctuation),
            (r'"[^\n\\]*"', String),

            (r'(library)(\s+)([a-zA-Z_][a-zA-Z0-9_]*)',
             bygroups(Keyword, Text, Name.Namespace)),
            (r'(use)(\s+)(entity)', bygroups(Keyword, Text, Keyword)),
            (r'(use)(\s+)([a-zA-Z_][\.a-zA-Z0-9_]*)',
             bygroups(Keyword, Text, Name.Namespace)),
            (r'(entity|component)(\s+)([a-zA-Z_][a-zA-Z0-9_]*)',
             bygroups(Keyword, Text, Name.Class)),
            (r'(architecture|configuration)(\s+)([a-zA-Z_][a-zA-Z0-9_]*)(\s+)'
             r'(of)(\s+)([a-zA-Z_][a-zA-Z0-9_]*)(\s+)(is)',
             bygroups(Keyword, Text, Name.Class, Text, Keyword, Text,
                      Name.Class, Text, Keyword)),

            (r'(end)(\s+)', bygroups(using(this), Text), 'endblock'),

            include('types'),
            include('keywords'),
            include('numbers'),

            (r'[a-zA-Z_][a-zA-Z0-9_]*', Name),
        ],
        'endblock': [
            include('keywords'),
            (r'[a-zA-Z_][a-zA-Z0-9_]*', Name.Class),
            (r'(\s+)', Text),
            (r';', Punctuation, '#pop'),
        ],
        'types': [
            (r'(boolean|bit|character|severity_level|integer|time|delay_length|'
             r'natural|positive|string|bit_vector|file_open_kind|'
             r'file_open_status|std_ulogic|std_ulogic_vector|std_logic|'
             r'std_logic_vector)\b', Keyword.Type),
        ],
        'keywords': [
            (r'(abs|access|after|alias|all|and|'
             r'architecture|array|assert|attribute|begin|block|'
             r'body|buffer|bus|case|component|configuration|'
             r'constant|disconnect|downto|else|elsif|end|'
             r'entity|exit|file|for|function|generate|'
             r'generic|group|guarded|if|impure|in|'
             r'inertial|inout|is|label|library|linkage|'
             r'literal|loop|map|mod|nand|new|'
             r'next|nor|not|null|of|on|'
             r'open|or|others|out|package|port|'
             r'postponed|procedure|process|pure|range|record|'
             r'register|reject|return|rol|ror|select|'
             r'severity|signal|shared|sla|sli|sra|'
             r'srl|subtype|then|to|transport|type|'
             r'units|until|use|variable|wait|when|'
             r'while|with|xnor|xor)\b', Keyword),
        ],
        'numbers': [
            (r'\d{1,2}#[0-9a-fA-F_]+#?', Number.Integer),
            (r'[0-1_]+(\.[0-1_])', Number.Integer),
            (r'\d+', Number.Integer),
            (r'(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+', Number.Float),
            (r'H"[0-9a-fA-F_]+"', Number.Oct),
            (r'O"[0-7_]+"', Number.Oct),
            (r'B"[0-1_]+"', Number.Oct),
        ],
    }
mit
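A short sketch showing the VerilogLexer above in use through Pygments' standard highlight API (import path as in a normal Pygments install, rather than the vendored build directory this record comes from):

from pygments import highlight
from pygments.lexers.hdl import VerilogLexer
from pygments.formatters import HtmlFormatter

verilog_src = '''
module counter(input clk, output reg [3:0] q);
  always @(posedge clk) q <= q + 4'd1;
endmodule
'''

# Tokenize with the 'root' rules defined above and render as HTML.
print(highlight(verilog_src, VerilogLexer(), HtmlFormatter()))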
iansf/sky_engine
third_party/jinja2/parser.py
637
35186
# -*- coding: utf-8 -*- """ jinja2.parser ~~~~~~~~~~~~~ Implements the template parser. :copyright: (c) 2010 by the Jinja Team. :license: BSD, see LICENSE for more details. """ from jinja2 import nodes from jinja2.exceptions import TemplateSyntaxError, TemplateAssertionError from jinja2.lexer import describe_token, describe_token_expr from jinja2._compat import next, imap #: statements that callinto _statement_keywords = frozenset(['for', 'if', 'block', 'extends', 'print', 'macro', 'include', 'from', 'import', 'set']) _compare_operators = frozenset(['eq', 'ne', 'lt', 'lteq', 'gt', 'gteq']) class Parser(object): """This is the central parsing class Jinja2 uses. It's passed to extensions and can be used to parse expressions or statements. """ def __init__(self, environment, source, name=None, filename=None, state=None): self.environment = environment self.stream = environment._tokenize(source, name, filename, state) self.name = name self.filename = filename self.closed = False self.extensions = {} for extension in environment.iter_extensions(): for tag in extension.tags: self.extensions[tag] = extension.parse self._last_identifier = 0 self._tag_stack = [] self._end_token_stack = [] def fail(self, msg, lineno=None, exc=TemplateSyntaxError): """Convenience method that raises `exc` with the message, passed line number or last line number as well as the current name and filename. """ if lineno is None: lineno = self.stream.current.lineno raise exc(msg, lineno, self.name, self.filename) def _fail_ut_eof(self, name, end_token_stack, lineno): expected = [] for exprs in end_token_stack: expected.extend(imap(describe_token_expr, exprs)) if end_token_stack: currently_looking = ' or '.join( "'%s'" % describe_token_expr(expr) for expr in end_token_stack[-1]) else: currently_looking = None if name is None: message = ['Unexpected end of template.'] else: message = ['Encountered unknown tag \'%s\'.' % name] if currently_looking: if name is not None and name in expected: message.append('You probably made a nesting mistake. Jinja ' 'is expecting this tag, but currently looking ' 'for %s.' % currently_looking) else: message.append('Jinja was looking for the following tags: ' '%s.' % currently_looking) if self._tag_stack: message.append('The innermost block that needs to be ' 'closed is \'%s\'.' % self._tag_stack[-1]) self.fail(' '.join(message), lineno) def fail_unknown_tag(self, name, lineno=None): """Called if the parser encounters an unknown tag. Tries to fail with a human readable error message that could help to identify the problem. 
""" return self._fail_ut_eof(name, self._end_token_stack, lineno) def fail_eof(self, end_tokens=None, lineno=None): """Like fail_unknown_tag but for end of template situations.""" stack = list(self._end_token_stack) if end_tokens is not None: stack.append(end_tokens) return self._fail_ut_eof(None, stack, lineno) def is_tuple_end(self, extra_end_rules=None): """Are we at the end of a tuple?""" if self.stream.current.type in ('variable_end', 'block_end', 'rparen'): return True elif extra_end_rules is not None: return self.stream.current.test_any(extra_end_rules) return False def free_identifier(self, lineno=None): """Return a new free identifier as :class:`~jinja2.nodes.InternalName`.""" self._last_identifier += 1 rv = object.__new__(nodes.InternalName) nodes.Node.__init__(rv, 'fi%d' % self._last_identifier, lineno=lineno) return rv def parse_statement(self): """Parse a single statement.""" token = self.stream.current if token.type != 'name': self.fail('tag name expected', token.lineno) self._tag_stack.append(token.value) pop_tag = True try: if token.value in _statement_keywords: return getattr(self, 'parse_' + self.stream.current.value)() if token.value == 'call': return self.parse_call_block() if token.value == 'filter': return self.parse_filter_block() ext = self.extensions.get(token.value) if ext is not None: return ext(self) # did not work out, remove the token we pushed by accident # from the stack so that the unknown tag fail function can # produce a proper error message. self._tag_stack.pop() pop_tag = False self.fail_unknown_tag(token.value, token.lineno) finally: if pop_tag: self._tag_stack.pop() def parse_statements(self, end_tokens, drop_needle=False): """Parse multiple statements into a list until one of the end tokens is reached. This is used to parse the body of statements as it also parses template data if appropriate. The parser checks first if the current token is a colon and skips it if there is one. Then it checks for the block end and parses until if one of the `end_tokens` is reached. Per default the active token in the stream at the end of the call is the matched end token. If this is not wanted `drop_needle` can be set to `True` and the end token is removed. """ # the first token may be a colon for python compatibility self.stream.skip_if('colon') # in the future it would be possible to add whole code sections # by adding some sort of end of statement token and parsing those here. 
self.stream.expect('block_end') result = self.subparse(end_tokens) # we reached the end of the template too early, the subparser # does not check for this, so we do that now if self.stream.current.type == 'eof': self.fail_eof(end_tokens) if drop_needle: next(self.stream) return result def parse_set(self): """Parse an assign statement.""" lineno = next(self.stream).lineno target = self.parse_assign_target() self.stream.expect('assign') expr = self.parse_tuple() return nodes.Assign(target, expr, lineno=lineno) def parse_for(self): """Parse a for loop.""" lineno = self.stream.expect('name:for').lineno target = self.parse_assign_target(extra_end_rules=('name:in',)) self.stream.expect('name:in') iter = self.parse_tuple(with_condexpr=False, extra_end_rules=('name:recursive',)) test = None if self.stream.skip_if('name:if'): test = self.parse_expression() recursive = self.stream.skip_if('name:recursive') body = self.parse_statements(('name:endfor', 'name:else')) if next(self.stream).value == 'endfor': else_ = [] else: else_ = self.parse_statements(('name:endfor',), drop_needle=True) return nodes.For(target, iter, body, else_, test, recursive, lineno=lineno) def parse_if(self): """Parse an if construct.""" node = result = nodes.If(lineno=self.stream.expect('name:if').lineno) while 1: node.test = self.parse_tuple(with_condexpr=False) node.body = self.parse_statements(('name:elif', 'name:else', 'name:endif')) token = next(self.stream) if token.test('name:elif'): new_node = nodes.If(lineno=self.stream.current.lineno) node.else_ = [new_node] node = new_node continue elif token.test('name:else'): node.else_ = self.parse_statements(('name:endif',), drop_needle=True) else: node.else_ = [] break return result def parse_block(self): node = nodes.Block(lineno=next(self.stream).lineno) node.name = self.stream.expect('name').value node.scoped = self.stream.skip_if('name:scoped') # common problem people encounter when switching from django # to jinja. we do not support hyphens in block names, so let's # raise a nicer error message in that case. 
if self.stream.current.type == 'sub': self.fail('Block names in Jinja have to be valid Python ' 'identifiers and may not contain hyphens, use an ' 'underscore instead.') node.body = self.parse_statements(('name:endblock',), drop_needle=True) self.stream.skip_if('name:' + node.name) return node def parse_extends(self): node = nodes.Extends(lineno=next(self.stream).lineno) node.template = self.parse_expression() return node def parse_import_context(self, node, default): if self.stream.current.test_any('name:with', 'name:without') and \ self.stream.look().test('name:context'): node.with_context = next(self.stream).value == 'with' self.stream.skip() else: node.with_context = default return node def parse_include(self): node = nodes.Include(lineno=next(self.stream).lineno) node.template = self.parse_expression() if self.stream.current.test('name:ignore') and \ self.stream.look().test('name:missing'): node.ignore_missing = True self.stream.skip(2) else: node.ignore_missing = False return self.parse_import_context(node, True) def parse_import(self): node = nodes.Import(lineno=next(self.stream).lineno) node.template = self.parse_expression() self.stream.expect('name:as') node.target = self.parse_assign_target(name_only=True).name return self.parse_import_context(node, False) def parse_from(self): node = nodes.FromImport(lineno=next(self.stream).lineno) node.template = self.parse_expression() self.stream.expect('name:import') node.names = [] def parse_context(): if self.stream.current.value in ('with', 'without') and \ self.stream.look().test('name:context'): node.with_context = next(self.stream).value == 'with' self.stream.skip() return True return False while 1: if node.names: self.stream.expect('comma') if self.stream.current.type == 'name': if parse_context(): break target = self.parse_assign_target(name_only=True) if target.name.startswith('_'): self.fail('names starting with an underline can not ' 'be imported', target.lineno, exc=TemplateAssertionError) if self.stream.skip_if('name:as'): alias = self.parse_assign_target(name_only=True) node.names.append((target.name, alias.name)) else: node.names.append(target.name) if parse_context() or self.stream.current.type != 'comma': break else: break if not hasattr(node, 'with_context'): node.with_context = False self.stream.skip_if('comma') return node def parse_signature(self, node): node.args = args = [] node.defaults = defaults = [] self.stream.expect('lparen') while self.stream.current.type != 'rparen': if args: self.stream.expect('comma') arg = self.parse_assign_target(name_only=True) arg.set_ctx('param') if self.stream.skip_if('assign'): defaults.append(self.parse_expression()) args.append(arg) self.stream.expect('rparen') def parse_call_block(self): node = nodes.CallBlock(lineno=next(self.stream).lineno) if self.stream.current.type == 'lparen': self.parse_signature(node) else: node.args = [] node.defaults = [] node.call = self.parse_expression() if not isinstance(node.call, nodes.Call): self.fail('expected call', node.lineno) node.body = self.parse_statements(('name:endcall',), drop_needle=True) return node def parse_filter_block(self): node = nodes.FilterBlock(lineno=next(self.stream).lineno) node.filter = self.parse_filter(None, start_inline=True) node.body = self.parse_statements(('name:endfilter',), drop_needle=True) return node def parse_macro(self): node = nodes.Macro(lineno=next(self.stream).lineno) node.name = self.parse_assign_target(name_only=True).name self.parse_signature(node) node.body = self.parse_statements(('name:endmacro',), 
drop_needle=True) return node def parse_print(self): node = nodes.Output(lineno=next(self.stream).lineno) node.nodes = [] while self.stream.current.type != 'block_end': if node.nodes: self.stream.expect('comma') node.nodes.append(self.parse_expression()) return node def parse_assign_target(self, with_tuple=True, name_only=False, extra_end_rules=None): """Parse an assignment target. As Jinja2 allows assignments to tuples, this function can parse all allowed assignment targets. Per default assignments to tuples are parsed, that can be disable however by setting `with_tuple` to `False`. If only assignments to names are wanted `name_only` can be set to `True`. The `extra_end_rules` parameter is forwarded to the tuple parsing function. """ if name_only: token = self.stream.expect('name') target = nodes.Name(token.value, 'store', lineno=token.lineno) else: if with_tuple: target = self.parse_tuple(simplified=True, extra_end_rules=extra_end_rules) else: target = self.parse_primary() target.set_ctx('store') if not target.can_assign(): self.fail('can\'t assign to %r' % target.__class__. __name__.lower(), target.lineno) return target def parse_expression(self, with_condexpr=True): """Parse an expression. Per default all expressions are parsed, if the optional `with_condexpr` parameter is set to `False` conditional expressions are not parsed. """ if with_condexpr: return self.parse_condexpr() return self.parse_or() def parse_condexpr(self): lineno = self.stream.current.lineno expr1 = self.parse_or() while self.stream.skip_if('name:if'): expr2 = self.parse_or() if self.stream.skip_if('name:else'): expr3 = self.parse_condexpr() else: expr3 = None expr1 = nodes.CondExpr(expr2, expr1, expr3, lineno=lineno) lineno = self.stream.current.lineno return expr1 def parse_or(self): lineno = self.stream.current.lineno left = self.parse_and() while self.stream.skip_if('name:or'): right = self.parse_and() left = nodes.Or(left, right, lineno=lineno) lineno = self.stream.current.lineno return left def parse_and(self): lineno = self.stream.current.lineno left = self.parse_not() while self.stream.skip_if('name:and'): right = self.parse_not() left = nodes.And(left, right, lineno=lineno) lineno = self.stream.current.lineno return left def parse_not(self): if self.stream.current.test('name:not'): lineno = next(self.stream).lineno return nodes.Not(self.parse_not(), lineno=lineno) return self.parse_compare() def parse_compare(self): lineno = self.stream.current.lineno expr = self.parse_add() ops = [] while 1: token_type = self.stream.current.type if token_type in _compare_operators: next(self.stream) ops.append(nodes.Operand(token_type, self.parse_add())) elif self.stream.skip_if('name:in'): ops.append(nodes.Operand('in', self.parse_add())) elif self.stream.current.test('name:not') and \ self.stream.look().test('name:in'): self.stream.skip(2) ops.append(nodes.Operand('notin', self.parse_add())) else: break lineno = self.stream.current.lineno if not ops: return expr return nodes.Compare(expr, ops, lineno=lineno) def parse_add(self): lineno = self.stream.current.lineno left = self.parse_sub() while self.stream.current.type == 'add': next(self.stream) right = self.parse_sub() left = nodes.Add(left, right, lineno=lineno) lineno = self.stream.current.lineno return left def parse_sub(self): lineno = self.stream.current.lineno left = self.parse_concat() while self.stream.current.type == 'sub': next(self.stream) right = self.parse_concat() left = nodes.Sub(left, right, lineno=lineno) lineno = self.stream.current.lineno return left 
def parse_concat(self): lineno = self.stream.current.lineno args = [self.parse_mul()] while self.stream.current.type == 'tilde': next(self.stream) args.append(self.parse_mul()) if len(args) == 1: return args[0] return nodes.Concat(args, lineno=lineno) def parse_mul(self): lineno = self.stream.current.lineno left = self.parse_div() while self.stream.current.type == 'mul': next(self.stream) right = self.parse_div() left = nodes.Mul(left, right, lineno=lineno) lineno = self.stream.current.lineno return left def parse_div(self): lineno = self.stream.current.lineno left = self.parse_floordiv() while self.stream.current.type == 'div': next(self.stream) right = self.parse_floordiv() left = nodes.Div(left, right, lineno=lineno) lineno = self.stream.current.lineno return left def parse_floordiv(self): lineno = self.stream.current.lineno left = self.parse_mod() while self.stream.current.type == 'floordiv': next(self.stream) right = self.parse_mod() left = nodes.FloorDiv(left, right, lineno=lineno) lineno = self.stream.current.lineno return left def parse_mod(self): lineno = self.stream.current.lineno left = self.parse_pow() while self.stream.current.type == 'mod': next(self.stream) right = self.parse_pow() left = nodes.Mod(left, right, lineno=lineno) lineno = self.stream.current.lineno return left def parse_pow(self): lineno = self.stream.current.lineno left = self.parse_unary() while self.stream.current.type == 'pow': next(self.stream) right = self.parse_unary() left = nodes.Pow(left, right, lineno=lineno) lineno = self.stream.current.lineno return left def parse_unary(self, with_filter=True): token_type = self.stream.current.type lineno = self.stream.current.lineno if token_type == 'sub': next(self.stream) node = nodes.Neg(self.parse_unary(False), lineno=lineno) elif token_type == 'add': next(self.stream) node = nodes.Pos(self.parse_unary(False), lineno=lineno) else: node = self.parse_primary() node = self.parse_postfix(node) if with_filter: node = self.parse_filter_expr(node) return node def parse_primary(self): token = self.stream.current if token.type == 'name': if token.value in ('true', 'false', 'True', 'False'): node = nodes.Const(token.value in ('true', 'True'), lineno=token.lineno) elif token.value in ('none', 'None'): node = nodes.Const(None, lineno=token.lineno) else: node = nodes.Name(token.value, 'load', lineno=token.lineno) next(self.stream) elif token.type == 'string': next(self.stream) buf = [token.value] lineno = token.lineno while self.stream.current.type == 'string': buf.append(self.stream.current.value) next(self.stream) node = nodes.Const(''.join(buf), lineno=lineno) elif token.type in ('integer', 'float'): next(self.stream) node = nodes.Const(token.value, lineno=token.lineno) elif token.type == 'lparen': next(self.stream) node = self.parse_tuple(explicit_parentheses=True) self.stream.expect('rparen') elif token.type == 'lbracket': node = self.parse_list() elif token.type == 'lbrace': node = self.parse_dict() else: self.fail("unexpected '%s'" % describe_token(token), token.lineno) return node def parse_tuple(self, simplified=False, with_condexpr=True, extra_end_rules=None, explicit_parentheses=False): """Works like `parse_expression` but if multiple expressions are delimited by a comma a :class:`~jinja2.nodes.Tuple` node is created. This method could also return a regular expression instead of a tuple if no commas where found. The default parsing mode is a full tuple. If `simplified` is `True` only names and literals are parsed. 
The `no_condexpr` parameter is forwarded to :meth:`parse_expression`. Because tuples do not require delimiters and may end in a bogus comma an extra hint is needed that marks the end of a tuple. For example for loops support tuples between `for` and `in`. In that case the `extra_end_rules` is set to ``['name:in']``. `explicit_parentheses` is true if the parsing was triggered by an expression in parentheses. This is used to figure out if an empty tuple is a valid expression or not. """ lineno = self.stream.current.lineno if simplified: parse = self.parse_primary elif with_condexpr: parse = self.parse_expression else: parse = lambda: self.parse_expression(with_condexpr=False) args = [] is_tuple = False while 1: if args: self.stream.expect('comma') if self.is_tuple_end(extra_end_rules): break args.append(parse()) if self.stream.current.type == 'comma': is_tuple = True else: break lineno = self.stream.current.lineno if not is_tuple: if args: return args[0] # if we don't have explicit parentheses, an empty tuple is # not a valid expression. This would mean nothing (literally # nothing) in the spot of an expression would be an empty # tuple. if not explicit_parentheses: self.fail('Expected an expression, got \'%s\'' % describe_token(self.stream.current)) return nodes.Tuple(args, 'load', lineno=lineno) def parse_list(self): token = self.stream.expect('lbracket') items = [] while self.stream.current.type != 'rbracket': if items: self.stream.expect('comma') if self.stream.current.type == 'rbracket': break items.append(self.parse_expression()) self.stream.expect('rbracket') return nodes.List(items, lineno=token.lineno) def parse_dict(self): token = self.stream.expect('lbrace') items = [] while self.stream.current.type != 'rbrace': if items: self.stream.expect('comma') if self.stream.current.type == 'rbrace': break key = self.parse_expression() self.stream.expect('colon') value = self.parse_expression() items.append(nodes.Pair(key, value, lineno=key.lineno)) self.stream.expect('rbrace') return nodes.Dict(items, lineno=token.lineno) def parse_postfix(self, node): while 1: token_type = self.stream.current.type if token_type == 'dot' or token_type == 'lbracket': node = self.parse_subscript(node) # calls are valid both after postfix expressions (getattr # and getitem) as well as filters and tests elif token_type == 'lparen': node = self.parse_call(node) else: break return node def parse_filter_expr(self, node): while 1: token_type = self.stream.current.type if token_type == 'pipe': node = self.parse_filter(node) elif token_type == 'name' and self.stream.current.value == 'is': node = self.parse_test(node) # calls are valid both after postfix expressions (getattr # and getitem) as well as filters and tests elif token_type == 'lparen': node = self.parse_call(node) else: break return node def parse_subscript(self, node): token = next(self.stream) if token.type == 'dot': attr_token = self.stream.current next(self.stream) if attr_token.type == 'name': return nodes.Getattr(node, attr_token.value, 'load', lineno=token.lineno) elif attr_token.type != 'integer': self.fail('expected name or number', attr_token.lineno) arg = nodes.Const(attr_token.value, lineno=attr_token.lineno) return nodes.Getitem(node, arg, 'load', lineno=token.lineno) if token.type == 'lbracket': args = [] while self.stream.current.type != 'rbracket': if args: self.stream.expect('comma') args.append(self.parse_subscribed()) self.stream.expect('rbracket') if len(args) == 1: arg = args[0] else: arg = nodes.Tuple(args, 'load', lineno=token.lineno) 
return nodes.Getitem(node, arg, 'load', lineno=token.lineno) self.fail('expected subscript expression', self.lineno) def parse_subscribed(self): lineno = self.stream.current.lineno if self.stream.current.type == 'colon': next(self.stream) args = [None] else: node = self.parse_expression() if self.stream.current.type != 'colon': return node next(self.stream) args = [node] if self.stream.current.type == 'colon': args.append(None) elif self.stream.current.type not in ('rbracket', 'comma'): args.append(self.parse_expression()) else: args.append(None) if self.stream.current.type == 'colon': next(self.stream) if self.stream.current.type not in ('rbracket', 'comma'): args.append(self.parse_expression()) else: args.append(None) else: args.append(None) return nodes.Slice(lineno=lineno, *args) def parse_call(self, node): token = self.stream.expect('lparen') args = [] kwargs = [] dyn_args = dyn_kwargs = None require_comma = False def ensure(expr): if not expr: self.fail('invalid syntax for function call expression', token.lineno) while self.stream.current.type != 'rparen': if require_comma: self.stream.expect('comma') # support for trailing comma if self.stream.current.type == 'rparen': break if self.stream.current.type == 'mul': ensure(dyn_args is None and dyn_kwargs is None) next(self.stream) dyn_args = self.parse_expression() elif self.stream.current.type == 'pow': ensure(dyn_kwargs is None) next(self.stream) dyn_kwargs = self.parse_expression() else: ensure(dyn_args is None and dyn_kwargs is None) if self.stream.current.type == 'name' and \ self.stream.look().type == 'assign': key = self.stream.current.value self.stream.skip(2) value = self.parse_expression() kwargs.append(nodes.Keyword(key, value, lineno=value.lineno)) else: ensure(not kwargs) args.append(self.parse_expression()) require_comma = True self.stream.expect('rparen') if node is None: return args, kwargs, dyn_args, dyn_kwargs return nodes.Call(node, args, kwargs, dyn_args, dyn_kwargs, lineno=token.lineno) def parse_filter(self, node, start_inline=False): while self.stream.current.type == 'pipe' or start_inline: if not start_inline: next(self.stream) token = self.stream.expect('name') name = token.value while self.stream.current.type == 'dot': next(self.stream) name += '.' + self.stream.expect('name').value if self.stream.current.type == 'lparen': args, kwargs, dyn_args, dyn_kwargs = self.parse_call(None) else: args = [] kwargs = [] dyn_args = dyn_kwargs = None node = nodes.Filter(node, name, args, kwargs, dyn_args, dyn_kwargs, lineno=token.lineno) start_inline = False return node def parse_test(self, node): token = next(self.stream) if self.stream.current.test('name:not'): next(self.stream) negated = True else: negated = False name = self.stream.expect('name').value while self.stream.current.type == 'dot': next(self.stream) name += '.' 
+ self.stream.expect('name').value dyn_args = dyn_kwargs = None kwargs = [] if self.stream.current.type == 'lparen': args, kwargs, dyn_args, dyn_kwargs = self.parse_call(None) elif self.stream.current.type in ('name', 'string', 'integer', 'float', 'lparen', 'lbracket', 'lbrace') and not \ self.stream.current.test_any('name:else', 'name:or', 'name:and'): if self.stream.current.test('name:is'): self.fail('You cannot chain multiple tests with is') args = [self.parse_expression()] else: args = [] node = nodes.Test(node, name, args, kwargs, dyn_args, dyn_kwargs, lineno=token.lineno) if negated: node = nodes.Not(node, lineno=token.lineno) return node def subparse(self, end_tokens=None): body = [] data_buffer = [] add_data = data_buffer.append if end_tokens is not None: self._end_token_stack.append(end_tokens) def flush_data(): if data_buffer: lineno = data_buffer[0].lineno body.append(nodes.Output(data_buffer[:], lineno=lineno)) del data_buffer[:] try: while self.stream: token = self.stream.current if token.type == 'data': if token.value: add_data(nodes.TemplateData(token.value, lineno=token.lineno)) next(self.stream) elif token.type == 'variable_begin': next(self.stream) add_data(self.parse_tuple(with_condexpr=True)) self.stream.expect('variable_end') elif token.type == 'block_begin': flush_data() next(self.stream) if end_tokens is not None and \ self.stream.current.test_any(*end_tokens): return body rv = self.parse_statement() if isinstance(rv, list): body.extend(rv) else: body.append(rv) self.stream.expect('block_end') else: raise AssertionError('internal parsing error') flush_data() finally: if end_tokens is not None: self._end_token_stack.pop() return body def parse(self): """Parse the whole template into a `Template` node.""" result = nodes.Template(self.subparse(), lineno=1) result.set_environment(self.environment) return result
bsd-3-clause
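The Parser above is normally driven through Jinja2's public Environment API rather than constructed directly. A minimal sketch of parsing a template source into the nodes.Template AST that Parser.parse() returns:

from jinja2 import Environment

env = Environment()

# Environment.parse tokenizes the source and runs Parser.parse(),
# returning the nodes.Template tree built by subparse() above.
ast = env.parse("{% for item in seq %}{{ item }}{% endfor %}")
print(ast)  # Template(body=[For(target=Name('item'), ...)])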
disigma/depot_tools
third_party/coverage/data.py
209
9188
"""Coverage data for Coverage.""" import os from coverage.backward import iitems, pickle, sorted # pylint: disable=W0622 from coverage.files import PathAliases from coverage.misc import file_be_gone class CoverageData(object): """Manages collected coverage data, including file storage. The data file format is a pickled dict, with these keys: * collector: a string identifying the collecting software * lines: a dict mapping filenames to sorted lists of line numbers executed: { 'file1': [17,23,45], 'file2': [1,2,3], ... } * arcs: a dict mapping filenames to sorted lists of line number pairs: { 'file1': [(17,23), (17,25), (25,26)], ... } """ def __init__(self, basename=None, collector=None, debug=None): """Create a CoverageData. `basename` is the name of the file to use for storing data. `collector` is a string describing the coverage measurement software. `debug` is a `DebugControl` object for writing debug messages. """ self.collector = collector or 'unknown' self.debug = debug self.use_file = True # Construct the filename that will be used for data file storage, if we # ever do any file storage. self.filename = basename or ".coverage" self.filename = os.path.abspath(self.filename) # A map from canonical Python source file name to a dictionary in # which there's an entry for each line number that has been # executed: # # { # 'filename1.py': { 12: None, 47: None, ... }, # ... # } # self.lines = {} # A map from canonical Python source file name to a dictionary with an # entry for each pair of line numbers forming an arc: # # { # 'filename1.py': { (12,14): None, (47,48): None, ... }, # ... # } # self.arcs = {} def usefile(self, use_file=True): """Set whether or not to use a disk file for data.""" self.use_file = use_file def read(self): """Read coverage data from the coverage data file (if it exists).""" if self.use_file: self.lines, self.arcs = self._read_file(self.filename) else: self.lines, self.arcs = {}, {} def write(self, suffix=None): """Write the collected coverage data to a file. `suffix` is a suffix to append to the base file name. This can be used for multiple or parallel execution, so that many coverage data files can exist simultaneously. A dot will be used to join the base name and the suffix. """ if self.use_file: filename = self.filename if suffix: filename += "." + suffix self.write_file(filename) def erase(self): """Erase the data, both in this object, and from its file storage.""" if self.use_file: if self.filename: file_be_gone(self.filename) self.lines = {} self.arcs = {} def line_data(self): """Return the map from filenames to lists of line numbers executed.""" return dict( [(f, sorted(lmap.keys())) for f, lmap in iitems(self.lines)] ) def arc_data(self): """Return the map from filenames to lists of line number pairs.""" return dict( [(f, sorted(amap.keys())) for f, amap in iitems(self.arcs)] ) def write_file(self, filename): """Write the coverage data to `filename`.""" # Create the file data. data = {} data['lines'] = self.line_data() arcs = self.arc_data() if arcs: data['arcs'] = arcs if self.collector: data['collector'] = self.collector if self.debug and self.debug.should('dataio'): self.debug.write("Writing data to %r" % (filename,)) # Write the pickle to the file. 
fdata = open(filename, 'wb') try: pickle.dump(data, fdata, 2) finally: fdata.close() def read_file(self, filename): """Read the coverage data from `filename`.""" self.lines, self.arcs = self._read_file(filename) def raw_data(self, filename): """Return the raw pickled data from `filename`.""" if self.debug and self.debug.should('dataio'): self.debug.write("Reading data from %r" % (filename,)) fdata = open(filename, 'rb') try: data = pickle.load(fdata) finally: fdata.close() return data def _read_file(self, filename): """Return the stored coverage data from the given file. Returns two values, suitable for assigning to `self.lines` and `self.arcs`. """ lines = {} arcs = {} try: data = self.raw_data(filename) if isinstance(data, dict): # Unpack the 'lines' item. lines = dict([ (f, dict.fromkeys(linenos, None)) for f, linenos in iitems(data.get('lines', {})) ]) # Unpack the 'arcs' item. arcs = dict([ (f, dict.fromkeys(arcpairs, None)) for f, arcpairs in iitems(data.get('arcs', {})) ]) except Exception: pass return lines, arcs def combine_parallel_data(self, aliases=None): """Combine a number of data files together. Treat `self.filename` as a file prefix, and combine the data from all of the data files starting with that prefix plus a dot. If `aliases` is provided, it's a `PathAliases` object that is used to re-map paths to match the local machine's. """ aliases = aliases or PathAliases() data_dir, local = os.path.split(self.filename) localdot = local + '.' for f in os.listdir(data_dir or '.'): if f.startswith(localdot): full_path = os.path.join(data_dir, f) new_lines, new_arcs = self._read_file(full_path) for filename, file_data in iitems(new_lines): filename = aliases.map(filename) self.lines.setdefault(filename, {}).update(file_data) for filename, file_data in iitems(new_arcs): filename = aliases.map(filename) self.arcs.setdefault(filename, {}).update(file_data) if f != local: os.remove(full_path) def add_line_data(self, line_data): """Add executed line data. `line_data` is { filename: { lineno: None, ... }, ...} """ for filename, linenos in iitems(line_data): self.lines.setdefault(filename, {}).update(linenos) def add_arc_data(self, arc_data): """Add measured arc data. `arc_data` is { filename: { (l1,l2): None, ... }, ...} """ for filename, arcs in iitems(arc_data): self.arcs.setdefault(filename, {}).update(arcs) def touch_file(self, filename): """Ensure that `filename` appears in the data, empty if needed.""" self.lines.setdefault(filename, {}) def measured_files(self): """A list of all files that had been measured.""" return list(self.lines.keys()) def executed_lines(self, filename): """A map containing all the line numbers executed in `filename`. If `filename` hasn't been collected at all (because it wasn't executed) then return an empty map. """ return self.lines.get(filename) or {} def executed_arcs(self, filename): """A map containing all the arcs executed in `filename`.""" return self.arcs.get(filename) or {} def add_to_hash(self, filename, hasher): """Contribute `filename`'s data to the Md5Hash `hasher`.""" hasher.update(self.executed_lines(filename)) hasher.update(self.executed_arcs(filename)) def summary(self, fullpath=False): """Return a dict summarizing the coverage data. Keys are based on the filenames, and values are the number of executed lines. If `fullpath` is true, then the keys are the full pathnames of the files, otherwise they are the basenames of the files. 
""" summ = {} if fullpath: filename_fn = lambda f: f else: filename_fn = os.path.basename for filename, lines in iitems(self.lines): summ[filename_fn(filename)] = len(lines) return summ def has_arcs(self): """Does this data have arcs?""" return bool(self.arcs) if __name__ == '__main__': # Ad-hoc: show the raw data in a data file. import pprint, sys covdata = CoverageData() if sys.argv[1:]: fname = sys.argv[1] else: fname = covdata.filename pprint.pprint(covdata.raw_data(fname))
bsd-3-clause
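A small round-trip sketch against the CoverageData methods shown above; note this is coverage.py's internal 3.x-era interface as vendored here, not a stable public API:

from coverage.data import CoverageData

data = CoverageData(basename=".coverage.demo")
# Line data is stored as {filename: {lineno: None, ...}}.
data.add_line_data({"example.py": {1: None, 2: None, 5: None}})
data.write()

restored = CoverageData(basename=".coverage.demo")
restored.read()
print(restored.summary())  # {'example.py': 3}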
sudovijay/youtube-dl
youtube_dl/extractor/vimeo.py
19
27619
# encoding: utf-8 from __future__ import unicode_literals import json import re import itertools from .common import InfoExtractor from ..compat import ( compat_HTTPError, compat_urllib_parse, compat_urllib_request, compat_urlparse, ) from ..utils import ( ExtractorError, InAdvancePagedList, int_or_none, RegexNotFoundError, smuggle_url, std_headers, unified_strdate, unsmuggle_url, urlencode_postdata, unescapeHTML, ) class VimeoBaseInfoExtractor(InfoExtractor): _NETRC_MACHINE = 'vimeo' _LOGIN_REQUIRED = False def _login(self): (username, password) = self._get_login_info() if username is None: if self._LOGIN_REQUIRED: raise ExtractorError('No login info available, needed for using %s.' % self.IE_NAME, expected=True) return self.report_login() login_url = 'https://vimeo.com/log_in' webpage = self._download_webpage(login_url, None, False) token = self._search_regex(r'xsrft":"(.*?)"', webpage, 'login token') data = urlencode_postdata({ 'email': username, 'password': password, 'action': 'login', 'service': 'vimeo', 'token': token, }) login_request = compat_urllib_request.Request(login_url, data) login_request.add_header('Content-Type', 'application/x-www-form-urlencoded') login_request.add_header('Cookie', 'xsrft=%s' % token) self._download_webpage(login_request, None, False, 'Wrong login info') class VimeoIE(VimeoBaseInfoExtractor): """Information extractor for vimeo.com.""" # _VALID_URL matches Vimeo URLs _VALID_URL = r'''(?x) https?:// (?:(?:www|(?P<player>player))\.)? vimeo(?P<pro>pro)?\.com/ (?!channels/[^/?#]+/?(?:$|[?#])|album/) (?:.*?/)? (?:(?:play_redirect_hls|moogaloop\.swf)\?clip_id=)? (?:videos?/)? (?P<id>[0-9]+) /?(?:[?&].*)?(?:[#].*)?$''' IE_NAME = 'vimeo' _TESTS = [ { 'url': 'http://vimeo.com/56015672#at=0', 'md5': '8879b6cc097e987f02484baf890129e5', 'info_dict': { 'id': '56015672', 'ext': 'mp4', "upload_date": "20121220", "description": "This is a test case for youtube-dl.\nFor more information, see github.com/rg3/youtube-dl\nTest chars: \u2605 \" ' \u5e78 / \\ \u00e4 \u21ad \U0001d550", "uploader_id": "user7108434", "uploader": "Filippo Valsorda", "title": "youtube-dl test video - \u2605 \" ' \u5e78 / \\ \u00e4 \u21ad \U0001d550", "duration": 10, }, }, { 'url': 'http://vimeopro.com/openstreetmapus/state-of-the-map-us-2013/video/68093876', 'md5': '3b5ca6aa22b60dfeeadf50b72e44ed82', 'note': 'Vimeo Pro video (#1197)', 'info_dict': { 'id': '68093876', 'ext': 'mp4', 'uploader_id': 'openstreetmapus', 'uploader': 'OpenStreetMap US', 'title': 'Andy Allan - Putting the Carto into OpenStreetMap Cartography', 'description': 'md5:380943ec71b89736ff4bf27183233d09', 'duration': 1595, }, }, { 'url': 'http://player.vimeo.com/video/54469442', 'md5': '619b811a4417aa4abe78dc653becf511', 'note': 'Videos that embed the url in the player page', 'info_dict': { 'id': '54469442', 'ext': 'mp4', 'title': 'Kathy Sierra: Building the minimum Badass User, Business of Software 2012', 'uploader': 'The BLN & Business of Software', 'uploader_id': 'theblnbusinessofsoftware', 'duration': 3610, 'description': None, }, }, { 'url': 'http://vimeo.com/68375962', 'md5': 'aaf896bdb7ddd6476df50007a0ac0ae7', 'note': 'Video protected with password', 'info_dict': { 'id': '68375962', 'ext': 'mp4', 'title': 'youtube-dl password protected test video', 'upload_date': '20130614', 'uploader_id': 'user18948128', 'uploader': 'Jaime Marquínez Ferrándiz', 'duration': 10, 'description': 'This is "youtube-dl password protected test video" by Jaime Marquínez Ferrándiz on Vimeo, the home for high quality videos and the people who love 
them.', }, 'params': { 'videopassword': 'youtube-dl', }, }, { 'url': 'http://vimeo.com/channels/keypeele/75629013', 'md5': '2f86a05afe9d7abc0b9126d229bbe15d', 'note': 'Video is freely available via original URL ' 'and protected with password when accessed via http://vimeo.com/75629013', 'info_dict': { 'id': '75629013', 'ext': 'mp4', 'title': 'Key & Peele: Terrorist Interrogation', 'description': 'md5:8678b246399b070816b12313e8b4eb5c', 'uploader_id': 'atencio', 'uploader': 'Peter Atencio', 'upload_date': '20130927', 'duration': 187, }, }, { 'url': 'http://vimeo.com/76979871', 'md5': '3363dd6ffebe3784d56f4132317fd446', 'note': 'Video with subtitles', 'info_dict': { 'id': '76979871', 'ext': 'mp4', 'title': 'The New Vimeo Player (You Know, For Videos)', 'description': 'md5:2ec900bf97c3f389378a96aee11260ea', 'upload_date': '20131015', 'uploader_id': 'staff', 'uploader': 'Vimeo Staff', 'duration': 62, } }, { # from https://www.ouya.tv/game/Pier-Solar-and-the-Great-Architects/ 'url': 'https://player.vimeo.com/video/98044508', 'note': 'The js code contains assignments to the same variable as the config', 'info_dict': { 'id': '98044508', 'ext': 'mp4', 'title': 'Pier Solar OUYA Official Trailer', 'uploader': 'Tulio Gonçalves', 'uploader_id': 'user28849593', }, }, ] @staticmethod def _extract_vimeo_url(url, webpage): # Look for embedded (iframe) Vimeo player mobj = re.search( r'<iframe[^>]+?src=(["\'])(?P<url>(?:https?:)?//player\.vimeo\.com/video/.+?)\1', webpage) if mobj: player_url = unescapeHTML(mobj.group('url')) surl = smuggle_url(player_url, {'Referer': url}) return surl # Look for embedded (swf embed) Vimeo player mobj = re.search( r'<embed[^>]+?src="((?:https?:)?//(?:www\.)?vimeo\.com/moogaloop\.swf.+?)"', webpage) if mobj: return mobj.group(1) def _verify_video_password(self, url, video_id, webpage): password = self._downloader.params.get('videopassword', None) if password is None: raise ExtractorError('This video is protected by a password, use the --video-password option', expected=True) token = self._search_regex(r'xsrft[\s=:"\']+([^"\']+)', webpage, 'login token') data = urlencode_postdata({ 'password': password, 'token': token, }) if url.startswith('http://'): # vimeo only supports https now, but the user can give an http url url = url.replace('http://', 'https://') password_request = compat_urllib_request.Request(url + '/password', data) password_request.add_header('Content-Type', 'application/x-www-form-urlencoded') password_request.add_header('Cookie', 'xsrft=%s' % token) return self._download_webpage( password_request, video_id, 'Verifying the password', 'Wrong password') def _verify_player_video_password(self, url, video_id): password = self._downloader.params.get('videopassword', None) if password is None: raise ExtractorError('This video is protected by a password, use the --video-password option') data = compat_urllib_parse.urlencode({'password': password}) pass_url = url + '/check-password' password_request = compat_urllib_request.Request(pass_url, data) password_request.add_header('Content-Type', 'application/x-www-form-urlencoded') return self._download_json( password_request, video_id, 'Verifying the password', 'Wrong password') def _real_initialize(self): self._login() def _real_extract(self, url): url, data = unsmuggle_url(url) headers = std_headers if data is not None: headers = headers.copy() headers.update(data) if 'Referer' not in headers: headers['Referer'] = url # Extract ID from URL mobj = re.match(self._VALID_URL, url) video_id = mobj.group('id') orig_url = url if 
mobj.group('pro') or mobj.group('player'): url = 'https://player.vimeo.com/video/' + video_id else: url = 'https://vimeo.com/' + video_id # Retrieve video webpage to extract further information request = compat_urllib_request.Request(url, None, headers) try: webpage = self._download_webpage(request, video_id) except ExtractorError as ee: if isinstance(ee.cause, compat_HTTPError) and ee.cause.code == 403: errmsg = ee.cause.read() if b'Because of its privacy settings, this video cannot be played here' in errmsg: raise ExtractorError( 'Cannot download embed-only video without embedding ' 'URL. Please call youtube-dl with the URL of the page ' 'that embeds this video.', expected=True) raise # Now we begin extracting as much information as we can from what we # retrieved. First we extract the information common to all extractors, # and latter we extract those that are Vimeo specific. self.report_extraction(video_id) vimeo_config = self._search_regex( r'vimeo\.config\s*=\s*({.+?});', webpage, 'vimeo config', default=None) if vimeo_config: seed_status = self._parse_json(vimeo_config, video_id).get('seed_status', {}) if seed_status.get('state') == 'failed': raise ExtractorError( '%s returned error: %s' % (self.IE_NAME, seed_status['title']), expected=True) # Extract the config JSON try: try: config_url = self._html_search_regex( r' data-config-url="(.+?)"', webpage, 'config URL') config_json = self._download_webpage(config_url, video_id) config = json.loads(config_json) except RegexNotFoundError: # For pro videos or player.vimeo.com urls # We try to find out to which variable is assigned the config dic m_variable_name = re.search('(\w)\.video\.id', webpage) if m_variable_name is not None: config_re = r'%s=({[^}].+?});' % re.escape(m_variable_name.group(1)) else: config_re = [r' = {config:({.+?}),assets:', r'(?:[abc])=({.+?});'] config = self._search_regex(config_re, webpage, 'info section', flags=re.DOTALL) config = json.loads(config) except Exception as e: if re.search('The creator of this video has not given you permission to embed it on this domain.', webpage): raise ExtractorError('The author has restricted the access to this video, try with the "--referer" option') if re.search(r'<form[^>]+?id="pw_form"', webpage) is not None: if data and '_video_password_verified' in data: raise ExtractorError('video password verification failed!') self._verify_video_password(url, video_id, webpage) return self._real_extract( smuggle_url(url, {'_video_password_verified': 'verified'})) else: raise ExtractorError('Unable to extract info section', cause=e) else: if config.get('view') == 4: config = self._verify_player_video_password(url, video_id) # Extract title video_title = config["video"]["title"] # Extract uploader and uploader_id video_uploader = config["video"]["owner"]["name"] video_uploader_id = config["video"]["owner"]["url"].split('/')[-1] if config["video"]["owner"]["url"] else None # Extract video thumbnail video_thumbnail = config["video"].get("thumbnail") if video_thumbnail is None: video_thumbs = config["video"].get("thumbs") if video_thumbs and isinstance(video_thumbs, dict): _, video_thumbnail = sorted((int(width if width.isdigit() else 0), t_url) for (width, t_url) in video_thumbs.items())[-1] # Extract video description video_description = self._html_search_regex( r'(?s)<div\s+class="[^"]*description[^"]*"[^>]*>(.*?)</div>', webpage, 'description', default=None) if not video_description: video_description = self._html_search_meta( 'description', webpage, default=None) if not 
video_description and mobj.group('pro'): orig_webpage = self._download_webpage( orig_url, video_id, note='Downloading webpage for description', fatal=False) if orig_webpage: video_description = self._html_search_meta( 'description', orig_webpage, default=None) if not video_description and not mobj.group('player'): self._downloader.report_warning('Cannot find video description') # Extract video duration video_duration = int_or_none(config["video"].get("duration")) # Extract upload date video_upload_date = None mobj = re.search(r'<time[^>]+datetime="([^"]+)"', webpage) if mobj is not None: video_upload_date = unified_strdate(mobj.group(1)) try: view_count = int(self._search_regex(r'UserPlays:(\d+)', webpage, 'view count')) like_count = int(self._search_regex(r'UserLikes:(\d+)', webpage, 'like count')) comment_count = int(self._search_regex(r'UserComments:(\d+)', webpage, 'comment count')) except RegexNotFoundError: # This info is only available in vimeo.com/{id} urls view_count = None like_count = None comment_count = None # Vimeo specific: extract request signature and timestamp sig = config['request']['signature'] timestamp = config['request']['timestamp'] # Vimeo specific: extract video codec and quality information # First consider quality, then codecs, then take everything codecs = [('vp6', 'flv'), ('vp8', 'flv'), ('h264', 'mp4')] files = {'hd': [], 'sd': [], 'other': []} config_files = config["video"].get("files") or config["request"].get("files") for codec_name, codec_extension in codecs: for quality in config_files.get(codec_name, []): format_id = '-'.join((codec_name, quality)).lower() key = quality if quality in files else 'other' video_url = None if isinstance(config_files[codec_name], dict): file_info = config_files[codec_name][quality] video_url = file_info.get('url') else: file_info = {} if video_url is None: video_url = "http://player.vimeo.com/play_redirect?clip_id=%s&sig=%s&time=%s&quality=%s&codecs=%s&type=moogaloop_local&embed_location=" \ % (video_id, sig, timestamp, quality, codec_name.upper()) files[key].append({ 'ext': codec_extension, 'url': video_url, 'format_id': format_id, 'width': file_info.get('width'), 'height': file_info.get('height'), }) formats = [] for key in ('other', 'sd', 'hd'): formats += files[key] if len(formats) == 0: raise ExtractorError('No known codec found') subtitles = {} text_tracks = config['request'].get('text_tracks') if text_tracks: for tt in text_tracks: subtitles[tt['lang']] = [{ 'ext': 'vtt', 'url': 'https://vimeo.com' + tt['url'], }] return { 'id': video_id, 'uploader': video_uploader, 'uploader_id': video_uploader_id, 'upload_date': video_upload_date, 'title': video_title, 'thumbnail': video_thumbnail, 'description': video_description, 'duration': video_duration, 'formats': formats, 'webpage_url': url, 'view_count': view_count, 'like_count': like_count, 'comment_count': comment_count, 'subtitles': subtitles, } class VimeoChannelIE(InfoExtractor): IE_NAME = 'vimeo:channel' _VALID_URL = r'https://vimeo\.com/channels/(?P<id>[^/?#]+)/?(?:$|[?#])' _MORE_PAGES_INDICATOR = r'<a.+?rel="next"' _TITLE_RE = r'<link rel="alternate"[^>]+?title="(.*?)"' _TESTS = [{ 'url': 'https://vimeo.com/channels/tributes', 'info_dict': { 'id': 'tributes', 'title': 'Vimeo Tributes', }, 'playlist_mincount': 25, }] def _page_url(self, base_url, pagenum): return '%s/videos/page:%d/' % (base_url, pagenum) def _extract_list_title(self, webpage): return self._html_search_regex(self._TITLE_RE, webpage, 'list title') def _login_list_password(self, page_url, list_id, 
webpage): login_form = self._search_regex( r'(?s)<form[^>]+?id="pw_form"(.*?)</form>', webpage, 'login form', default=None) if not login_form: return webpage password = self._downloader.params.get('videopassword', None) if password is None: raise ExtractorError('This album is protected by a password, use the --video-password option', expected=True) fields = self._hidden_inputs(login_form) token = self._search_regex(r'xsrft[\s=:"\']+([^"\']+)', webpage, 'login token') fields['token'] = token fields['password'] = password post = urlencode_postdata(fields) password_path = self._search_regex( r'action="([^"]+)"', login_form, 'password URL') password_url = compat_urlparse.urljoin(page_url, password_path) password_request = compat_urllib_request.Request(password_url, post) password_request.add_header('Content-type', 'application/x-www-form-urlencoded') self._set_cookie('vimeo.com', 'xsrft', token) return self._download_webpage( password_request, list_id, 'Verifying the password', 'Wrong password') def _extract_videos(self, list_id, base_url): video_ids = [] for pagenum in itertools.count(1): page_url = self._page_url(base_url, pagenum) webpage = self._download_webpage( page_url, list_id, 'Downloading page %s' % pagenum) if pagenum == 1: webpage = self._login_list_password(page_url, list_id, webpage) video_ids.extend(re.findall(r'id="clip_(\d+?)"', webpage)) if re.search(self._MORE_PAGES_INDICATOR, webpage, re.DOTALL) is None: break entries = [self.url_result('https://vimeo.com/%s' % video_id, 'Vimeo') for video_id in video_ids] return {'_type': 'playlist', 'id': list_id, 'title': self._extract_list_title(webpage), 'entries': entries, } def _real_extract(self, url): mobj = re.match(self._VALID_URL, url) channel_id = mobj.group('id') return self._extract_videos(channel_id, 'https://vimeo.com/channels/%s' % channel_id) class VimeoUserIE(VimeoChannelIE): IE_NAME = 'vimeo:user' _VALID_URL = r'https://vimeo\.com/(?![0-9]+(?:$|[?#/]))(?P<name>[^/]+)(?:/videos|[#?]|$)' _TITLE_RE = r'<a[^>]+?class="user">([^<>]+?)</a>' _TESTS = [{ 'url': 'https://vimeo.com/nkistudio/videos', 'info_dict': { 'title': 'Nki', 'id': 'nkistudio', }, 'playlist_mincount': 66, }] def _real_extract(self, url): mobj = re.match(self._VALID_URL, url) name = mobj.group('name') return self._extract_videos(name, 'https://vimeo.com/%s' % name) class VimeoAlbumIE(VimeoChannelIE): IE_NAME = 'vimeo:album' _VALID_URL = r'https://vimeo\.com/album/(?P<id>\d+)' _TITLE_RE = r'<header id="page_header">\n\s*<h1>(.*?)</h1>' _TESTS = [{ 'url': 'https://vimeo.com/album/2632481', 'info_dict': { 'id': '2632481', 'title': 'Staff Favorites: November 2013', }, 'playlist_mincount': 13, }, { 'note': 'Password-protected album', 'url': 'https://vimeo.com/album/3253534', 'info_dict': { 'title': 'test', 'id': '3253534', }, 'playlist_count': 1, 'params': { 'videopassword': 'youtube-dl', } }] def _page_url(self, base_url, pagenum): return '%s/page:%d/' % (base_url, pagenum) def _real_extract(self, url): album_id = self._match_id(url) return self._extract_videos(album_id, 'https://vimeo.com/album/%s' % album_id) class VimeoGroupsIE(VimeoAlbumIE): IE_NAME = 'vimeo:group' _VALID_URL = r'https://vimeo\.com/groups/(?P<name>[^/]+)' _TESTS = [{ 'url': 'https://vimeo.com/groups/rolexawards', 'info_dict': { 'id': 'rolexawards', 'title': 'Rolex Awards for Enterprise', }, 'playlist_mincount': 73, }] def _extract_list_title(self, webpage): return self._og_search_title(webpage) def _real_extract(self, url): mobj = re.match(self._VALID_URL, url) name = mobj.group('name') 
return self._extract_videos(name, 'https://vimeo.com/groups/%s' % name) class VimeoReviewIE(InfoExtractor): IE_NAME = 'vimeo:review' IE_DESC = 'Review pages on vimeo' _VALID_URL = r'https://vimeo\.com/[^/]+/review/(?P<id>[^/]+)' _TESTS = [{ 'url': 'https://vimeo.com/user21297594/review/75524534/3c257a1b5d', 'md5': 'c507a72f780cacc12b2248bb4006d253', 'info_dict': { 'id': '75524534', 'ext': 'mp4', 'title': "DICK HARDWICK 'Comedian'", 'uploader': 'Richard Hardwick', } }, { 'note': 'video player needs Referer', 'url': 'https://vimeo.com/user22258446/review/91613211/13f927e053', 'md5': '6295fdab8f4bf6a002d058b2c6dce276', 'info_dict': { 'id': '91613211', 'ext': 'mp4', 'title': 're:(?i)^Death by dogma versus assembling agile . Sander Hoogendoorn', 'uploader': 'DevWeek Events', 'duration': 2773, 'thumbnail': 're:^https?://.*\.jpg$', } }] def _real_extract(self, url): mobj = re.match(self._VALID_URL, url) video_id = mobj.group('id') player_url = 'https://player.vimeo.com/player/' + video_id return self.url_result(player_url, 'Vimeo', video_id) class VimeoWatchLaterIE(VimeoBaseInfoExtractor, VimeoChannelIE): IE_NAME = 'vimeo:watchlater' IE_DESC = 'Vimeo watch later list, "vimeowatchlater" keyword (requires authentication)' _VALID_URL = r'https://vimeo\.com/home/watchlater|:vimeowatchlater' _LOGIN_REQUIRED = True _TITLE_RE = r'href="/home/watchlater".*?>(.*?)<' _TESTS = [{ 'url': 'https://vimeo.com/home/watchlater', 'only_matching': True, }] def _real_initialize(self): self._login() def _page_url(self, base_url, pagenum): url = '%s/page:%d/' % (base_url, pagenum) request = compat_urllib_request.Request(url) # Set the header to get a partial html page with the ids, # the normal page doesn't contain them. request.add_header('X-Requested-With', 'XMLHttpRequest') return request def _real_extract(self, url): return self._extract_videos('watchlater', 'https://vimeo.com/home/watchlater') class VimeoLikesIE(InfoExtractor): _VALID_URL = r'https://(?:www\.)?vimeo\.com/user(?P<id>[0-9]+)/likes/?(?:$|[?#]|sort:)' IE_NAME = 'vimeo:likes' IE_DESC = 'Vimeo user likes' _TEST = { 'url': 'https://vimeo.com/user755559/likes/', 'playlist_mincount': 293, "info_dict": { 'id': 'user755559_likes', "description": "See all the videos urza likes", "title": 'Videos urza likes', }, } def _real_extract(self, url): user_id = self._match_id(url) webpage = self._download_webpage(url, user_id) page_count = self._int( self._search_regex( r'''(?x)<li><a\s+href="[^"]+"\s+data-page="([0-9]+)"> .*?</a></li>\s*<li\s+class="pagination_next"> ''', webpage, 'page count'), 'page count', fatal=True) PAGE_SIZE = 12 title = self._html_search_regex( r'(?s)<h1>(.+?)</h1>', webpage, 'title', fatal=False) description = self._html_search_meta('description', webpage) def _get_page(idx): page_url = 'https://vimeo.com/user%s/likes/page:%d/sort:date' % ( user_id, idx + 1) webpage = self._download_webpage( page_url, user_id, note='Downloading page %d/%d' % (idx + 1, page_count)) video_list = self._search_regex( r'(?s)<ol class="js-browse_list[^"]+"[^>]*>(.*?)</ol>', webpage, 'video content') paths = re.findall( r'<li[^>]*>\s*<a\s+href="([^"]+)"', video_list) for path in paths: yield { '_type': 'url', 'url': compat_urlparse.urljoin(page_url, path), } pl = InAdvancePagedList(_get_page, page_count, PAGE_SIZE) return { '_type': 'playlist', 'id': 'user%s_likes' % user_id, 'title': title, 'description': description, 'entries': pl, }
unlicense
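The extractor above carries a Referer for embedded players by round-tripping it through smuggle_url()/unsmuggle_url(). A minimal stand-alone sketch of that idea, not the youtube-dl implementation (the '__smuggle' fragment key and helper names are made up):

import json
try:
    from urllib.parse import quote, unquote
except ImportError:  # Python 2, matching the compat imports used above
    from urllib import quote, unquote

def smuggle(url, data):
    # Carry extra request data (e.g. a Referer) in the URL fragment.
    return url + '#__smuggle=' + quote(json.dumps(data))

def unsmuggle(smug_url, default=None):
    if '#__smuggle=' not in smug_url:
        return smug_url, default
    url, _, payload = smug_url.partition('#__smuggle=')
    return url, json.loads(unquote(payload))

url, data = unsmuggle(smuggle('https://player.vimeo.com/video/98044508',
                              {'Referer': 'https://example.com/embed'}))
assert url == 'https://player.vimeo.com/video/98044508'
assert data['Referer'] == 'https://example.com/embed'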
tinkerinestudio/Tinkerine-Suite
TinkerineSuite/pypy/lib-python/2.7/plat-irix6/WAIT.py
66
5559
# Generated by h2py from /usr/include/sys/wait.h from warnings import warnpy3k warnpy3k("the WAIT module has been removed in Python 3.0", stacklevel=2) del warnpy3k # Included from standards.h def _W_INT(i): return (i) WUNTRACED = 0004 WNOHANG = 0100 _WSTOPPED = 0177 def WIFEXITED(stat): return ((_W_INT(stat)&0377)==0) def WEXITSTATUS(stat): return ((_W_INT(stat)>>8)&0377) def WTERMSIG(stat): return (_W_INT(stat)&0177) def WSTOPSIG(stat): return ((_W_INT(stat)>>8)&0377) WEXITED = 0001 WTRAPPED = 0002 WSTOPPED = 0004 WCONTINUED = 0010 WNOWAIT = 0200 WOPTMASK = (WEXITED|WTRAPPED|WSTOPPED|WCONTINUED|WNOHANG|WNOWAIT) WSTOPFLG = 0177 WCONTFLG = 0177777 WCOREFLAG = 0200 WSIGMASK = 0177 def WWORD(stat): return (_W_INT(stat)&0177777) def WIFCONTINUED(stat): return (WWORD(stat)==WCONTFLG) def WCOREDUMP(stat): return (_W_INT(stat) & WCOREFLAG) # Included from sys/types.h # Included from sgidefs.h _MIPS_ISA_MIPS1 = 1 _MIPS_ISA_MIPS2 = 2 _MIPS_ISA_MIPS3 = 3 _MIPS_ISA_MIPS4 = 4 _MIPS_SIM_ABI32 = 1 _MIPS_SIM_NABI32 = 2 _MIPS_SIM_ABI64 = 3 P_MYID = (-1) P_MYHOSTID = (-1) # Included from sys/bsd_types.h # Included from sys/mkdev.h ONBITSMAJOR = 7 ONBITSMINOR = 8 OMAXMAJ = 0x7f OMAXMIN = 0xff NBITSMAJOR = 14 NBITSMINOR = 18 MAXMAJ = 0x1ff MAXMIN = 0x3ffff OLDDEV = 0 NEWDEV = 1 MKDEV_VER = NEWDEV def major(dev): return __major(MKDEV_VER, dev) def minor(dev): return __minor(MKDEV_VER, dev) # Included from sys/select.h FD_SETSIZE = 1024 __NBBY = 8 # Included from string.h NULL = 0L NBBY = 8 # Included from sys/procset.h P_INITPID = 1 P_INITUID = 0 P_INITPGID = 0 # Included from sys/signal.h SIGHUP = 1 SIGINT = 2 SIGQUIT = 3 SIGILL = 4 SIGTRAP = 5 SIGIOT = 6 SIGABRT = 6 SIGEMT = 7 SIGFPE = 8 SIGKILL = 9 SIGBUS = 10 SIGSEGV = 11 SIGSYS = 12 SIGPIPE = 13 SIGALRM = 14 SIGTERM = 15 SIGUSR1 = 16 SIGUSR2 = 17 SIGCLD = 18 SIGCHLD = 18 SIGPWR = 19 SIGWINCH = 20 SIGURG = 21 SIGPOLL = 22 SIGIO = 22 SIGSTOP = 23 SIGTSTP = 24 SIGCONT = 25 SIGTTIN = 26 SIGTTOU = 27 SIGVTALRM = 28 SIGPROF = 29 SIGXCPU = 30 SIGXFSZ = 31 SIG32 = 32 SIGCKPT = 33 SIGRTMIN = 49 SIGRTMAX = 64 SIGPTINTR = 47 SIGPTRESCHED = 48 __sigargs = int SIGEV_NONE = 128 SIGEV_SIGNAL = 129 SIGEV_CALLBACK = 130 # Included from sys/siginfo.h ILL_ILLOPC = 1 ILL_ILLOPN = 2 ILL_ILLADR = 3 ILL_ILLTRP = 4 ILL_PRVOPC = 5 ILL_PRVREG = 6 ILL_COPROC = 7 ILL_BADSTK = 8 NSIGILL = 8 FPE_INTDIV = 1 FPE_INTOVF = 2 FPE_FLTDIV = 3 FPE_FLTOVF = 4 FPE_FLTUND = 5 FPE_FLTRES = 6 FPE_FLTINV = 7 FPE_FLTSUB = 8 NSIGFPE = 8 SEGV_MAPERR = 1 SEGV_ACCERR = 2 NSIGSEGV = 2 BUS_ADRALN = 1 BUS_ADRERR = 2 BUS_OBJERR = 3 NSIGBUS = 3 TRAP_BRKPT = 1 TRAP_TRACE = 2 NSIGTRAP = 2 CLD_EXITED = 1 CLD_KILLED = 2 CLD_DUMPED = 3 CLD_TRAPPED = 4 CLD_STOPPED = 5 CLD_CONTINUED = 6 NSIGCLD = 6 POLL_IN = 1 POLL_OUT = 2 POLL_MSG = 3 POLL_ERR = 4 POLL_PRI = 5 POLL_HUP = 6 NSIGPOLL = 6 SI_MAXSZ = 128 SI_USER = 0 SI_KILL = SI_USER SI_QUEUE = -1 SI_ASYNCIO = -2 SI_TIMER = -3 SI_MESGQ = -4 SIG_NOP = 0 SIG_BLOCK = 1 SIG_UNBLOCK = 2 SIG_SETMASK = 3 SIG_SETMASK32 = 256 SA_ONSTACK = 0x00000001 SA_RESETHAND = 0x00000002 SA_RESTART = 0x00000004 SA_SIGINFO = 0x00000008 SA_NODEFER = 0x00000010 SA_NOCLDWAIT = 0x00010000 SA_NOCLDSTOP = 0x00020000 _SA_BSDCALL = 0x10000000 MINSIGSTKSZ = 512 SIGSTKSZ = 8192 SS_ONSTACK = 0x00000001 SS_DISABLE = 0x00000002 # Included from sys/ucontext.h NGREG = 36 NGREG = 37 GETCONTEXT = 0 SETCONTEXT = 1 UC_SIGMASK = 001 UC_STACK = 002 UC_CPU = 004 UC_MAU = 010 UC_MCONTEXT = (UC_CPU|UC_MAU) UC_ALL = (UC_SIGMASK|UC_STACK|UC_MCONTEXT) CTX_R0 = 0 CTX_AT = 1 CTX_V0 = 2 CTX_V1 = 3 CTX_A0 = 4 CTX_A1 = 
5 CTX_A2 = 6 CTX_A3 = 7 CTX_T0 = 8 CTX_T1 = 9 CTX_T2 = 10 CTX_T3 = 11 CTX_T4 = 12 CTX_T5 = 13 CTX_T6 = 14 CTX_T7 = 15 CTX_A4 = 8 CTX_A5 = 9 CTX_A6 = 10 CTX_A7 = 11 CTX_T0 = 12 CTX_T1 = 13 CTX_T2 = 14 CTX_T3 = 15 CTX_S0 = 16 CTX_S1 = 17 CTX_S2 = 18 CTX_S3 = 19 CTX_S4 = 20 CTX_S5 = 21 CTX_S6 = 22 CTX_S7 = 23 CTX_T8 = 24 CTX_T9 = 25 CTX_K0 = 26 CTX_K1 = 27 CTX_GP = 28 CTX_SP = 29 CTX_S8 = 30 CTX_RA = 31 CTX_MDLO = 32 CTX_MDHI = 33 CTX_CAUSE = 34 CTX_EPC = 35 CTX_SR = 36 CXT_R0 = CTX_R0 CXT_AT = CTX_AT CXT_V0 = CTX_V0 CXT_V1 = CTX_V1 CXT_A0 = CTX_A0 CXT_A1 = CTX_A1 CXT_A2 = CTX_A2 CXT_A3 = CTX_A3 CXT_T0 = CTX_T0 CXT_T1 = CTX_T1 CXT_T2 = CTX_T2 CXT_T3 = CTX_T3 CXT_T4 = CTX_T4 CXT_T5 = CTX_T5 CXT_T6 = CTX_T6 CXT_T7 = CTX_T7 CXT_S0 = CTX_S0 CXT_S1 = CTX_S1 CXT_S2 = CTX_S2 CXT_S3 = CTX_S3 CXT_S4 = CTX_S4 CXT_S5 = CTX_S5 CXT_S6 = CTX_S6 CXT_S7 = CTX_S7 CXT_T8 = CTX_T8 CXT_T9 = CTX_T9 CXT_K0 = CTX_K0 CXT_K1 = CTX_K1 CXT_GP = CTX_GP CXT_SP = CTX_SP CXT_S8 = CTX_S8 CXT_RA = CTX_RA CXT_MDLO = CTX_MDLO CXT_MDHI = CTX_MDHI CXT_CAUSE = CTX_CAUSE CXT_EPC = CTX_EPC CXT_SR = CTX_SR SV_ONSTACK = 0x0001 SV_INTERRUPT = 0x0002 NUMBSDSIGS = (32) def sigmask(sig): return (1L << ((sig)-1)) def sigmask(sig): return (1L << ((sig)-1)) SIG_ERR = (-1) SIG_IGN = (1) SIG_HOLD = (2) SIG_DFL = (0) NSIG = 65 MAXSIG = (NSIG-1) NUMSIGS = (NSIG-1) BRK_USERBP = 0 BRK_KERNELBP = 1 BRK_ABORT = 2 BRK_BD_TAKEN = 3 BRK_BD_NOTTAKEN = 4 BRK_SSTEPBP = 5 BRK_OVERFLOW = 6 BRK_DIVZERO = 7 BRK_RANGE = 8 BRK_PSEUDO_OP_BIT = 0x80 BRK_PSEUDO_OP_MAX = 0x3 BRK_CACHE_SYNC = 0x80 BRK_SWASH_FLUSH = 0x81 BRK_SWASH_SWTCH = 0x82 BRK_MULOVF = 1023 # Included from sys/resource.h PRIO_MIN = -20 PRIO_MAX = 20 PRIO_PROCESS = 0 PRIO_PGRP = 1 PRIO_USER = 2 RUSAGE_SELF = 0 RUSAGE_CHILDREN = -1 RLIMIT_CPU = 0 RLIMIT_FSIZE = 1 RLIMIT_DATA = 2 RLIMIT_STACK = 3 RLIMIT_CORE = 4 RLIMIT_NOFILE = 5 RLIMIT_VMEM = 6 RLIMIT_RSS = 7 RLIMIT_AS = RLIMIT_VMEM RLIM_NLIMITS = 8 RLIM32_INFINITY = 0x7fffffff RLIM_INFINITY = 0x7fffffff
agpl-3.0
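The generated wrappers above decode a 16-bit wait status word. A small self-contained sketch of the same decoding (written with Python 3 octal literals; WIFEXITED/WEXITSTATUS in the module behave identically), assuming a status as returned by os.wait(), where the exit code sits in bits 8-15 and the low byte is zero for a normal exit:

def wifexited(stat):
    # mirrors WIFEXITED: low byte zero means the child exited normally
    return (stat & 0o377) == 0

def wexitstatus(stat):
    # mirrors WEXITSTATUS: the exit code lives in bits 8-15
    return (stat >> 8) & 0o377

status = 3 << 8          # child exited normally with code 3
assert wifexited(status)
assert wexitstatus(status) == 3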
NetApp/cinder
cinder/volume/drivers/hitachi/hbsd_iscsi.py
5
16561
# Copyright (C) 2014, Hitachi, Ltd. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """ iSCSI Cinder volume driver for Hitachi storage. """ import os import threading from oslo_config import cfg from oslo_log import log as logging import six from cinder import exception from cinder.i18n import _LE, _LI from cinder import interface from cinder import utils import cinder.volume.driver from cinder.volume.drivers.hitachi import hbsd_basiclib as basic_lib from cinder.volume.drivers.hitachi import hbsd_common as common LOG = logging.getLogger(__name__) CHAP_METHOD = ('None', 'CHAP None', 'CHAP') volume_opts = [ cfg.BoolOpt('hitachi_add_chap_user', default=False, help='Add CHAP user'), cfg.StrOpt('hitachi_auth_method', help='iSCSI authentication method'), cfg.StrOpt('hitachi_auth_user', default='%sCHAP-user' % basic_lib.NAME_PREFIX, help='iSCSI authentication username'), cfg.StrOpt('hitachi_auth_password', default='%sCHAP-password' % basic_lib.NAME_PREFIX, help='iSCSI authentication password', secret=True), ] CONF = cfg.CONF CONF.register_opts(volume_opts) @interface.volumedriver class HBSDISCSIDriver(cinder.volume.driver.ISCSIDriver): VERSION = common.VERSION # ThirdPartySystems wiki page CI_WIKI_NAME = ["Hitachi_HBSD_CI", "Hitachi_HBSD2_CI"] def __init__(self, *args, **kwargs): os.environ['LANG'] = 'C' super(HBSDISCSIDriver, self).__init__(*args, **kwargs) self.db = kwargs.get('db') self.common = None self.configuration.append_config_values(common.volume_opts) self._stats = {} self.context = None self.do_setup_status = threading.Event() def _check_param(self): self.configuration.append_config_values(volume_opts) if (self.configuration.hitachi_auth_method and self.configuration.hitachi_auth_method not in CHAP_METHOD): raise exception.HBSDError( message=basic_lib.output_err(601, param='hitachi_auth_method')) if self.configuration.hitachi_auth_method == 'None': self.configuration.hitachi_auth_method = None for opt in volume_opts: getattr(self.configuration, opt.name) def check_param(self): try: self.common.check_param() self._check_param() except exception.HBSDError: raise except Exception as ex: raise exception.HBSDError( message=basic_lib.output_err(601, param=six.text_type(ex))) def output_param_to_log(self): lock = basic_lib.get_process_lock(self.common.system_lock_file) with lock: self.common.output_param_to_log('iSCSI') for opt in volume_opts: if not opt.secret: value = getattr(self.configuration, opt.name) LOG.info(_LI('\t%(name)-35s : %(value)s'), {'name': opt.name, 'value': value}) def _delete_lun_iscsi(self, hostgroups, ldev): try: self.common.command.comm_delete_lun_iscsi(hostgroups, ldev) except exception.HBSDNotFound: LOG.warning(basic_lib.set_msg(301, ldev=ldev)) def _add_target(self, hostgroups, ldev): self.common.add_lun('autargetmap', hostgroups, ldev) def _add_initiator(self, hgs, port, gid, host_iqn): self.common.command.comm_add_initiator(port, gid, host_iqn) hgs.append({'port': port, 'gid': int(gid), 'detected': True}) LOG.debug("Create iSCSI target for %s", hgs) def 
_get_unused_gid_iscsi(self, port): group_range = self.configuration.hitachi_group_range if not group_range: group_range = basic_lib.DEFAULT_GROUP_RANGE return self.common.command.get_unused_gid_iscsi(group_range, port) def _delete_iscsi_target(self, port, target_no, target_alias): ret, _stdout, _stderr = self.common.command.delete_iscsi_target( port, target_no, target_alias) if ret: LOG.warning(basic_lib.set_msg( 307, port=port, tno=target_no, alias=target_alias)) def _delete_chap_user(self, port): ret, _stdout, _stderr = self.common.command.delete_chap_user(port) if ret: LOG.warning(basic_lib.set_msg( 303, user=self.configuration.hitachi_auth_user)) def _get_hostgroup_info_iscsi(self, hgs, host_iqn): return self.common.command.comm_get_hostgroup_info_iscsi( hgs, host_iqn, self.configuration.hitachi_target_ports) def _discovery_iscsi_target(self, hostgroups): for hostgroup in hostgroups: ip_addr, ip_port = self.common.command.comm_get_iscsi_ip( hostgroup['port']) target_iqn = self.common.command.comm_get_target_iqn( hostgroup['port'], hostgroup['gid']) hostgroup['ip_addr'] = ip_addr hostgroup['ip_port'] = ip_port hostgroup['target_iqn'] = target_iqn LOG.debug("ip_addr=%(addr)s ip_port=%(port)s target_iqn=%(iqn)s", {'addr': ip_addr, 'port': ip_port, 'iqn': target_iqn}) def _fill_groups(self, hgs, ports, target_iqn, target_alias, add_iqn): for port in ports: added_hostgroup = False added_user = False LOG.debug('Create target (hgs: %(hgs)s port: %(port)s ' 'target_iqn: %(tiqn)s target_alias: %(alias)s ' 'add_iqn: %(aiqn)s)', {'hgs': hgs, 'port': port, 'tiqn': target_iqn, 'alias': target_alias, 'aiqn': add_iqn}) gid = self.common.command.get_gid_from_targetiqn( target_iqn, target_alias, port) if gid is None: for retry_cnt in basic_lib.DEFAULT_TRY_RANGE: gid = None try: gid = self._get_unused_gid_iscsi(port) self.common.command.comm_add_hostgrp_iscsi( port, gid, target_alias, target_iqn) added_hostgroup = True except exception.HBSDNotFound: LOG.warning(basic_lib.set_msg(312, resource='GID')) continue except Exception as ex: LOG.warning(basic_lib.set_msg( 309, port=port, alias=target_alias, reason=ex)) break else: LOG.debug('Completed to add target' '(port: %(port)s gid: %(gid)d)', {'port': port, 'gid': gid}) break if gid is None: LOG.error(_LE('Failed to add target(port: %s)'), port) continue try: if added_hostgroup: if self.configuration.hitachi_auth_method: added_user = self.common.command.set_chap_authention( port, gid) self.common.command.comm_set_hostgrp_reportportal( port, target_alias) self._add_initiator(hgs, port, gid, add_iqn) except Exception as ex: LOG.warning(basic_lib.set_msg( 316, port=port, reason=ex)) if added_hostgroup: if added_user: self._delete_chap_user(port) self._delete_iscsi_target(port, gid, target_alias) def add_hostgroup_core(self, hgs, ports, target_iqn, target_alias, add_iqn): if ports: self._fill_groups(hgs, ports, target_iqn, target_alias, add_iqn) def add_hostgroup_master(self, hgs, master_iqn, host_ip, security_ports): target_ports = self.configuration.hitachi_target_ports group_request = self.configuration.hitachi_group_request target_alias = '%s%s' % (basic_lib.NAME_PREFIX, host_ip) if target_ports and group_request: target_iqn = '%s.target' % master_iqn diff_ports = [] for port in security_ports: for hostgroup in hgs: if hostgroup['port'] == port: break else: diff_ports.append(port) self.add_hostgroup_core(hgs, diff_ports, target_iqn, target_alias, master_iqn) if not hgs: raise exception.HBSDError(message=basic_lib.output_err(649)) def add_hostgroup(self): 
properties = utils.brick_get_connector_properties() if 'initiator' not in properties: raise exception.HBSDError( message=basic_lib.output_err(650, resource='HBA')) LOG.debug("initiator: %s", properties['initiator']) hostgroups = [] security_ports = self._get_hostgroup_info_iscsi( hostgroups, properties['initiator']) self.add_hostgroup_master(hostgroups, properties['initiator'], properties['ip'], security_ports) def _get_properties(self, volume, hostgroups): conf = self.configuration properties = {} self._discovery_iscsi_target(hostgroups) hostgroup = hostgroups[0] properties['target_discovered'] = True properties['target_portal'] = "%s:%s" % (hostgroup['ip_addr'], hostgroup['ip_port']) properties['target_iqn'] = hostgroup['target_iqn'] properties['target_lun'] = hostgroup['lun'] if conf.hitachi_auth_method: properties['auth_method'] = 'CHAP' properties['auth_username'] = conf.hitachi_auth_user properties['auth_password'] = conf.hitachi_auth_password return properties def do_setup(self, context): self.context = context self.common = common.HBSDCommon(self.configuration, self, context, self.db) self.check_param() self.common.create_lock_file() self.common.command.connect_storage() lock = basic_lib.get_process_lock(self.common.service_lock_file) with lock: self.add_hostgroup() self.output_param_to_log() self.do_setup_status.set() def check_for_setup_error(self): pass def extend_volume(self, volume, new_size): self.do_setup_status.wait() self.common.extend_volume(volume, new_size) def get_volume_stats(self, refresh=False): if refresh: if self.do_setup_status.isSet(): self.common.output_backend_available_once() _stats = self.common.update_volume_stats("iSCSI") if _stats: self._stats = _stats return self._stats def create_volume(self, volume): self.do_setup_status.wait() metadata = self.common.create_volume(volume) return metadata def delete_volume(self, volume): self.do_setup_status.wait() self.common.delete_volume(volume) def create_snapshot(self, snapshot): self.do_setup_status.wait() metadata = self.common.create_snapshot(snapshot) return metadata def delete_snapshot(self, snapshot): self.do_setup_status.wait() self.common.delete_snapshot(snapshot) def create_cloned_volume(self, volume, src_vref): self.do_setup_status.wait() metadata = self.common.create_cloned_volume(volume, src_vref) return metadata def create_volume_from_snapshot(self, volume, snapshot): self.do_setup_status.wait() metadata = self.common.create_volume_from_snapshot(volume, snapshot) return metadata def _initialize_connection(self, ldev, connector, src_hgs=None): LOG.debug("Call _initialize_connection " "(config_group: %(group)s ldev: %(ldev)d)", {'group': self.configuration.config_group, 'ldev': ldev}) if src_hgs: hostgroups = src_hgs[:] else: hostgroups = [] security_ports = self._get_hostgroup_info_iscsi( hostgroups, connector['initiator']) self.add_hostgroup_master(hostgroups, connector['initiator'], connector['ip'], security_ports) self._add_target(hostgroups, ldev) return hostgroups def initialize_connection(self, volume, connector): self.do_setup_status.wait() ldev = self.common.get_ldev(volume) if ldev is None: raise exception.HBSDError( message=basic_lib.output_err(619, volume_id=volume['id'])) self.common.add_volinfo(ldev, volume['id']) with self.common.volume_info[ldev]['lock'],\ self.common.volume_info[ldev]['in_use']: hostgroups = self._initialize_connection(ldev, connector) protocol = 'iscsi' properties = self._get_properties(volume, hostgroups) LOG.debug('Initialize volume_info: %s', self.common.volume_info) 
LOG.debug('HFCDrv: properties=%s', properties) return { 'driver_volume_type': protocol, 'data': properties } def _terminate_connection(self, ldev, connector, src_hgs): LOG.debug("Call _terminate_connection(config_group: %s)", self.configuration.config_group) hostgroups = src_hgs[:] self._delete_lun_iscsi(hostgroups, ldev) LOG.debug("*** _terminate_ ***") def terminate_connection(self, volume, connector, **kwargs): self.do_setup_status.wait() ldev = self.common.get_ldev(volume) if ldev is None: LOG.warning(basic_lib.set_msg(302, volume_id=volume['id'])) return if 'initiator' not in connector: raise exception.HBSDError( message=basic_lib.output_err(650, resource='HBA')) hostgroups = [] self._get_hostgroup_info_iscsi(hostgroups, connector['initiator']) if not hostgroups: raise exception.HBSDError(message=basic_lib.output_err(649)) self.common.add_volinfo(ldev, volume['id']) with self.common.volume_info[ldev]['lock'],\ self.common.volume_info[ldev]['in_use']: self._terminate_connection(ldev, connector, hostgroups) def create_export(self, context, volume, connector): pass def ensure_export(self, context, volume): pass def remove_export(self, context, volume): pass def pair_initialize_connection(self, unused_ldev): pass def pair_terminate_connection(self, unused_ldev): pass def copy_volume_to_image(self, context, volume, image_service, image_meta): self.do_setup_status.wait() if volume['volume_attachment']: desc = 'volume %s' % volume['id'] raise exception.HBSDError( message=basic_lib.output_err(660, desc=desc)) super(HBSDISCSIDriver, self).copy_volume_to_image(context, volume, image_service, image_meta) def manage_existing(self, volume, existing_ref): return self.common.manage_existing(volume, existing_ref) def manage_existing_get_size(self, volume, existing_ref): self.do_setup_status.wait() return self.common.manage_existing_get_size(volume, existing_ref) def unmanage(self, volume): self.do_setup_status.wait() self.common.unmanage(volume)
apache-2.0
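A stand-alone sketch of the rule _check_param() enforces above: hitachi_auth_method must be one of CHAP_METHOD, and the literal string 'None' is normalised to Python None. The helper name here is made up:

CHAP_METHOD = ('None', 'CHAP None', 'CHAP')

def normalize_auth_method(value):
    # invalid values are rejected; 'None' collapses to Python None
    if value and value not in CHAP_METHOD:
        raise ValueError('param: hitachi_auth_method is invalid: %r' % (value,))
    return None if value == 'None' else value

assert normalize_auth_method(None) is None
assert normalize_auth_method('None') is None
assert normalize_auth_method('CHAP') == 'CHAP'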
r1cc4rdo/alfred-workflows
timezones/local_pytz/reference.py
839
3649
'''
Reference tzinfo implementations from the Python docs.
Used for testing against as they are only correct for the years
1987 to 2006. Do not use these for real code.
'''

from datetime import tzinfo, timedelta, datetime
from pytz import utc, UTC, HOUR, ZERO

# A class building tzinfo objects for fixed-offset time zones.
# Note that FixedOffset(0, "UTC") is a different way to build a
# UTC tzinfo object.

class FixedOffset(tzinfo):
    """Fixed offset in minutes east from UTC."""

    def __init__(self, offset, name):
        self.__offset = timedelta(minutes = offset)
        self.__name = name

    def utcoffset(self, dt):
        return self.__offset

    def tzname(self, dt):
        return self.__name

    def dst(self, dt):
        return ZERO

# A class capturing the platform's idea of local time.

import time as _time

STDOFFSET = timedelta(seconds = -_time.timezone)
if _time.daylight:
    DSTOFFSET = timedelta(seconds = -_time.altzone)
else:
    DSTOFFSET = STDOFFSET

DSTDIFF = DSTOFFSET - STDOFFSET

class LocalTimezone(tzinfo):

    def utcoffset(self, dt):
        if self._isdst(dt):
            return DSTOFFSET
        else:
            return STDOFFSET

    def dst(self, dt):
        if self._isdst(dt):
            return DSTDIFF
        else:
            return ZERO

    def tzname(self, dt):
        return _time.tzname[self._isdst(dt)]

    def _isdst(self, dt):
        tt = (dt.year, dt.month, dt.day,
              dt.hour, dt.minute, dt.second,
              dt.weekday(), 0, -1)
        stamp = _time.mktime(tt)
        tt = _time.localtime(stamp)
        return tt.tm_isdst > 0

Local = LocalTimezone()

# A complete implementation of current DST rules for major US time zones.

def first_sunday_on_or_after(dt):
    days_to_go = 6 - dt.weekday()
    if days_to_go:
        dt += timedelta(days_to_go)
    return dt

# In the US, DST starts at 2am (standard time) on the first Sunday in April.
DSTSTART = datetime(1, 4, 1, 2)
# and ends at 2am (DST time; 1am standard time) on the last Sunday of Oct.
# which is the first Sunday on or after Oct 25.
DSTEND = datetime(1, 10, 25, 1)

class USTimeZone(tzinfo):

    def __init__(self, hours, reprname, stdname, dstname):
        self.stdoffset = timedelta(hours=hours)
        self.reprname = reprname
        self.stdname = stdname
        self.dstname = dstname

    def __repr__(self):
        return self.reprname

    def tzname(self, dt):
        if self.dst(dt):
            return self.dstname
        else:
            return self.stdname

    def utcoffset(self, dt):
        return self.stdoffset + self.dst(dt)

    def dst(self, dt):
        if dt is None or dt.tzinfo is None:
            # An exception may be sensible here, in one or both cases.
            # It depends on how you want to treat them. The default
            # fromutc() implementation (called by the default astimezone()
            # implementation) passes a datetime with dt.tzinfo is self.
            return ZERO
        assert dt.tzinfo is self

        # Find first Sunday in April & the last in October.
        start = first_sunday_on_or_after(DSTSTART.replace(year=dt.year))
        end = first_sunday_on_or_after(DSTEND.replace(year=dt.year))

        # Can't compare naive to aware objects, so strip the timezone from
        # dt first.
        if start <= dt.replace(tzinfo=None) < end:
            return HOUR
        else:
            return ZERO

Eastern  = USTimeZone(-5, "Eastern",  "EST", "EDT")
Central  = USTimeZone(-6, "Central",  "CST", "CDT")
Mountain = USTimeZone(-7, "Mountain", "MST", "MDT")
Pacific  = USTimeZone(-8, "Pacific",  "PST", "PDT")
gpl-3.0
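A usage sketch for the US zones defined in the module above (Eastern comes from that module; the rules are only correct for 1987-2006, as its docstring warns):

from datetime import datetime, timedelta

summer = datetime(2002, 7, 1, 12, 0, tzinfo=Eastern)   # DST in effect
winter = datetime(2002, 1, 1, 12, 0, tzinfo=Eastern)   # standard time
assert summer.utcoffset() == timedelta(hours=-4) and summer.tzname() == 'EDT'
assert winter.utcoffset() == timedelta(hours=-5) and winter.tzname() == 'EST'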
rohitwaghchaure/erpnext_smart
erpnext/stock/doctype/delivery_note/test_delivery_note.py
37
7463
# Copyright (c) 2013, Web Notes Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt

from __future__ import unicode_literals
import unittest
import frappe
import frappe.defaults
from frappe.utils import cint
from erpnext.stock.doctype.purchase_receipt.test_purchase_receipt \
    import get_gl_entries, set_perpetual_inventory, test_records as pr_test_records

def _insert_purchase_receipt(item_code=None):
    if not item_code:
        item_code = pr_test_records[0]["purchase_receipt_details"][0]["item_code"]

    pr = frappe.copy_doc(pr_test_records[0])
    pr.get("purchase_receipt_details")[0].item_code = item_code
    pr.insert()
    pr.submit()

class TestDeliveryNote(unittest.TestCase):
    def test_over_billing_against_dn(self):
        self.clear_stock_account_balance()
        _insert_purchase_receipt()

        from erpnext.stock.doctype.delivery_note.delivery_note import make_sales_invoice

        _insert_purchase_receipt()
        dn = frappe.copy_doc(test_records[0]).insert()

        self.assertRaises(frappe.ValidationError, make_sales_invoice, dn.name)

        dn = frappe.get_doc("Delivery Note", dn.name)
        dn.submit()
        si = make_sales_invoice(dn.name)
        self.assertEquals(len(si.get("entries")), len(dn.get("delivery_note_details")))

        # modify amount
        si.get("entries")[0].rate = 200
        self.assertRaises(frappe.ValidationError, frappe.get_doc(si).insert)

    def test_delivery_note_no_gl_entry(self):
        self.clear_stock_account_balance()
        set_perpetual_inventory(0)
        self.assertEqual(cint(frappe.defaults.get_global_default("auto_accounting_for_stock")), 0)

        _insert_purchase_receipt()

        dn = frappe.copy_doc(test_records[0])
        dn.insert()
        dn.submit()

        stock_value, stock_value_difference = frappe.db.get_value("Stock Ledger Entry",
            {"voucher_type": "Delivery Note", "voucher_no": dn.name,
                "item_code": "_Test Item"}, ["stock_value", "stock_value_difference"])
        self.assertEqual(stock_value, 0)
        self.assertEqual(stock_value_difference, -375)

        self.assertFalse(get_gl_entries("Delivery Note", dn.name))

    def test_delivery_note_gl_entry(self):
        self.clear_stock_account_balance()
        set_perpetual_inventory()
        self.assertEqual(cint(frappe.defaults.get_global_default("auto_accounting_for_stock")), 1)
        frappe.db.set_value("Item", "_Test Item", "valuation_method", "FIFO")

        _insert_purchase_receipt()

        dn = frappe.copy_doc(test_records[0])
        dn.get("delivery_note_details")[0].expense_account = "Cost of Goods Sold - _TC"
        dn.get("delivery_note_details")[0].cost_center = "Main - _TC"

        stock_in_hand_account = frappe.db.get_value("Account",
            {"master_name": dn.get("delivery_note_details")[0].warehouse})

        from erpnext.accounts.utils import get_balance_on
        prev_bal = get_balance_on(stock_in_hand_account, dn.posting_date)

        dn.insert()
        dn.submit()

        gl_entries = get_gl_entries("Delivery Note", dn.name)
        self.assertTrue(gl_entries)

        expected_values = {
            stock_in_hand_account: [0.0, 375.0],
            "Cost of Goods Sold - _TC": [375.0, 0.0]
        }
        for i, gle in enumerate(gl_entries):
            self.assertEquals([gle.debit, gle.credit], expected_values.get(gle.account))

        # check stock in hand balance
        bal = get_balance_on(stock_in_hand_account, dn.posting_date)
        self.assertEquals(bal, prev_bal - 375.0)

        # back dated purchase receipt
        pr = frappe.copy_doc(pr_test_records[0])
        pr.posting_date = "2013-01-01"
        pr.get("purchase_receipt_details")[0].rate = 100
        pr.get("purchase_receipt_details")[0].base_amount = 100

        pr.insert()
        pr.submit()

        gl_entries = get_gl_entries("Delivery Note", dn.name)
        self.assertTrue(gl_entries)

        expected_values = {
            stock_in_hand_account: [0.0, 666.67],
            "Cost of Goods Sold - _TC": [666.67, 0.0]
        }
        for i, gle in enumerate(gl_entries):
            self.assertEquals([gle.debit, gle.credit], expected_values.get(gle.account))

        dn.cancel()
        self.assertFalse(get_gl_entries("Delivery Note", dn.name))
        set_perpetual_inventory(0)

    def test_delivery_note_gl_entry_packing_item(self):
        self.clear_stock_account_balance()
        set_perpetual_inventory()

        _insert_purchase_receipt()
        _insert_purchase_receipt("_Test Item Home Desktop 100")

        dn = frappe.copy_doc(test_records[0])
        dn.get("delivery_note_details")[0].item_code = "_Test Sales BOM Item"
        dn.get("delivery_note_details")[0].qty = 1

        stock_in_hand_account = frappe.db.get_value("Account",
            {"master_name": dn.get("delivery_note_details")[0].warehouse})

        from erpnext.accounts.utils import get_balance_on
        prev_bal = get_balance_on(stock_in_hand_account, dn.posting_date)

        dn.insert()
        dn.submit()

        gl_entries = get_gl_entries("Delivery Note", dn.name)
        self.assertTrue(gl_entries)

        expected_values = {
            stock_in_hand_account: [0.0, 525],
            "Cost of Goods Sold - _TC": [525.0, 0.0]
        }
        for i, gle in enumerate(gl_entries):
            self.assertEquals([gle.debit, gle.credit], expected_values.get(gle.account))

        # check stock in hand balance
        bal = get_balance_on(stock_in_hand_account, dn.posting_date)
        self.assertEquals(bal, prev_bal - 525.0)

        dn.cancel()
        self.assertFalse(get_gl_entries("Delivery Note", dn.name))
        set_perpetual_inventory(0)

    def test_serialized(self):
        from erpnext.stock.doctype.stock_entry.test_stock_entry import make_serialized_item
        from erpnext.stock.doctype.serial_no.serial_no import get_serial_nos

        se = make_serialized_item()
        serial_nos = get_serial_nos(se.get("mtn_details")[0].serial_no)

        dn = frappe.copy_doc(test_records[0])
        dn.get("delivery_note_details")[0].item_code = "_Test Serialized Item With Series"
        dn.get("delivery_note_details")[0].qty = 1
        dn.get("delivery_note_details")[0].serial_no = serial_nos[0]
        dn.insert()
        dn.submit()

        self.assertEquals(frappe.db.get_value("Serial No", serial_nos[0], "status"), "Delivered")
        self.assertFalse(frappe.db.get_value("Serial No", serial_nos[0], "warehouse"))
        self.assertEquals(frappe.db.get_value("Serial No", serial_nos[0],
            "delivery_document_no"), dn.name)

        return dn

    def test_serialized_cancel(self):
        from erpnext.stock.doctype.serial_no.serial_no import get_serial_nos
        dn = self.test_serialized()
        dn.cancel()

        serial_nos = get_serial_nos(dn.get("delivery_note_details")[0].serial_no)

        self.assertEquals(frappe.db.get_value("Serial No", serial_nos[0], "status"), "Available")
        self.assertEquals(frappe.db.get_value("Serial No", serial_nos[0], "warehouse"),
            "_Test Warehouse - _TC")
        self.assertFalse(frappe.db.get_value("Serial No", serial_nos[0],
            "delivery_document_no"))

    def test_serialize_status(self):
        from erpnext.stock.doctype.serial_no.serial_no import SerialNoStatusError, get_serial_nos
        from erpnext.stock.doctype.stock_entry.test_stock_entry import make_serialized_item

        se = make_serialized_item()
        serial_nos = get_serial_nos(se.get("mtn_details")[0].serial_no)

        sr = frappe.get_doc("Serial No", serial_nos[0])
        sr.status = "Not Available"
        sr.save()

        dn = frappe.copy_doc(test_records[0])
        dn.get("delivery_note_details")[0].item_code = "_Test Serialized Item With Series"
        dn.get("delivery_note_details")[0].qty = 1
        dn.get("delivery_note_details")[0].serial_no = serial_nos[0]
        dn.insert()

        self.assertRaises(SerialNoStatusError, dn.submit)

    def clear_stock_account_balance(self):
        frappe.db.sql("""delete from `tabBin`""")
        frappe.db.sql("delete from `tabStock Ledger Entry`")
        frappe.db.sql("delete from `tabGL Entry`")

test_dependencies = ["Sales BOM"]

test_records = frappe.get_test_records('Delivery Note')
agpl-3.0
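The GL checks in these tests all follow one double-entry pattern; condensed below as a sketch that assumes the frappe test harness and the expected_values mapping defined in the tests above (not runnable on its own):

dn = frappe.copy_doc(test_records[0])
dn.insert()
dn.submit()
for gle in get_gl_entries("Delivery Note", dn.name):
    # each row debits the expense account and credits stock-in-hand
    # by the same valuation amount
    assert [gle.debit, gle.credit] == expected_values.get(gle.account)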
lukaszb/django-guardian
guardian/testapp/tests/test_other.py
1
13332
from unittest import mock import unittest from django.contrib.auth import get_user_model from django.contrib.auth.models import AbstractUser from django.contrib.auth.models import AnonymousUser from django.contrib.auth.models import Group from django.contrib.auth.models import Permission from django.contrib.contenttypes.models import ContentType from django.core.exceptions import ValidationError from django.test import TestCase import guardian from guardian.backends import ObjectPermissionBackend from guardian.compat import get_user_model_path from guardian.compat import get_user_permission_codename from guardian.exceptions import GuardianError from guardian.exceptions import NotUserNorGroup from guardian.exceptions import ObjectNotPersisted from guardian.exceptions import WrongAppError from guardian.models import GroupObjectPermission from guardian.models import UserObjectPermission from guardian.testapp.tests.conf import TestDataMixin User = get_user_model() user_model_path = get_user_model_path() class UserPermissionTests(TestDataMixin, TestCase): def setUp(self): super().setUp() self.user = User.objects.get(username='jack') self.ctype = ContentType.objects.create( model='bar', app_label='fake-for-guardian-tests') self.obj1 = ContentType.objects.create( model='foo', app_label='guardian-tests') self.obj2 = ContentType.objects.create( model='bar', app_label='guardian-tests') def test_assignement(self): self.assertFalse(self.user.has_perm('change_contenttype', self.ctype)) UserObjectPermission.objects.assign_perm('change_contenttype', self.user, self.ctype) self.assertTrue(self.user.has_perm('change_contenttype', self.ctype)) self.assertTrue(self.user.has_perm('contenttypes.change_contenttype', self.ctype)) def test_assignement_and_remove(self): UserObjectPermission.objects.assign_perm('change_contenttype', self.user, self.ctype) self.assertTrue(self.user.has_perm('change_contenttype', self.ctype)) UserObjectPermission.objects.remove_perm('change_contenttype', self.user, self.ctype) self.assertFalse(self.user.has_perm('change_contenttype', self.ctype)) def test_ctypes(self): UserObjectPermission.objects.assign_perm( 'change_contenttype', self.user, self.obj1) self.assertTrue(self.user.has_perm('change_contenttype', self.obj1)) self.assertFalse(self.user.has_perm('change_contenttype', self.obj2)) UserObjectPermission.objects.remove_perm( 'change_contenttype', self.user, self.obj1) UserObjectPermission.objects.assign_perm( 'change_contenttype', self.user, self.obj2) self.assertTrue(self.user.has_perm('change_contenttype', self.obj2)) self.assertFalse(self.user.has_perm('change_contenttype', self.obj1)) UserObjectPermission.objects.assign_perm( 'change_contenttype', self.user, self.obj1) UserObjectPermission.objects.assign_perm( 'change_contenttype', self.user, self.obj2) self.assertTrue(self.user.has_perm('change_contenttype', self.obj2)) self.assertTrue(self.user.has_perm('change_contenttype', self.obj1)) UserObjectPermission.objects.remove_perm( 'change_contenttype', self.user, self.obj1) UserObjectPermission.objects.remove_perm( 'change_contenttype', self.user, self.obj2) self.assertFalse(self.user.has_perm('change_contenttype', self.obj2)) self.assertFalse(self.user.has_perm('change_contenttype', self.obj1)) def test_assign_perm_validation(self): self.assertRaises(Permission.DoesNotExist, UserObjectPermission.objects.assign_perm, 'change_group', self.user, self.user) group = Group.objects.create(name='test_group_assign_perm_validation') ctype = ContentType.objects.get_for_model(group) 
user_ctype = ContentType.objects.get_for_model(self.user) codename = get_user_permission_codename('change') perm = Permission.objects.get( codename=codename, content_type=user_ctype) create_info = dict( permission=perm, user=self.user, content_type=ctype, object_pk=group.pk ) self.assertRaises(ValidationError, UserObjectPermission.objects.create, **create_info) def test_errors(self): not_saved_user = User(username='not_saved_user') codename = get_user_permission_codename('change') self.assertRaises(ObjectNotPersisted, UserObjectPermission.objects.assign_perm, codename, self.user, not_saved_user) self.assertRaises(ObjectNotPersisted, UserObjectPermission.objects.remove_perm, codename, self.user, not_saved_user) class GroupPermissionTests(TestDataMixin, TestCase): def setUp(self): super().setUp() self.user = User.objects.get(username='jack') self.group, created = Group.objects.get_or_create(name='jackGroup') self.user.groups.add(self.group) self.ctype = ContentType.objects.create( model='bar', app_label='fake-for-guardian-tests') self.obj1 = ContentType.objects.create( model='foo', app_label='guardian-tests') self.obj2 = ContentType.objects.create( model='bar', app_label='guardian-tests') def test_assignement(self): self.assertFalse(self.user.has_perm('change_contenttype', self.ctype)) self.assertFalse(self.user.has_perm('contenttypes.change_contenttype', self.ctype)) GroupObjectPermission.objects.assign_perm('change_contenttype', self.group, self.ctype) self.assertTrue(self.user.has_perm('change_contenttype', self.ctype)) self.assertTrue(self.user.has_perm('contenttypes.change_contenttype', self.ctype)) def test_assignement_and_remove(self): GroupObjectPermission.objects.assign_perm('change_contenttype', self.group, self.ctype) self.assertTrue(self.user.has_perm('change_contenttype', self.ctype)) GroupObjectPermission.objects.remove_perm('change_contenttype', self.group, self.ctype) self.assertFalse(self.user.has_perm('change_contenttype', self.ctype)) def test_ctypes(self): GroupObjectPermission.objects.assign_perm('change_contenttype', self.group, self.obj1) self.assertTrue(self.user.has_perm('change_contenttype', self.obj1)) self.assertFalse(self.user.has_perm('change_contenttype', self.obj2)) GroupObjectPermission.objects.remove_perm('change_contenttype', self.group, self.obj1) GroupObjectPermission.objects.assign_perm('change_contenttype', self.group, self.obj2) self.assertTrue(self.user.has_perm('change_contenttype', self.obj2)) self.assertFalse(self.user.has_perm('change_contenttype', self.obj1)) GroupObjectPermission.objects.assign_perm('change_contenttype', self.group, self.obj1) GroupObjectPermission.objects.assign_perm('change_contenttype', self.group, self.obj2) self.assertTrue(self.user.has_perm('change_contenttype', self.obj2)) self.assertTrue(self.user.has_perm('change_contenttype', self.obj1)) GroupObjectPermission.objects.remove_perm('change_contenttype', self.group, self.obj1) GroupObjectPermission.objects.remove_perm('change_contenttype', self.group, self.obj2) self.assertFalse(self.user.has_perm('change_contenttype', self.obj2)) self.assertFalse(self.user.has_perm('change_contenttype', self.obj1)) def test_assign_perm_validation(self): self.assertRaises(Permission.DoesNotExist, GroupObjectPermission.objects.assign_perm, 'change_user', self.group, self.group) user = User.objects.create(username='testuser') ctype = ContentType.objects.get_for_model(user) perm = Permission.objects.get(codename='change_group') create_info = dict( permission=perm, group=self.group, 
content_type=ctype, object_pk=user.pk ) self.assertRaises(ValidationError, GroupObjectPermission.objects.create, **create_info) def test_errors(self): not_saved_group = Group(name='not_saved_group') self.assertRaises(ObjectNotPersisted, GroupObjectPermission.objects.assign_perm, "change_group", self.group, not_saved_group) self.assertRaises(ObjectNotPersisted, GroupObjectPermission.objects.remove_perm, "change_group", self.group, not_saved_group) class ObjectPermissionBackendTests(TestCase): def setUp(self): self.user = User.objects.create(username='jack') self.backend = ObjectPermissionBackend() def test_attrs(self): self.assertTrue(self.backend.supports_anonymous_user) self.assertTrue(self.backend.supports_object_permissions) self.assertTrue(self.backend.supports_inactive_user) def test_authenticate(self): self.assertEqual( self.backend.authenticate( request={}, username=self.user.username, password=self.user.password ), None ) def test_has_perm_noobj(self): result = self.backend.has_perm(self.user, "change_contenttype") self.assertFalse(result) def test_has_perm_notauthed(self): user = AnonymousUser() self.assertFalse(self.backend.has_perm(user, "change_user", self.user)) def test_has_perm_wrong_app(self): self.assertRaises(WrongAppError, self.backend.has_perm, self.user, "no_app.change_user", self.user) def test_obj_is_not_model(self): for obj in (Group, 666, "String", [2, 1, 5, 7], {}): self.assertFalse(self.backend.has_perm(self.user, "any perm", obj)) def test_not_active_user(self): user = User.objects.create(username='non active user') ctype = ContentType.objects.create( model='bar', app_label='fake-for-guardian-tests') perm = 'change_contenttype' UserObjectPermission.objects.assign_perm(perm, user, ctype) self.assertTrue(self.backend.has_perm(user, perm, ctype)) user.is_active = False user.save() self.assertFalse(self.backend.has_perm(user, perm, ctype)) class GuardianBaseTests(TestCase): def has_attrs(self): self.assertTrue(hasattr(guardian, '__version__')) def test_version(self): for x in guardian.VERSION: self.assertTrue(isinstance(x, (int, str))) def test_get_version(self): self.assertTrue(isinstance(guardian.get_version(), str)) class TestExceptions(TestCase): def _test_error_class(self, exc_cls): self.assertTrue(isinstance(exc_cls, GuardianError)) def test_error_classes(self): self.assertTrue(isinstance(GuardianError(), Exception)) guardian_errors = [NotUserNorGroup] for err in guardian_errors: self._test_error_class(err()) @unittest.skip("test is broken") class TestMonkeyPatch(TestCase): @mock.patch('django.contrib.auth.get_user_model') def test_monkey_patch(self, mocked_get_user_model): class CustomUserTestClass(AbstractUser): pass mocked_get_user_model.return_value = CustomUserTestClass self.assertFalse(getattr(CustomUserTestClass, 'get_anonymous', False)) self.assertFalse(getattr(CustomUserTestClass, 'add_obj_perm', False)) self.assertFalse(getattr(CustomUserTestClass, 'del_obj_perm', False)) self.assertFalse(getattr(CustomUserTestClass, 'evict_obj_perms_cache', False)) # Monkey Patch guardian.monkey_patch_user() self.assertTrue(getattr(CustomUserTestClass, 'get_anonymous', False)) self.assertTrue(getattr(CustomUserTestClass, 'add_obj_perm', False)) self.assertTrue(getattr(CustomUserTestClass, 'del_obj_perm', False)) self.assertTrue(getattr(CustomUserTestClass, 'evict_obj_perms_cache', False)) user = CustomUserTestClass() self.assertFalse(user.evict_obj_perms_cache())
bsd-2-clause
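Outside the test suite, the same object-level checks exercised above usually go through guardian's shortcuts; a sketch assuming a configured Django project with guardian installed, a saved user, and two saved model instances obj1 and obj2:

from guardian.shortcuts import assign_perm, remove_perm

assign_perm('change_contenttype', user, obj1)        # per-object, not global
assert user.has_perm('change_contenttype', obj1)
assert not user.has_perm('change_contenttype', obj2)
remove_perm('change_contenttype', user, obj1)
assert not user.has_perm('change_contenttype', obj1)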
empiredan/googletest
scripts/release_docs.py
1167
6132
#!/usr/bin/env python
#
# Copyright 2013 Google Inc. All Rights Reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
#     * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#     * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
#     * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

"""Script for branching Google Test/Mock wiki pages for a new version.

SYNOPSIS
       release_docs.py NEW_RELEASE_VERSION

       Google Test and Google Mock's external user documentation is in
       interlinked wiki files.  When we release a new version of
       Google Test or Google Mock, we need to branch the wiki files
       such that users of a specific version of Google Test/Mock can
       look up documentation relevant for that version.  This script
       automates that process by:

         - branching the current wiki pages (which document the
           behavior of the SVN trunk head) to pages for the specified
           version (e.g. branching FAQ.wiki to V2_6_FAQ.wiki when
           NEW_RELEASE_VERSION is 2.6);
         - updating the links in the branched files to point to the
           branched version (e.g. a link in V2_6_FAQ.wiki that pointed
           to Primer.wiki#Anchor will now point to
           V2_6_Primer.wiki#Anchor).

       NOTE: NEW_RELEASE_VERSION must be a NEW version number for
       which the wiki pages don't yet exist; otherwise you'll get SVN
       errors like "svn: Path 'V1_7_PumpManual.wiki' is not a
       directory" when running the script.

EXAMPLE
       $ cd PATH/TO/GTEST_SVN_WORKSPACE/trunk
       $ scripts/release_docs.py 2.6  # create wiki pages for v2.6
       $ svn status                   # verify the file list
       $ svn diff                     # verify the file contents
       $ svn commit -m "release wiki pages for v2.6"
"""

__author__ = 'wan@google.com (Zhanyong Wan)'

import os
import re
import sys

import common


# Wiki pages that shouldn't be branched for every gtest/gmock release.
GTEST_UNVERSIONED_WIKIS = ['DevGuide.wiki']
GMOCK_UNVERSIONED_WIKIS = [
    'DesignDoc.wiki',
    'DevGuide.wiki',
    'KnownIssues.wiki'
    ]


def DropWikiSuffix(wiki_filename):
  """Removes the .wiki suffix (if any) from the given filename."""

  return (wiki_filename[:-len('.wiki')] if wiki_filename.endswith('.wiki')
          else wiki_filename)


class WikiBrancher(object):
  """Branches ..."""

  def __init__(self, dot_version):
    self.project, svn_root_path = common.GetSvnInfo()
    if self.project not in ('googletest', 'googlemock'):
      sys.exit('This script must be run in a gtest or gmock SVN workspace.')
    self.wiki_dir = svn_root_path + '/wiki'
    # Turn '2.6' to 'V2_6_'.
    self.version_prefix = 'V' + dot_version.replace('.', '_') + '_'
    self.files_to_branch = self.GetFilesToBranch()
    page_names = [DropWikiSuffix(f) for f in self.files_to_branch]
    # A link to Foo.wiki is in one of the following forms:
    #   [Foo words]
    #   [Foo#Anchor words]
    #   [http://code.google.com/.../wiki/Foo words]
    #   [http://code.google.com/.../wiki/Foo#Anchor words]
    # We want to replace 'Foo' with 'V2_6_Foo' in the above cases.
    self.search_for_re = re.compile(
        # This regex matches either
        #   [Foo
        # or
        #   /wiki/Foo
        # followed by a space or a #, where Foo is the name of an
        # unversioned wiki page.
        r'(\[|/wiki/)(%s)([ #])' % '|'.join(page_names))
    self.replace_with = r'\1%s\2\3' % (self.version_prefix,)

  def GetFilesToBranch(self):
    """Returns a list of .wiki file names that need to be branched."""

    unversioned_wikis = (GTEST_UNVERSIONED_WIKIS if self.project == 'googletest'
                         else GMOCK_UNVERSIONED_WIKIS)
    return [f for f in os.listdir(self.wiki_dir)
            if (f.endswith('.wiki') and
                not re.match(r'^V\d', f) and  # Excluded versioned .wiki files.
                f not in unversioned_wikis)]

  def BranchFiles(self):
    """Branches the .wiki files needed to be branched."""

    print 'Branching %d .wiki files:' % (len(self.files_to_branch),)
    os.chdir(self.wiki_dir)
    for f in self.files_to_branch:
      command = 'svn cp %s %s%s' % (f, self.version_prefix, f)
      print command
      os.system(command)

  def UpdateLinksInBranchedFiles(self):

    for f in self.files_to_branch:
      source_file = os.path.join(self.wiki_dir, f)
      versioned_file = os.path.join(self.wiki_dir, self.version_prefix + f)
      print 'Updating links in %s.' % (versioned_file,)
      text = file(source_file, 'r').read()
      new_text = self.search_for_re.sub(self.replace_with, text)
      file(versioned_file, 'w').write(new_text)


def main():
  if len(sys.argv) != 2:
    sys.exit(__doc__)

  brancher = WikiBrancher(sys.argv[1])
  brancher.BranchFiles()
  brancher.UpdateLinksInBranchedFiles()


if __name__ == '__main__':
  main()
bsd-3-clause
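The link rewrite performed by WikiBrancher above can be demonstrated stand-alone; 'FAQ' and 'Primer' stand in for the unversioned page names and 'V2_6_' for the version prefix:

import re

page_names = ['FAQ', 'Primer']
search_for = re.compile(r'(\[|/wiki/)(%s)([ #])' % '|'.join(page_names))
replace_with = r'\1V2_6_\2\3'

text = 'See [FAQ here] and [Primer#Anchor intro].'
print(search_for.sub(replace_with, text))
# -> See [V2_6_FAQ here] and [V2_6_Primer#Anchor intro].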
darmaa/odoo
addons/account_anglo_saxon/__openerp__.py
67
2462
##############################################################################
#
#    OpenERP, Open Source Management Solution
#    Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>).
#
#    This program is free software: you can redistribute it and/or modify
#    it under the terms of the GNU Affero General Public License as
#    published by the Free Software Foundation, either version 3 of the
#    License, or (at your option) any later version.
#
#    This program is distributed in the hope that it will be useful,
#    but WITHOUT ANY WARRANTY; without even the implied warranty of
#    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
#    GNU Affero General Public License for more details.
#
#    You should have received a copy of the GNU Affero General Public License
#    along with this program.  If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
    'name': 'Anglo-Saxon Accounting',
    'version': '1.2',
    'author': 'OpenERP SA, Veritos',
    'website': 'http://openerp.com - http://veritos.nl',
    'description': """
This module supports the Anglo-Saxon accounting methodology by changing the accounting logic with stock transactions.
=====================================================================================================================

The difference between the Anglo-Saxon accounting countries and the Rhine
(also called Continental) accounting countries is the moment at which the
Cost of Goods Sold versus Cost of Sales is taken. Anglo-Saxon accounting
takes the cost when the sales invoice is created; Continental accounting
takes the cost at the moment the goods are shipped.

This module adds this functionality by using an interim account to store the
value of shipped goods, and contra-books this interim account when the
invoice is created, transferring the amount to the debtor or creditor
account. Secondly, price differences between the actual purchase price and
the fixed product standard price are booked on a separate account.""",
    'images': ['images/account_anglo_saxon.jpeg'],
    'depends': ['product', 'purchase'],
    'category': 'Accounting & Finance',
    'demo': [],
    'data': ['product_view.xml'],
    'test': ['test/anglo_saxon.yml', 'test/anglo_saxon_avg_fifo.yml'],
    'auto_install': False,
    'installable': True,
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
agpl-3.0
tomviner/pytest
doc/en/example/assertion/test_setup_flow_example.py
18
1252
def setup_module(module):
    module.TestStateFullThing.classcount = 0


class TestStateFullThing:
    def setup_class(cls):
        cls.classcount += 1

    def teardown_class(cls):
        cls.classcount -= 1

    def setup_method(self, method):
        self.id = eval(method.__name__[5:])

    def test_42(self):
        assert self.classcount == 1
        assert self.id == 42

    def test_23(self):
        assert self.classcount == 1
        assert self.id == 23


def teardown_module(module):
    assert module.TestStateFullThing.classcount == 0


""" For this example the control flow happens as follows::
    import test_setup_flow_example
    setup_module(test_setup_flow_example)
       setup_class(TestStateFullThing)
       instance = TestStateFullThing()
       setup_method(instance, instance.test_42)
          instance.test_42()
       setup_method(instance, instance.test_23)
          instance.test_23()
       teardown_class(TestStateFullThing)
    teardown_module(test_setup_flow_example)

Note that ``setup_class(TestStateFullThing)`` is called and not
``TestStateFullThing.setup_class()`` which would require you to
insert ``setup_class = classmethod(setup_class)`` to make your
setup function callable.
"""
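# --- Usage note (not part of the original example) ---
# A sketch of how one might run this module and observe the hook order;
# the command below is standard pytest usage:
#
#     pytest -v test_setup_flow_example.py
#
# setup_module/setup_class run once each, while setup_method runs before
# every test, which is why classcount stays at 1 inside both tests.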
mit
tzaffi/git-in-practice-repo
book/lib/python2.7/site-packages/django/contrib/gis/tests/geo3d/tests.py
109
12136
from __future__ import absolute_import, unicode_literals import os import re from django.contrib.gis.gdal import HAS_GDAL from django.contrib.gis.geos import HAS_GEOS from django.contrib.gis.tests.utils import postgis from django.test import TestCase from django.utils._os import upath from django.utils.unittest import skipUnless if HAS_GEOS: from django.contrib.gis.db.models import Union, Extent3D from django.contrib.gis.geos import GEOSGeometry, LineString, Point, Polygon from .models import (City3D, Interstate2D, Interstate3D, InterstateProj2D, InterstateProj3D, Point2D, Point3D, MultiPoint3D, Polygon2D, Polygon3D) if HAS_GDAL: from django.contrib.gis.utils import LayerMapping, LayerMapError data_path = os.path.realpath(os.path.join(os.path.dirname(upath(__file__)), '..', 'data')) city_file = os.path.join(data_path, 'cities', 'cities.shp') vrt_file = os.path.join(data_path, 'test_vrt', 'test_vrt.vrt') # The coordinates of each city, with Z values corresponding to their # altitude in meters. city_data = ( ('Houston', (-95.363151, 29.763374, 18)), ('Dallas', (-96.801611, 32.782057, 147)), ('Oklahoma City', (-97.521157, 34.464642, 380)), ('Wellington', (174.783117, -41.315268, 14)), ('Pueblo', (-104.609252, 38.255001, 1433)), ('Lawrence', (-95.235060, 38.971823, 251)), ('Chicago', (-87.650175, 41.850385, 181)), ('Victoria', (-123.305196, 48.462611, 15)), ) # Reference mapping of city name to its altitude (Z value). city_dict = dict((name, coords) for name, coords in city_data) # 3D freeway data derived from the National Elevation Dataset: # http://seamless.usgs.gov/products/9arc.php interstate_data = ( ('I-45', 'LINESTRING(-95.3708481 29.7765870 11.339,-95.3694580 29.7787980 4.536,-95.3690305 29.7797359 9.762,-95.3691886 29.7812450 12.448,-95.3696447 29.7850144 10.457,-95.3702511 29.7868518 9.418,-95.3706724 29.7881286 14.858,-95.3711632 29.7896157 15.386,-95.3714525 29.7936267 13.168,-95.3717848 29.7955007 15.104,-95.3717719 29.7969804 16.516,-95.3717305 29.7982117 13.923,-95.3717254 29.8000778 14.385,-95.3719875 29.8013539 15.160,-95.3720575 29.8026785 15.544,-95.3721321 29.8040912 14.975,-95.3722074 29.8050998 15.688,-95.3722779 29.8060430 16.099,-95.3733818 29.8076750 15.197,-95.3741563 29.8103686 17.268,-95.3749458 29.8129927 19.857,-95.3763564 29.8144557 15.435)', ( 11.339, 4.536, 9.762, 12.448, 10.457, 9.418, 14.858, 15.386, 13.168, 15.104, 16.516, 13.923, 14.385, 15.16 , 15.544, 14.975, 15.688, 16.099, 15.197, 17.268, 19.857, 15.435), ), ) # Bounding box polygon for inner-loop of Houston (in projected coordinate # system 32140), with elevation values from the National Elevation Dataset # (see above). bbox_data = ( 'POLYGON((941527.97 4225693.20,962596.48 4226349.75,963152.57 4209023.95,942051.75 4208366.38,941527.97 4225693.20))', (21.71, 13.21, 9.12, 16.40, 21.71) ) @skipUnless(HAS_GEOS and HAS_GDAL and postgis, "Geos, GDAL and postgis are required.") class Geo3DTest(TestCase): """ Only a subset of the PostGIS routines are 3D-enabled, and this TestCase tries to test the features that can handle 3D and that are also available within GeoDjango. 
For more information, see the PostGIS docs on the routines that support 3D: http://postgis.refractions.net/documentation/manual-1.4/ch08.html#PostGIS_3D_Functions """ def _load_interstate_data(self): # Interstate (2D / 3D and Geographic/Projected variants) for name, line, exp_z in interstate_data: line_3d = GEOSGeometry(line, srid=4269) line_2d = LineString([l[:2] for l in line_3d.coords], srid=4269) # Creating a geographic and projected version of the # interstate in both 2D and 3D. Interstate3D.objects.create(name=name, line=line_3d) InterstateProj3D.objects.create(name=name, line=line_3d) Interstate2D.objects.create(name=name, line=line_2d) InterstateProj2D.objects.create(name=name, line=line_2d) def _load_city_data(self): for name, pnt_data in city_data: City3D.objects.create(name=name, point=Point(*pnt_data, srid=4326)) def _load_polygon_data(self): bbox_wkt, bbox_z = bbox_data bbox_2d = GEOSGeometry(bbox_wkt, srid=32140) bbox_3d = Polygon(tuple((x, y, z) for (x, y), z in zip(bbox_2d[0].coords, bbox_z)), srid=32140) Polygon2D.objects.create(name='2D BBox', poly=bbox_2d) Polygon3D.objects.create(name='3D BBox', poly=bbox_3d) def test_3d_hasz(self): """ Make sure data is 3D and has expected Z values -- shouldn't change because of coordinate system. """ self._load_interstate_data() for name, line, exp_z in interstate_data: interstate = Interstate3D.objects.get(name=name) interstate_proj = InterstateProj3D.objects.get(name=name) for i in [interstate, interstate_proj]: self.assertTrue(i.line.hasz) self.assertEqual(exp_z, tuple(i.line.z)) self._load_city_data() for name, pnt_data in city_data: city = City3D.objects.get(name=name) z = pnt_data[2] self.assertTrue(city.point.hasz) self.assertEqual(z, city.point.z) def test_3d_polygons(self): """ Test the creation of polygon 3D models. """ self._load_polygon_data() p3d = Polygon3D.objects.get(name='3D BBox') self.assertTrue(p3d.poly.hasz) self.assertIsInstance(p3d.poly, Polygon) self.assertEqual(p3d.poly.srid, 32140) def test_3d_layermapping(self): """ Testing LayerMapping on 3D models. """ point_mapping = {'point' : 'POINT'} mpoint_mapping = {'mpoint' : 'MULTIPOINT'} # The VRT is 3D, but should still be able to map sans the Z. lm = LayerMapping(Point2D, vrt_file, point_mapping, transform=False) lm.save() self.assertEqual(3, Point2D.objects.count()) # The city shapefile is 2D, and won't be able to fill the coordinates # in the 3D model -- thus, a LayerMapError is raised. self.assertRaises(LayerMapError, LayerMapping, Point3D, city_file, point_mapping, transform=False) # 3D model should take 3D data just fine. lm = LayerMapping(Point3D, vrt_file, point_mapping, transform=False) lm.save() self.assertEqual(3, Point3D.objects.count()) # Making sure LayerMapping.make_multi works right, by converting # a Point25D into a MultiPoint25D. lm = LayerMapping(MultiPoint3D, vrt_file, mpoint_mapping, transform=False) lm.save() self.assertEqual(3, MultiPoint3D.objects.count()) def test_kml(self): """ Test GeoQuerySet.kml() with Z values. """ self._load_city_data() h = City3D.objects.kml(precision=6).get(name='Houston') # KML should be 3D. # `SELECT ST_AsKML(point, 6) FROM geo3d_city3d WHERE name = 'Houston';` ref_kml_regex = re.compile(r'^<Point><coordinates>-95.363\d+,29.763\d+,18</coordinates></Point>$') self.assertTrue(ref_kml_regex.match(h.kml)) def test_geojson(self): """ Test GeoQuerySet.geojson() with Z values. 
""" self._load_city_data() h = City3D.objects.geojson(precision=6).get(name='Houston') # GeoJSON should be 3D # `SELECT ST_AsGeoJSON(point, 6) FROM geo3d_city3d WHERE name='Houston';` ref_json_regex = re.compile(r'^{"type":"Point","coordinates":\[-95.363151,29.763374,18(\.0+)?\]}$') self.assertTrue(ref_json_regex.match(h.geojson)) def test_union(self): """ Testing the Union aggregate of 3D models. """ # PostGIS query that returned the reference EWKT for this test: # `SELECT ST_AsText(ST_Union(point)) FROM geo3d_city3d;` self._load_city_data() ref_ewkt = 'SRID=4326;MULTIPOINT(-123.305196 48.462611 15,-104.609252 38.255001 1433,-97.521157 34.464642 380,-96.801611 32.782057 147,-95.363151 29.763374 18,-95.23506 38.971823 251,-87.650175 41.850385 181,174.783117 -41.315268 14)' ref_union = GEOSGeometry(ref_ewkt) union = City3D.objects.aggregate(Union('point'))['point__union'] self.assertTrue(union.hasz) self.assertEqual(ref_union, union) def test_extent(self): """ Testing the Extent3D aggregate for 3D models. """ self._load_city_data() # `SELECT ST_Extent3D(point) FROM geo3d_city3d;` ref_extent3d = (-123.305196, -41.315268, 14,174.783117, 48.462611, 1433) extent1 = City3D.objects.aggregate(Extent3D('point'))['point__extent3d'] extent2 = City3D.objects.extent3d() def check_extent3d(extent3d, tol=6): for ref_val, ext_val in zip(ref_extent3d, extent3d): self.assertAlmostEqual(ref_val, ext_val, tol) for e3d in [extent1, extent2]: check_extent3d(e3d) def test_perimeter(self): """ Testing GeoQuerySet.perimeter() on 3D fields. """ self._load_polygon_data() # Reference query for values below: # `SELECT ST_Perimeter3D(poly), ST_Perimeter2D(poly) FROM geo3d_polygon3d;` ref_perim_3d = 76859.2620451 ref_perim_2d = 76859.2577803 tol = 6 self.assertAlmostEqual(ref_perim_2d, Polygon2D.objects.perimeter().get(name='2D BBox').perimeter.m, tol) self.assertAlmostEqual(ref_perim_3d, Polygon3D.objects.perimeter().get(name='3D BBox').perimeter.m, tol) def test_length(self): """ Testing GeoQuerySet.length() on 3D fields. """ # ST_Length_Spheroid Z-aware, and thus does not need to use # a separate function internally. # `SELECT ST_Length_Spheroid(line, 'SPHEROID["GRS 1980",6378137,298.257222101]') # FROM geo3d_interstate[2d|3d];` self._load_interstate_data() tol = 3 ref_length_2d = 4368.1721949481 ref_length_3d = 4368.62547052088 self.assertAlmostEqual(ref_length_2d, Interstate2D.objects.length().get(name='I-45').length.m, tol) self.assertAlmostEqual(ref_length_3d, Interstate3D.objects.length().get(name='I-45').length.m, tol) # Making sure `ST_Length3D` is used on for a projected # and 3D model rather than `ST_Length`. # `SELECT ST_Length(line) FROM geo3d_interstateproj2d;` ref_length_2d = 4367.71564892392 # `SELECT ST_Length3D(line) FROM geo3d_interstateproj3d;` ref_length_3d = 4368.16897234101 self.assertAlmostEqual(ref_length_2d, InterstateProj2D.objects.length().get(name='I-45').length.m, tol) self.assertAlmostEqual(ref_length_3d, InterstateProj3D.objects.length().get(name='I-45').length.m, tol) def test_scale(self): """ Testing GeoQuerySet.scale() on Z values. """ self._load_city_data() # Mapping of City name to reference Z values. zscales = (-3, 4, 23) for zscale in zscales: for city in City3D.objects.scale(1.0, 1.0, zscale): self.assertEqual(city_dict[city.name][2] * zscale, city.scale.z) def test_translate(self): """ Testing GeoQuerySet.translate() on Z values. 
""" self._load_city_data() ztranslations = (5.23, 23, -17) for ztrans in ztranslations: for city in City3D.objects.translate(0, 0, ztrans): self.assertEqual(city_dict[city.name][2] + ztrans, city.translate.z)
mit
shaunstanislaus/ibis
ibis/tasks.py
7
7836
# Copyright 2014 Cloudera Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import traceback

from cPickle import loads as pickle_load

from ibis.cloudpickle import dumps as pickle_dump
from ibis.wire import PackedMessageReader, PackedMessageWriter
import ibis.wire as wire

try:
    import ibis.comms as comms
except ImportError:
    pass


class IbisTaskMessage(object):
    """
    Prototype wire protocol for task descriptions

    uint32_t semaphore_id
    uint32_t shmem_name_len
    char* shmem_name
    uint64_t shmem_offset
    uint64_t shmem_size
    """

    def __init__(self, semaphore_id, shmem_name, shmem_offset, shmem_size):
        self.semaphore_id = semaphore_id
        self.shmem_name = shmem_name
        self.shmem_offset = shmem_offset
        self.shmem_size = shmem_size

    @classmethod
    def decode(cls, message):
        """
        Convert from the bytestring wire protocol

        Parameters
        ----------
        message : bytes

        Returns
        -------
        message : IbisTaskMessage
        """
        buf = PackedMessageReader(message)
        sem_id = buf.uint32()
        shmem_name = buf.string()
        shmem_offset = buf.uint64()
        shmem_size = buf.uint64()
        return IbisTaskMessage(sem_id, shmem_name, shmem_offset, shmem_size)

    def encode(self):
        """
        Format this message as a bytestring according to the current version
        of the wire protocol.

        Returns
        -------
        encoded : bytes
        """
        buf = PackedMessageWriter()
        buf.uint32(self.semaphore_id)
        buf.string(self.shmem_name)
        buf.uint64(self.shmem_offset)
        buf.uint64(self.shmem_size)
        return buf.get_result()


class Task(object):
    """
    Prototype

    Run task in a thread, capture tracebacks or other problems.
    """

    def __init__(self, shmem):
        self.shmem = shmem
        self.complete = False

    def mark_success(self):
        wire.write_uint8(self.shmem, 1)

    def mark_failure(self):
        wire.write_uint8(self.shmem, 0)

    def execute(self):
        pass

    def run(self):
        raise NotImplementedError

    def done(self):
        pass


_task_registry = {}


def register_task(kind, task_class, override=False):
    """
    Register a new task implementation with the execution system
    """
    if kind in _task_registry and not override:
        # Fix: interpolate the task kind into the error message; the original
        # left the %s placeholder unfilled.
        raise KeyError('Task of type %s is already defined and '
                       'override is False' % kind)
    _task_registry[kind] = task_class


class IbisTaskExecutor(object):
    """
    Runs the requested task and handles locking, exception reporting, and so
    forth.
    """

    def __init__(self, task_msg):
        self.task_msg = task_msg

        self.lock = comms.IPCLock(self.task_msg.semaphore_id)
        self.shmem = comms.SharedMmap(self.task_msg.shmem_name,
                                      self.task_msg.shmem_size,
                                      offset=self.task_msg.shmem_offset)

    def _cycle_ipc_lock(self):
        # TODO: I put this here as a failsafe in case the task needs to bail
        # out for a known reason and we want to immediately release control to
        # the master process
        self.lock.acquire()
        self.lock.release()

    def execute(self):
        # TODO: Timeout concerns
        self.lock.acquire()

        # TODO: this can break in various ways on bad input
        task_type = wire.read_string(self.shmem)

        try:
            klass = _task_registry[task_type]
            task = klass(self.shmem)
            task.run()
        except:
            self.shmem.seek(0)

            # XXX: Failure indicator
            wire.write_uint8(self.shmem, 0)

            tb = traceback.format_exc()

            # HACK: Traceback string must be truncated so it will fit in the
            # shared memory (along with the uint32 length prefix)
            if len(tb) + 5 > len(self.shmem):
                tb = tb[:len(self.shmem) - 5]

            wire.write_string(self.shmem, tb)
        finally:
            self.lock.release()


# ---------------------------------------------------------------------
# Ping pong task for testing


class PingPongTask(Task):

    def run(self):
        self.shmem.seek(0)
        self.mark_success()
        wire.write_string(self.shmem, 'pong')


register_task('ping', PingPongTask)


# ---------------------------------------------------------------------
# Aggregation execution tasks


class AggregationTask(Task):

    def _write_response(self, agg_inst):
        self.shmem.seek(0)
        self.mark_success()

        serialized_inst = pickle_dump(agg_inst)
        wire.write_string(self.shmem, serialized_inst)


class AggregationUpdateTask(AggregationTask):
    """
    Task header layout
    - serialized agg class
    - prior state flag 1/0
    - (optional) serialized prior state
    - serialized table fragment
    """

    def __init__(self, shmem):
        AggregationTask.__init__(self, shmem)
        self._read_header()

    def _read_header(self):
        reader = wire.PackedMessageReader(self.shmem)

        # Unpack header
        self.agg_class_pickled = reader.string()
        has_prior_state = reader.uint8() != 0

        if has_prior_state:
            self.prior_state = pickle_load(reader.string())
        else:
            self.prior_state = None

    def run(self):
        if self.prior_state is not None:
            agg_inst = self.prior_state
        else:
            klass = pickle_load(self.agg_class_pickled)
            agg_inst = klass()

        args = self._deserialize_args()
        agg_inst.update(*args)
        self._write_response(agg_inst)

    def _deserialize_args(self):
        # TODO: we need some mechanism to indicate how the data should be
        # deserialized before passing to the aggregator. For now, will assume
        # "pandas-friendly" NumPy-format

        # Deserialize data fragment
        table_reader = comms.IbisTableReader(self.shmem)

        args = []
        for i in range(table_reader.ncolumns):
            col = table_reader.get_column(i)
            arg = col.to_numpy_for_pandas()
            args.append(arg)

        return args


class AggregationMergeTask(AggregationTask):

    def __init__(self, shmem):
        AggregationTask.__init__(self, shmem)

        reader = wire.PackedMessageReader(shmem)

        # TODO: may wish to merge more than 2 at a time?

        # Unpack header
        self.left_inst = pickle_load(reader.string())
        self.right_inst = pickle_load(reader.string())

    def run(self):
        # Objects to merge stored in length-prefixed strings in shared memory
        merged = self.left_inst.merge(self.right_inst)
        self._write_response(merged)


class AggregationFinalizeTask(AggregationTask):

    def __init__(self, shmem):
        AggregationTask.__init__(self, shmem)

        reader = wire.PackedMessageReader(shmem)
        self.state = pickle_load(reader.string())

    def run(self):
        # Single length-prefixed string to finalize
        result = self.state.finalize()
        self._write_response(result)


register_task('agg-update', AggregationUpdateTask)
register_task('agg-merge', AggregationMergeTask)
register_task('agg-finalize', AggregationFinalizeTask)
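# --- Illustrative sketch (not part of the original module) ---
# A self-contained illustration of the IbisTaskMessage field layout using the
# standard struct module. The real encoding is delegated to
# PackedMessageWriter/Reader; the little-endian, length-prefixed-string layout
# below is an assumption made for demonstration only.
import struct

def demo_encode(sem_id, name, offset, size):
    name_bytes = name.encode('utf-8')
    return (struct.pack('<II', sem_id, len(name_bytes)) + name_bytes +
            struct.pack('<QQ', offset, size))

payload = demo_encode(7, '/ibis-shm-0', 4096, 1 << 20)
sem_id, name_len = struct.unpack_from('<II', payload, 0)
assert (sem_id, name_len) == (7, 11)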
apache-2.0
CandidCypher/code-for-blog
2012/plugins_python/htmlize/core.py
13
2324
#-------------------------------------------------------------------------------
# htmlize: htmlize/core.py
#
# The core functionality of htmlize.
#
# Eli Bendersky (eliben@gmail.com)
# This code is in the public domain
#-------------------------------------------------------------------------------
from collections import namedtuple
import re


# Regex for matching/capturing role text.
# E.g. :name:`text` - first capture group is "name", second group is "text"
#
ROLE_REGEX = re.compile(r':(\w+):`([^`]*)`')

RoleMatch = namedtuple('RoleMatch', 'name contents')


def htmlize(post, db, plugins=[]):
    """ Convert the contents of the given post to HTML, letting the given
        plugins act on roles and on the full contents along the way.
    """
    contents = post.contents

    # Plugins are classes - we need to instantiate them to get objects.
    plugins = [P(post, db) for P in plugins]

    # Split the contents to paragraphs
    paragraphs = re.split(r'\n\n+', contents)
    for i, p in enumerate(paragraphs):
        paragraphs[i] = '<p>' + p.replace('\n', ' ') + '</p>'
    contents = '\n\n'.join(paragraphs)

    # Find roles in the contents. Create a list of parts, where each
    # part is either text that has no roles in it, or a RoleMatch
    # object.
    pos = 0
    parts = []
    while True:
        match = ROLE_REGEX.search(contents, pos)
        if match is None:
            parts.append(contents[pos:])
            break
        parts.append(contents[pos:match.start()])
        parts.append(RoleMatch(match.group(1), match.group(2)))
        pos = match.end()

    # Ask plugins to act on roles
    for i, part in enumerate(parts):
        if isinstance(part, RoleMatch):
            parts[i] = _plugin_replace_role(
                            part.name, part.contents, plugins)

    # Build full contents back again, and ask plugins to act on
    # contents.
    contents = ''.join(parts)
    for p in plugins:
        contents_hook = p.get_contents_hook()
        if contents_hook:
            contents = contents_hook(contents)
    return contents


def _plugin_replace_role(name, contents, plugins):
    """ The first plugin that handles this role is used.
    """
    for p in plugins:
        role_hook = p.get_role_hook(name)
        if role_hook:
            return role_hook(contents)
    # If no plugin handling this role is found, return its original form
    return ':{0}:`{1}`'.format(name, contents)
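# --- Illustrative sketch (not part of the original module) ---
# How ROLE_REGEX picks roles out of post text; the sample text is made up.
sample = 'Link it with :post:`fibonacci` and move on.'
m = ROLE_REGEX.search(sample)
assert m.group(1) == 'post'
assert m.group(2) == 'fibonacci'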
unlicense
ALabate/linux-asus-T200TA
tools/perf/tests/attr.py
1266
9424
#! /usr/bin/python import os import sys import glob import optparse import tempfile import logging import shutil import ConfigParser class Fail(Exception): def __init__(self, test, msg): self.msg = msg self.test = test def getMsg(self): return '\'%s\' - %s' % (self.test.path, self.msg) class Unsup(Exception): def __init__(self, test): self.test = test def getMsg(self): return '\'%s\'' % self.test.path class Event(dict): terms = [ 'cpu', 'flags', 'type', 'size', 'config', 'sample_period', 'sample_type', 'read_format', 'disabled', 'inherit', 'pinned', 'exclusive', 'exclude_user', 'exclude_kernel', 'exclude_hv', 'exclude_idle', 'mmap', 'comm', 'freq', 'inherit_stat', 'enable_on_exec', 'task', 'watermark', 'precise_ip', 'mmap_data', 'sample_id_all', 'exclude_host', 'exclude_guest', 'exclude_callchain_kernel', 'exclude_callchain_user', 'wakeup_events', 'bp_type', 'config1', 'config2', 'branch_sample_type', 'sample_regs_user', 'sample_stack_user', ] def add(self, data): for key, val in data: log.debug(" %s = %s" % (key, val)) self[key] = val def __init__(self, name, data, base): log.debug(" Event %s" % name); self.name = name; self.group = '' self.add(base) self.add(data) def compare_data(self, a, b): # Allow multiple values in assignment separated by '|' a_list = a.split('|') b_list = b.split('|') for a_item in a_list: for b_item in b_list: if (a_item == b_item): return True elif (a_item == '*') or (b_item == '*'): return True return False def equal(self, other): for t in Event.terms: log.debug(" [%s] %s %s" % (t, self[t], other[t])); if not self.has_key(t) or not other.has_key(t): return False if not self.compare_data(self[t], other[t]): return False return True def diff(self, other): for t in Event.terms: if not self.has_key(t) or not other.has_key(t): continue if not self.compare_data(self[t], other[t]): log.warning("expected %s=%s, got %s" % (t, self[t], other[t])) # Test file description needs to have following sections: # [config] # - just single instance in file # - needs to specify: # 'command' - perf command name # 'args' - special command arguments # 'ret' - expected command return value (0 by default) # # [eventX:base] # - one or multiple instances in file # - expected values assignments class Test(object): def __init__(self, path, options): parser = ConfigParser.SafeConfigParser() parser.read(path) log.warning("running '%s'" % path) self.path = path self.test_dir = options.test_dir self.perf = options.perf self.command = parser.get('config', 'command') self.args = parser.get('config', 'args') try: self.ret = parser.get('config', 'ret') except: self.ret = 0 self.expect = {} self.result = {} log.debug(" loading expected events"); self.load_events(path, self.expect) def is_event(self, name): if name.find("event") == -1: return False else: return True def load_events(self, path, events): parser_event = ConfigParser.SafeConfigParser() parser_event.read(path) # The event record section header contains 'event' word, # optionaly followed by ':' allowing to load 'parent # event' first as a base for section in filter(self.is_event, parser_event.sections()): parser_items = parser_event.items(section); base_items = {} # Read parent event if there's any if (':' in section): base = section[section.index(':') + 1:] parser_base = ConfigParser.SafeConfigParser() parser_base.read(self.test_dir + '/' + base) base_items = parser_base.items('event') e = Event(section, parser_items, base_items) events[section] = e def run_cmd(self, tempdir): cmd = "PERF_TEST_ATTR=%s %s %s -o %s/perf.data %s" % (tempdir, 
self.perf, self.command, tempdir, self.args) ret = os.WEXITSTATUS(os.system(cmd)) log.info(" '%s' ret %d " % (cmd, ret)) if ret != int(self.ret): raise Unsup(self) def compare(self, expect, result): match = {} log.debug(" compare"); # For each expected event find all matching # events in result. Fail if there's not any. for exp_name, exp_event in expect.items(): exp_list = [] log.debug(" matching [%s]" % exp_name) for res_name, res_event in result.items(): log.debug(" to [%s]" % res_name) if (exp_event.equal(res_event)): exp_list.append(res_name) log.debug(" ->OK") else: log.debug(" ->FAIL"); log.debug(" match: [%s] matches %s" % (exp_name, str(exp_list))) # we did not any matching event - fail if (not exp_list): exp_event.diff(res_event) raise Fail(self, 'match failure'); match[exp_name] = exp_list # For each defined group in the expected events # check we match the same group in the result. for exp_name, exp_event in expect.items(): group = exp_event.group if (group == ''): continue for res_name in match[exp_name]: res_group = result[res_name].group if res_group not in match[group]: raise Fail(self, 'group failure') log.debug(" group: [%s] matches group leader %s" % (exp_name, str(match[group]))) log.debug(" matched") def resolve_groups(self, events): for name, event in events.items(): group_fd = event['group_fd']; if group_fd == '-1': continue; for iname, ievent in events.items(): if (ievent['fd'] == group_fd): event.group = iname log.debug('[%s] has group leader [%s]' % (name, iname)) break; def run(self): tempdir = tempfile.mkdtemp(); try: # run the test script self.run_cmd(tempdir); # load events expectation for the test log.debug(" loading result events"); for f in glob.glob(tempdir + '/event*'): self.load_events(f, self.result); # resolve group_fd to event names self.resolve_groups(self.expect); self.resolve_groups(self.result); # do the expectation - results matching - both ways self.compare(self.expect, self.result) self.compare(self.result, self.expect) finally: # cleanup shutil.rmtree(tempdir) def run_tests(options): for f in glob.glob(options.test_dir + '/' + options.test): try: Test(f, options).run() except Unsup, obj: log.warning("unsupp %s" % obj.getMsg()) def setup_log(verbose): global log level = logging.CRITICAL if verbose == 1: level = logging.WARNING if verbose == 2: level = logging.INFO if verbose >= 3: level = logging.DEBUG log = logging.getLogger('test') log.setLevel(level) ch = logging.StreamHandler() ch.setLevel(level) formatter = logging.Formatter('%(message)s') ch.setFormatter(formatter) log.addHandler(ch) USAGE = '''%s [OPTIONS] -d dir # tests dir -p path # perf binary -t test # single test -v # verbose level ''' % sys.argv[0] def main(): parser = optparse.OptionParser(usage=USAGE) parser.add_option("-t", "--test", action="store", type="string", dest="test") parser.add_option("-d", "--test-dir", action="store", type="string", dest="test_dir") parser.add_option("-p", "--perf", action="store", type="string", dest="perf") parser.add_option("-v", "--verbose", action="count", dest="verbose") options, args = parser.parse_args() if args: parser.error('FAILED wrong arguments %s' % ' '.join(args)) return -1 setup_log(options.verbose) if not options.test_dir: print 'FAILED no -d option specified' sys.exit(-1) if not options.test: options.test = 'test*' try: run_tests(options) except Fail, obj: print "FAILED %s" % obj.getMsg(); sys.exit(-1) sys.exit(0) if __name__ == '__main__': main()
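# --- Illustrative sketch (not part of the original harness) ---
# Event.compare_data above treats '|'-separated values as alternatives and
# '*' as a wildcard; the same rule distilled into a free function:
def compare_data(a, b):
    for a_item in a.split('|'):
        for b_item in b.split('|'):
            if a_item == b_item or a_item == '*' or b_item == '*':
                return True
    return False

assert compare_data('0|1', '1')       # alternative matches
assert compare_data('*', '0x1234')    # wildcard matches anything
assert not compare_data('0', '1')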
gpl-2.0
hpparvi/PyTransit
pytransit/param/prior.py
1
3826
#  PyTransit: fast and easy exoplanet transit modelling in Python.
#  Copyright (C) 2010-2019  Hannu Parviainen
#
#  This program is free software: you can redistribute it and/or modify
#  it under the terms of the GNU General Public License as published by
#  the Free Software Foundation, either version 3 of the License, or
#  (at your option) any later version.
#
#  This program is distributed in the hope that it will be useful,
#  but WITHOUT ANY WARRANTY; without even the implied warranty of
#  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
#  GNU General Public License for more details.
#
#  You should have received a copy of the GNU General Public License
#  along with this program.  If not, see <https://www.gnu.org/licenses/>.

import math as m

from numpy import inf, zeros, pi, log, where, exp
from numpy.random import normal, uniform
from scipy.stats import gamma as gm, laplace


class Prior:
    def __init__(self):
        raise NotImplementedError

    def logpdf(self, v):
        raise NotImplementedError

    def rvs(self, size):
        raise NotImplementedError


class DefaultPrior(Prior):
    def __init__(self):
        # Explicit no-op constructor: the base class __init__ raises
        # NotImplementedError, which would otherwise make this class
        # impossible to instantiate.
        pass

    def logpdf(self, v: float):
        return 0

    def rvs(self, size):
        return zeros(size)


class NormalPrior(Prior):
    def __init__(self, mean: float, std: float):
        self.mean = float(mean)
        self.std = float(std)
        self._f1 = 1 / m.sqrt(2 * pi * std ** 2)
        self._lf1 = m.log(self._f1)
        self._f2 = 1 / (2 * std ** 2)

    def logpdf(self, x):
        return self._lf1 - self._f2 * (x - self.mean) ** 2

    def rvs(self, size=1):
        return normal(self.mean, self.std, size)

    def __str__(self):
        return f'N(μ = {self.mean}, σ = {self.std})'

    def __repr__(self):
        return f'NormalPrior({self.mean}, {self.std})'


class UniformPrior(Prior):
    def __init__(self, a: float, b: float):
        self.a, self.b = a, b
        # The log density of U(a, b) is -log(b - a); the original stored
        # +log(b - a), which flipped the sign of every in-bounds logpdf value.
        self.lnc = -m.log(b - a)

    def logpdf(self, v):
        return where((self.a < v) & (v < self.b), self.lnc, -inf)

    def rvs(self, size=1):
        return uniform(self.a, self.b, size)

    def __str__(self):
        return f'U(a = {self.a}, b = {self.b})'

    def __repr__(self):
        return f'UniformPrior({self.a}, {self.b})'


class JeffreysPrior(Prior):
    def __init__(self, x0: float, x1: float):
        self.x0 = x0
        self.x1 = x1
        self._f = log(x1 / x0)

    def pdf(self, x):
        # A density is zero outside its support; -inf belongs only in the
        # log density (the original returned -inf here as well).
        return where((x > self.x0) & (x < self.x1), 1. / (x * self._f), 0.)

    def logpdf(self, x):
        return where((x > self.x0) & (x < self.x1), -log(x * self._f), -inf)

    def rvs(self, size=1):
        return exp(uniform(log(self.x0), log(self.x1), size))


class LaplacePrior(Prior):
    def __init__(self, mean, mad):
        self.mean = mean
        self.mad = mad
        self._n = 1. / (2 * mad)
        self._ln = log(self._n)
        self._p = laplace(mean, mad)

    def logpdf(self, v):
        return self._ln - abs(v - self.mean) / self.mad

    def rvs(self, size):
        return self._p.rvs(size)

    def __str__(self):
        return f'L(μ = {self.mean}, MAD = {self.mad})'

    def __repr__(self):
        return f'LaplacePrior({self.mean}, {self.mad})'


class LogLogisticPrior(Prior):
    def __init__(self, a, b):
        self.a, self.b = a, b

    def logpdf(self, v):
        if not 1e-3 < v < 1.:
            return -inf
        else:
            a, b = self.a, self.b
            return m.log((b / a) * (v / a) ** (b - 1.) / (1. + (v / a) ** b) ** 2)

    def rvs(self, size=1):
        return uniform(1e-3, 1.0, size)


class GammaPrior(Prior):
    def __init__(self, a):
        self.a = a
        self.A = -m.lgamma(a)

    def logpdf(self, x):
        return self.A + (self.a - 1.) * log(x) - x

    def rvs(self, size):
        return gm(self.a).rvs(size)
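# --- Illustrative sketch (not part of the original module) ---
# A quick consistency check of NormalPrior.logpdf against scipy.stats.norm;
# only the class defined above is assumed.
from scipy.stats import norm

_p = NormalPrior(1.0, 0.5)
assert abs(_p.logpdf(1.3) - norm(1.0, 0.5).logpdf(1.3)) < 1e-9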
gpl-2.0
varunagrawal/azure-services
varunagrawal/site-packages/django/core/management/commands/test.py
111
2965
import sys
import os
from optparse import make_option, OptionParser

from django.conf import settings
from django.core.management.base import BaseCommand
from django.test.utils import get_runner


class Command(BaseCommand):
    option_list = BaseCommand.option_list + (
        make_option('--noinput',
            action='store_false', dest='interactive', default=True,
            help='Tells Django to NOT prompt the user for input of any kind.'),
        make_option('--failfast',
            action='store_true', dest='failfast', default=False,
            help='Tells Django to stop running the test suite after first '
                 'failed test.'),
        make_option('--testrunner',
            action='store', dest='testrunner',
            help='Tells Django to use specified test runner class instead of '
                 'the one specified by the TEST_RUNNER setting.'),
        make_option('--liveserver',
            action='store', dest='liveserver', default=None,
            help='Overrides the default address where the live server (used '
                 'with LiveServerTestCase) is expected to run from. The '
                 'default value is localhost:8081.'),
    )
    help = ('Runs the test suite for the specified applications, or the '
            'entire site if no apps are specified.')
    args = '[appname ...]'

    requires_model_validation = False

    def __init__(self):
        self.test_runner = None
        super(Command, self).__init__()

    def run_from_argv(self, argv):
        """
        Pre-parse the command line to extract the value of the --testrunner
        option. This allows a test runner to define additional command line
        arguments.
        """
        option = '--testrunner='
        for arg in argv[2:]:
            if arg.startswith(option):
                self.test_runner = arg[len(option):]
                break
        super(Command, self).run_from_argv(argv)

    def create_parser(self, prog_name, subcommand):
        test_runner_class = get_runner(settings, self.test_runner)
        options = self.option_list + getattr(
            test_runner_class, 'option_list', ())
        return OptionParser(prog=prog_name,
                            usage=self.usage(subcommand),
                            version=self.get_version(),
                            option_list=options)

    def handle(self, *test_labels, **options):
        from django.conf import settings
        from django.test.utils import get_runner

        TestRunner = get_runner(settings, options.get('testrunner'))

        options['verbosity'] = int(options.get('verbosity'))

        if options.get('liveserver') is not None:
            os.environ['DJANGO_LIVE_TEST_SERVER_ADDRESS'] = options['liveserver']
            del options['liveserver']

        test_runner = TestRunner(**options)
        failures = test_runner.run_tests(test_labels)

        if failures:
            sys.exit(bool(failures))
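# --- Illustrative sketch (not part of the original command) ---
# run_from_argv above just scans argv for a '--testrunner=' prefix before
# normal option parsing; the same scan as a free function, on a made-up argv:
def extract_testrunner(argv, option='--testrunner='):
    for arg in argv[2:]:
        if arg.startswith(option):
            return arg[len(option):]
    return None

argv = ['manage.py', 'test', '--testrunner=myapp.runner.Runner', 'myapp']
assert extract_testrunner(argv) == 'myapp.runner.Runner'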
gpl-2.0
ahmadghizzawi/aub-ml
assignment2/linear-regression.py
1
5573
import numpy as np
from numpy import sign as sign
import matplotlib.pyplot as plt


class Utils:
    @staticmethod
    def generate_set(N):
        """Generates an N by 3 uniformly distributed dataset."""
        # Generate random uniformly distributed points
        training_set = np.random.uniform(-1, 1, size=(N, 2))

        # Insert x0 values at the beginning of the array.
        x0 = np.ones((N, 1))
        training_set = np.insert(training_set, [0], x0, axis=1)

        # Return the generated set
        return training_set

    @staticmethod
    def display_figures(num_of_runs, ein_total, eout_total):
        print('Average Ein(g): ' + str(ein_total / num_of_runs))
        print('Average Eout(g): ' + str(eout_total / num_of_runs))

    @staticmethod
    def plot(training_set, labels, target_weights, best_hypothesis):
        # change the axis to fit our dataset
        plt.axis([-1, 1, -1, 1])

        # break the dataset down into two separate arrays, one per label
        # assigned by f(x)
        training_set_above_line = []
        training_set_below_line = []
        for i in range(len(labels)):
            if labels[i] == 1:
                training_set_above_line.append(training_set[i])
            else:
                training_set_below_line.append(training_set[i])

        training_set_above_line = np.array(training_set_above_line)
        training_set_below_line = np.array(training_set_below_line)

        # plot the sets
        if training_set_above_line.size > 0:
            training_set_x1_1 = training_set_above_line[:, 1]
            training_set_x2_1 = training_set_above_line[:, 2]
            plt.scatter(training_set_x1_1, training_set_x2_1, c='b')

        if training_set_below_line.size > 0:
            training_set_x1_neg_1 = training_set_below_line[:, 1]
            training_set_x2_neg_1 = training_set_below_line[:, 2]
            plt.scatter(training_set_x1_neg_1, training_set_x2_neg_1, c='r')

        # generate 50 evenly spaced numbers from -1 to 1.
        line = np.linspace(-1, 1)

        # plot f(x) in blue
        m, b = -target_weights[1] / target_weights[2], -target_weights[0] / target_weights[2]
        plt.plot(line, m * line + b, 'b-', label='f(x)')

        # plot g(x) in dashed red
        m1, b1 = -best_hypothesis[1] / best_hypothesis[2], -best_hypothesis[0] / best_hypothesis[2]
        plt.plot(line, m1 * line + b1, 'r--', label='h(x)')
        plt.show()

    @staticmethod
    def generate_target_weights():
        # create a random line as the target function f(x)
        point1 = [1, np.random.uniform(-1, 1), np.random.uniform(-1, 1)]
        point2 = [1, np.random.uniform(-1, 1), np.random.uniform(-1, 1)]

        x1A = point1[1]
        x2A = point1[2]
        x1B = point2[1]
        x2B = point2[2]

        # Build the target weights as the cross product of the two homogeneous
        # points. Note: the middle coefficient must be x2B - x2A; the original
        # used x2B - x1A, which does not describe the line through the two
        # points.
        target_weights = np.array([x1B * x2A - x1A * x2B, x2B - x2A, x1A - x1B])

        return target_weights

    @staticmethod
    def evaluate_difference(target_labels, hypothesis_labels):
        # evaluate the difference between f and g on in- or out-of-sample data
        sample_size = len(target_labels)
        i = 0
        total_misclassified = 0
        while i < sample_size:
            target_classification = target_labels[i]
            hypothesis_classification = hypothesis_labels[i]
            if target_classification != hypothesis_classification:
                total_misclassified += 1
            i += 1
        return total_misclassified / sample_size


class LinearRegression:
    def __init__(self):
        self.target_weights = Utils.generate_target_weights()

    @staticmethod
    def apply(w, x):
        # apply h(x)
        return sign(np.dot(w, x))

    @staticmethod
    def learn(X, Y):
        # one-shot least-squares fit via the Moore-Penrose pseudo-inverse
        pseudo_inverse = np.linalg.pinv(X)
        return np.dot(pseudo_inverse, Y)

    @staticmethod
    def get_labels(w, X):
        labels = [LinearRegression.apply(w, x) for x in X]
        return np.array([labels]).T

    def run(self, number_of_runs):
        ein_total = 0
        eout_total = 0
        best_model = None
        best_eout = None
        i = 0
        while i < number_of_runs:
            # Generate data set and labels
            X = Utils.generate_set(100)
            Y = LinearRegression.get_labels(w=self.target_weights, X=X)

            # Generate hypothesis weights and labels
            hypothesis_weight = LinearRegression.learn(X, Y).T.flatten()
            hypothesis_labels = LinearRegression.get_labels(hypothesis_weight, X)

            # Calculate Ein total
            ein_total += Utils.evaluate_difference(Y, hypothesis_labels)

            # Generate 1000 out-of-sample points
            X_out = Utils.generate_set(1000)
            Y_out = LinearRegression.get_labels(w=self.target_weights, X=X_out)

            # Label the out-of-sample data set with the hypothesis
            hypothesis_labels_out = LinearRegression.get_labels(hypothesis_weight, X_out)

            # Calculate Eout total
            eout = Utils.evaluate_difference(Y_out, hypothesis_labels_out)
            eout_total += eout

            if best_eout is None or eout < best_eout:
                best_eout = eout
                best_model = hypothesis_weight
            i += 1

        Utils.display_figures(number_of_runs, ein_total, eout_total)
        Utils.plot(X, Y, self.target_weights, best_model)


model = LinearRegression()
model.run(1000)
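# --- Illustrative sketch (not part of the original assignment code) ---
# The learn() step above is the least-squares solution via the Moore-Penrose
# pseudo-inverse; it agrees with numpy's lstsq solver on any design matrix:
import numpy as np

X_demo = np.array([[1., 0., 0.], [1., 1., 0.], [1., 0., 1.], [1., 1., 1.]])
Y_demo = np.array([[1.], [-1.], [1.], [-1.]])
w_pinv = np.dot(np.linalg.pinv(X_demo), Y_demo)
w_lstsq = np.linalg.lstsq(X_demo, Y_demo, rcond=None)[0]
assert np.allclose(w_pinv, w_lstsq)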
unlicense
HiroIshikawa/21playground
payblog/blog/lib/python3.5/site-packages/markdown/extensions/abbr.py
123
2738
'''
Abbreviation Extension for Python-Markdown
==========================================

This extension adds abbreviation handling to Python-Markdown.

See <https://pythonhosted.org/Markdown/extensions/abbreviations.html>
for documentation.

Original code Copyright 2007-2008 [Waylan Limberg](http://achinghead.com/)
and [Seemant Kulleen](http://www.kulleen.org/)

All changes Copyright 2008-2014 The Python Markdown Project

License: [BSD](http://www.opensource.org/licenses/bsd-license.php)

'''

from __future__ import absolute_import
from __future__ import unicode_literals
from . import Extension
from ..preprocessors import Preprocessor
from ..inlinepatterns import Pattern
from ..util import etree, AtomicString
import re

# Global Vars
ABBR_REF_RE = re.compile(r'[*]\[(?P<abbr>[^\]]*)\][ ]?:\s*(?P<title>.*)')


class AbbrExtension(Extension):
    """ Abbreviation Extension for Python-Markdown. """

    def extendMarkdown(self, md, md_globals):
        """ Insert AbbrPreprocessor before ReferencePreprocessor. """
        md.preprocessors.add('abbr', AbbrPreprocessor(md), '<reference')


class AbbrPreprocessor(Preprocessor):
    """ Abbreviation Preprocessor - parse text for abbr references. """

    def run(self, lines):
        '''
        Find and remove all Abbreviation references from the text.
        Each reference is set as a new AbbrPattern in the markdown instance.

        '''
        new_text = []
        for line in lines:
            m = ABBR_REF_RE.match(line)
            if m:
                abbr = m.group('abbr').strip()
                title = m.group('title').strip()
                self.markdown.inlinePatterns['abbr-%s' % abbr] = \
                    AbbrPattern(self._generate_pattern(abbr), title)
            else:
                new_text.append(line)
        return new_text

    def _generate_pattern(self, text):
        '''
        Given a string, returns a regex pattern to match that string.

        'HTML' -> r'(?P<abbr>[H][T][M][L])'

        Note: we force each char as a literal match (in brackets) as we don't
        know what they will be beforehand.

        '''
        chars = list(text)
        for i in range(len(chars)):
            chars[i] = r'[%s]' % chars[i]
        return r'(?P<abbr>\b%s\b)' % (r''.join(chars))


class AbbrPattern(Pattern):
    """ Abbreviation inline pattern. """

    def __init__(self, pattern, title):
        super(AbbrPattern, self).__init__(pattern)
        self.title = title

    def handleMatch(self, m):
        abbr = etree.Element('abbr')
        abbr.text = AtomicString(m.group('abbr'))
        abbr.set('title', self.title)
        return abbr


def makeExtension(*args, **kwargs):
    return AbbrExtension(*args, **kwargs)
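# --- Illustrative sketch (not part of the original extension) ---
# What _generate_pattern produces for a sample abbreviation:
#   'HTML' -> r'(?P<abbr>\b[H][T][M][L]\b)'
# and how the reference regex splits a definition line:
m = ABBR_REF_RE.match('*[HTML]: Hyper Text Markup Language')
assert m.group('abbr') == 'HTML'
assert m.group('title') == 'Hyper Text Markup Language'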
mit
TheMOOCAgency/edx-platform
common/test/acceptance/pages/xblock/crowdsourcehinter_problem.py
11
1738
""" PageObject for Crowdsourcehinter """ from bok_choy.page_object import PageObject class CrowdsourcehinterProblemPage(PageObject): """ A PageObject representing the Crowdsourcehinter xblock. """ url = None def __init__(self, browser): """ Args: browser (selenium.webdriver): The Selenium-controlled browser that this page is loaded in. """ super(CrowdsourcehinterProblemPage, self).__init__(browser) def is_browser_on_page(self): return len(self.browser.find_elements_by_class_name('crowdsourcehinter_block')) > 0 def submit_text_answer(self, text): """ Submit an answer to the problem block """ self.q(css='input[type="text"]').fill(text) self.q(css='.action [data-value="Submit"]').click() self.wait_for_ajax() def get_hint_text(self): """ Return the hint shown to the student """ return self.q(css='div.csh_hint_text').text def get_student_answer_text(self): """ Check the student answer is set correctly """ return self.q(css='div.csh_hint_text').attrs('student_answer') def rate_hint(self): """ Click the rate_hint button """ self.q(css='div.csh_rate_hint').click() self.wait_for_ajax() def submit_new_hint(self, text): """ Fill in the textbox and submit a new hint """ self.q(css='.csh_student_hint_creation input[type="button"]').click() self.wait_for_ajax() self.q(css='.csh_student_text_input input[type="text"]').fill(text) self.q(css='.csh_submit_new input[type="button"]').click() self.wait_for_ajax()
agpl-3.0
guncoin/guncoin
test/functional/wallet_abandonconflict.py
12
8752
#!/usr/bin/env python3 # Copyright (c) 2014-2018 The Bitcoin Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. """Test the abandontransaction RPC. The abandontransaction RPC marks a transaction and all its in-wallet descendants as abandoned which allows their inputs to be respent. It can be used to replace "stuck" or evicted transactions. It only works on transactions which are not included in a block and are not currently in the mempool. It has no effect on transactions which are already abandoned. """ from decimal import Decimal from test_framework.test_framework import BitcoinTestFramework from test_framework.util import assert_equal, assert_raises_rpc_error, connect_nodes, disconnect_nodes, sync_blocks, sync_mempools class AbandonConflictTest(BitcoinTestFramework): def set_test_params(self): self.num_nodes = 2 self.extra_args = [["-minrelaytxfee=0.00001"], []] def skip_test_if_missing_module(self): self.skip_if_no_wallet() def run_test(self): self.nodes[1].generate(100) sync_blocks(self.nodes) balance = self.nodes[0].getbalance() txA = self.nodes[0].sendtoaddress(self.nodes[0].getnewaddress(), Decimal("10")) txB = self.nodes[0].sendtoaddress(self.nodes[0].getnewaddress(), Decimal("10")) txC = self.nodes[0].sendtoaddress(self.nodes[0].getnewaddress(), Decimal("10")) sync_mempools(self.nodes) self.nodes[1].generate(1) # Can not abandon non-wallet transaction assert_raises_rpc_error(-5, 'Invalid or non-wallet transaction id', lambda: self.nodes[0].abandontransaction(txid='ff' * 32)) # Can not abandon confirmed transaction assert_raises_rpc_error(-5, 'Transaction not eligible for abandonment', lambda: self.nodes[0].abandontransaction(txid=txA)) sync_blocks(self.nodes) newbalance = self.nodes[0].getbalance() assert(balance - newbalance < Decimal("0.001")) #no more than fees lost balance = newbalance # Disconnect nodes so node0's transactions don't get into node1's mempool disconnect_nodes(self.nodes[0], 1) # Identify the 10btc outputs nA = next(i for i, vout in enumerate(self.nodes[0].getrawtransaction(txA, 1)["vout"]) if vout["value"] == Decimal("10")) nB = next(i for i, vout in enumerate(self.nodes[0].getrawtransaction(txB, 1)["vout"]) if vout["value"] == Decimal("10")) nC = next(i for i, vout in enumerate(self.nodes[0].getrawtransaction(txC, 1)["vout"]) if vout["value"] == Decimal("10")) inputs =[] # spend 10btc outputs from txA and txB inputs.append({"txid":txA, "vout":nA}) inputs.append({"txid":txB, "vout":nB}) outputs = {} outputs[self.nodes[0].getnewaddress()] = Decimal("14.99998") outputs[self.nodes[1].getnewaddress()] = Decimal("5") signed = self.nodes[0].signrawtransactionwithwallet(self.nodes[0].createrawtransaction(inputs, outputs)) txAB1 = self.nodes[0].sendrawtransaction(signed["hex"]) # Identify the 14.99998btc output nAB = next(i for i, vout in enumerate(self.nodes[0].getrawtransaction(txAB1, 1)["vout"]) if vout["value"] == Decimal("14.99998")) #Create a child tx spending AB1 and C inputs = [] inputs.append({"txid":txAB1, "vout":nAB}) inputs.append({"txid":txC, "vout":nC}) outputs = {} outputs[self.nodes[0].getnewaddress()] = Decimal("24.9996") signed2 = self.nodes[0].signrawtransactionwithwallet(self.nodes[0].createrawtransaction(inputs, outputs)) txABC2 = self.nodes[0].sendrawtransaction(signed2["hex"]) # Create a child tx spending ABC2 signed3_change = Decimal("24.999") inputs = [ {"txid":txABC2, "vout":0} ] outputs = { self.nodes[0].getnewaddress(): signed3_change } 
signed3 = self.nodes[0].signrawtransactionwithwallet(self.nodes[0].createrawtransaction(inputs, outputs)) # note tx is never directly referenced, only abandoned as a child of the above self.nodes[0].sendrawtransaction(signed3["hex"]) # In mempool txs from self should increase balance from change newbalance = self.nodes[0].getbalance() assert_equal(newbalance, balance - Decimal("30") + signed3_change) balance = newbalance # Restart the node with a higher min relay fee so the parent tx is no longer in mempool # TODO: redo with eviction self.stop_node(0) self.start_node(0, extra_args=["-minrelaytxfee=0.0001"]) # Verify txs no longer in either node's mempool assert_equal(len(self.nodes[0].getrawmempool()), 0) assert_equal(len(self.nodes[1].getrawmempool()), 0) # Not in mempool txs from self should only reduce balance # inputs are still spent, but change not received newbalance = self.nodes[0].getbalance() assert_equal(newbalance, balance - signed3_change) # Unconfirmed received funds that are not in mempool, also shouldn't show # up in unconfirmed balance unconfbalance = self.nodes[0].getunconfirmedbalance() + self.nodes[0].getbalance() assert_equal(unconfbalance, newbalance) # Also shouldn't show up in listunspent assert(not txABC2 in [utxo["txid"] for utxo in self.nodes[0].listunspent(0)]) balance = newbalance # Abandon original transaction and verify inputs are available again # including that the child tx was also abandoned self.nodes[0].abandontransaction(txAB1) newbalance = self.nodes[0].getbalance() assert_equal(newbalance, balance + Decimal("30")) balance = newbalance # Verify that even with a low min relay fee, the tx is not reaccepted from wallet on startup once abandoned self.stop_node(0) self.start_node(0, extra_args=["-minrelaytxfee=0.00001"]) assert_equal(len(self.nodes[0].getrawmempool()), 0) assert_equal(self.nodes[0].getbalance(), balance) # But if it is received again then it is unabandoned # And since now in mempool, the change is available # But its child tx remains abandoned self.nodes[0].sendrawtransaction(signed["hex"]) newbalance = self.nodes[0].getbalance() assert_equal(newbalance, balance - Decimal("20") + Decimal("14.99998")) balance = newbalance # Send child tx again so it is unabandoned self.nodes[0].sendrawtransaction(signed2["hex"]) newbalance = self.nodes[0].getbalance() assert_equal(newbalance, balance - Decimal("10") - Decimal("14.99998") + Decimal("24.9996")) balance = newbalance # Remove using high relay fee again self.stop_node(0) self.start_node(0, extra_args=["-minrelaytxfee=0.0001"]) assert_equal(len(self.nodes[0].getrawmempool()), 0) newbalance = self.nodes[0].getbalance() assert_equal(newbalance, balance - Decimal("24.9996")) balance = newbalance # Create a double spend of AB1 by spending again from only A's 10 output # Mine double spend from node 1 inputs =[] inputs.append({"txid":txA, "vout":nA}) outputs = {} outputs[self.nodes[1].getnewaddress()] = Decimal("9.9999") tx = self.nodes[0].createrawtransaction(inputs, outputs) signed = self.nodes[0].signrawtransactionwithwallet(tx) self.nodes[1].sendrawtransaction(signed["hex"]) self.nodes[1].generate(1) connect_nodes(self.nodes[0], 1) sync_blocks(self.nodes) # Verify that B and C's 10 BTC outputs are available for spending again because AB1 is now conflicted newbalance = self.nodes[0].getbalance() assert_equal(newbalance, balance + Decimal("20")) balance = newbalance # There is currently a minor bug around this and so this test doesn't work. 
See Issue #7315 # Invalidate the block with the double spend and B's 10 BTC output should no longer be available # Don't think C's should either self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash()) newbalance = self.nodes[0].getbalance() #assert_equal(newbalance, balance - Decimal("10")) self.log.info("If balance has not declined after invalidateblock then out of mempool wallet tx which is no longer") self.log.info("conflicted has not resumed causing its inputs to be seen as spent. See Issue #7315") self.log.info(str(balance) + " -> " + str(newbalance) + " ?") if __name__ == '__main__': AbandonConflictTest().main()
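# --- Illustrative sketch (not part of the original test) ---
# The test above repeatedly locates a 10 BTC output index with a generator
# expression; the same idiom on a made-up decoded transaction:
from decimal import Decimal

vouts = [{"value": Decimal("39.99")}, {"value": Decimal("10")}]
n = next(i for i, vout in enumerate(vouts) if vout["value"] == Decimal("10"))
assert n == 1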
mit
yoshinorim/mysql-5.6
xtrabackup/test/kewpie/lib/modes/dtr/dtr_test_execution.py
26
5597
#! /usr/bin/env python # -*- mode: python; indent-tabs-mode: nil; -*- # vim:expandtab:shiftwidth=2:tabstop=2:smarttab: # # Copyright (C) 2010 Patrick Crews # # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA """ dtr_test_execution: code related to the execution of dtr test cases We are provided access to a testManager with dtr-specific testCases. We contact teh executionManager to produce the system and server configurations we need to execute a test. """ # imports import os import sys import subprocess import commands import lib.test_mgmt.test_execution as test_execution class testExecutor(test_execution.testExecutor): """ dtr-specific testExecutor We currently execute by sending test-case data to client/drizzletest...for now """ def execute_testCase (self): """ Execute a dtr testCase via calls to drizzletest (boo) Eventually, we will replace drizzletest with pythonic goodness, but we have these classes stored here for the moment """ test_execution.testExecutor.execute_testCase(self) self.status = 0 # generate command line drizzletest_cmd = self.generate_drizzletest_call() # call drizzletest self.execute_drizzletest(drizzletest_cmd) # analyze results self.current_test_status = self.process_drizzletest_output() self.set_server_status(self.current_test_status) def generate_drizzletest_call(self): """ Produce the command line we use to call drizzletest We have a healthy number of values, so we put this in a nice function """ drizzletest_arguments = [ '--no-defaults' , '--silent' , '--tmpdir=%s' %(self.master_server.tmpdir) , '--logdir=%s' %(self.master_server.logdir) , '--port=%d' %(self.master_server.master_port) , '--database=test' , '--user=root' , '--password=' #, '--testdir=%s' %(self.test_manager.testdir) , '--test-file=%s' %(self.current_testcase.testpath) , '--tail-lines=20' , '--timer-file=%s' %(self.master_server.timer_file) , '--result-file=%s' %(self.current_testcase.resultpath) ] if self.record_flag: # We want to record a new result drizzletest_arguments.append('--record') drizzletest_cmd = "%s %s %s" %( self.cmd_prefix , self.master_server.code_tree.drizzletest , " ".join(drizzletest_arguments)) return drizzletest_cmd def execute_drizzletest(self, drizzletest_cmd): """ Execute the commandline and return the result. 
We use subprocess as we can pass os.environ dicts and whatnot """ testcase_name = self.current_testcase.fullname self.time_manager.start(testcase_name,'test') #retcode, output = self.system_manager.execute_cmd( drizzletest_cmd # , must_pass = 0 ) drizzletest_outfile = os.path.join(self.logdir,'drizzletest.out') drizzletest_output = open(drizzletest_outfile,'w') drizzletest_subproc = subprocess.Popen( drizzletest_cmd , shell=True , cwd=self.system_manager.testdir , env=self.working_environment , stdout = drizzletest_output , stderr = subprocess.STDOUT ) drizzletest_subproc.wait() retcode = drizzletest_subproc.returncode execution_time = int(self.time_manager.stop(testcase_name)*1000) # millisec drizzletest_output.close() drizzletest_file = open(drizzletest_outfile,'r') output = ''.join(drizzletest_file.readlines()) drizzletest_file.close() self.logging.debug("drizzletest_retcode: %d" %(retcode)) self.current_test_retcode = retcode self.current_test_output = output self.current_test_exec_time = execution_time def process_drizzletest_output(self): """ Drizzletest has run, we now check out what we have """ retcode = self.current_test_retcode if retcode == 0: return 'pass' elif retcode == 62 or retcode == 15872: return 'skipped' elif retcode == 63 or retcode == 1: return 'fail' else: return 'fail'
gpl-2.0
ravibhure/ansible
lib/ansible/modules/cloud/docker/docker_volume.py
45
7796
#!/usr/bin/python # coding: utf-8 # # Copyright 2017 Red Hat | Ansible, Alex Grönholm <alex.gronholm@nextday.fi> # GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) from __future__ import absolute_import, division, print_function __metaclass__ = type ANSIBLE_METADATA = {'metadata_version': '1.1', 'status': ['preview'], 'supported_by': 'community'} DOCUMENTATION = u''' module: docker_volume version_added: "2.4" short_description: Manage Docker volumes description: - Create/remove Docker volumes. - Performs largely the same function as the "docker volume" CLI subcommand. options: name: description: - Name of the volume to operate on. required: true aliases: - volume_name driver: description: - Specify the type of volume. Docker provides the C(local) driver, but 3rd party drivers can also be used. default: local driver_options: description: - "Dictionary of volume settings. Consult docker docs for valid options and values: U(https://docs.docker.com/engine/reference/commandline/volume_create/#driver-specific-options)" labels: description: - List of labels to set for the volume force: description: - With state C(present) causes the volume to be deleted and recreated if the volume already exist and the driver, driver options or labels differ. This will cause any data in the existing volume to be lost. type: bool default: 'no' state: description: - C(absent) deletes the volume. - C(present) creates the volume, if it does not already exist. default: present choices: - absent - present extends_documentation_fragment: - docker author: - Alex Grönholm (@agronholm) requirements: - "python >= 2.6" - "docker-py >= 1.10.0" - "The docker server >= 1.9.0" ''' EXAMPLES = ''' - name: Create a volume docker_volume: name: volume_one - name: Remove a volume docker_volume: name: volume_one state: absent - name: Create a volume with options docker_volume: name: volume_two driver_options: type: btrfs device: /dev/sda2 ''' RETURN = ''' facts: description: Volume inspection results for the affected volume. returned: success type: dict sample: {} ''' try: from docker.errors import APIError except ImportError: # missing docker-py handled in ansible.module_utils.docker pass from ansible.module_utils.docker_common import DockerBaseClass, AnsibleDockerClient from ansible.module_utils.six import iteritems, text_type class TaskParameters(DockerBaseClass): def __init__(self, client): super(TaskParameters, self).__init__() self.client = client self.volume_name = None self.driver = None self.driver_options = None self.labels = None self.force = None self.debug = None for key, value in iteritems(client.module.params): setattr(self, key, value) class DockerVolumeManager(object): def __init__(self, client): self.client = client self.parameters = TaskParameters(client) self.check_mode = self.client.check_mode self.results = { u'changed': False, u'actions': [] } self.diff = self.client.module._diff self.existing_volume = self.get_existing_volume() state = self.parameters.state if state == 'present': self.present() elif state == 'absent': self.absent() def get_existing_volume(self): try: volumes = self.client.volumes() except APIError as e: self.client.fail(text_type(e)) if volumes[u'Volumes'] is None: return None for volume in volumes[u'Volumes']: if volume['Name'] == self.parameters.volume_name: return volume return None def has_different_config(self): """ Return the list of differences between the current parameters and the existing volume. 
:return: list of options that differ """ differences = [] if self.parameters.driver and self.parameters.driver != self.existing_volume['Driver']: differences.append('driver') if self.parameters.driver_options: if not self.existing_volume.get('Options'): differences.append('driver_options') else: for key, value in iteritems(self.parameters.driver_options): if (not self.existing_volume['Options'].get(key) or value != self.existing_volume['Options'][key]): differences.append('driver_options.%s' % key) if self.parameters.labels: existing_labels = self.existing_volume.get('Labels', {}) all_labels = set(self.parameters.labels) | set(existing_labels) for label in all_labels: if existing_labels.get(label) != self.parameters.labels.get(label): differences.append('labels.%s' % label) return differences def create_volume(self): if not self.existing_volume: if not self.check_mode: try: resp = self.client.create_volume(self.parameters.volume_name, driver=self.parameters.driver, driver_opts=self.parameters.driver_options, labels=self.parameters.labels) self.existing_volume = self.client.inspect_volume(resp['Name']) except APIError as e: self.client.fail(text_type(e)) self.results['actions'].append("Created volume %s with driver %s" % (self.parameters.volume_name, self.parameters.driver)) self.results['changed'] = True def remove_volume(self): if self.existing_volume: if not self.check_mode: try: self.client.remove_volume(self.parameters.volume_name) except APIError as e: self.client.fail(text_type(e)) self.results['actions'].append("Removed volume %s" % self.parameters.volume_name) self.results['changed'] = True def present(self): differences = [] if self.existing_volume: differences = self.has_different_config() if differences and self.parameters.force: self.remove_volume() self.existing_volume = None self.create_volume() if self.diff or self.check_mode or self.parameters.debug: self.results['diff'] = differences if not self.check_mode and not self.parameters.debug: self.results.pop('actions') self.results['ansible_facts'] = {u'docker_volume': self.get_existing_volume()} def absent(self): self.remove_volume() def main(): argument_spec = dict( volume_name=dict(type='str', required=True, aliases=['name']), state=dict(type='str', default='present', choices=['present', 'absent']), driver=dict(type='str', default='local'), driver_options=dict(type='dict', default={}), labels=dict(type='list'), force=dict(type='bool', default=False), debug=dict(type='bool', default=False) ) client = AnsibleDockerClient( argument_spec=argument_spec, supports_check_mode=True ) cm = DockerVolumeManager(client) client.module.exit_json(**cm.results) if __name__ == '__main__': main()
gpl-3.0
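A note on the docker_volume record above: its has_different_config method compares the desired driver, driver options and labels against the inspected volume. Below is a minimal, self-contained sketch of that comparison logic (the sample dicts are hypothetical, not real docker inspect output). Like the module, it only checks option keys named in the desired spec, while labels are compared symmetrically over the union of keys.

def volume_differences(desired, existing):
    # Collect the names of options that differ between the desired spec
    # and the existing volume, mirroring has_different_config above.
    differences = []
    if desired.get('driver') and desired['driver'] != existing.get('Driver'):
        differences.append('driver')
    for key, value in (desired.get('driver_options') or {}).items():
        if (existing.get('Options') or {}).get(key) != value:
            differences.append('driver_options.%s' % key)
    desired_labels = desired.get('labels') or {}
    existing_labels = existing.get('Labels') or {}
    for label in set(desired_labels) | set(existing_labels):
        if desired_labels.get(label) != existing_labels.get(label):
            differences.append('labels.%s' % label)
    return differences

print(volume_differences(
    {'driver': 'local', 'driver_options': {'type': 'btrfs'}, 'labels': {'env': 'dev'}},
    {'Driver': 'local', 'Options': {'type': 'ext4'}, 'Labels': {}}))
# ['driver_options.type', 'labels.env']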
ntt-sic/python-cinderclient
cinderclient/tests/v1/test_quota_classes.py
6
1448
# Copyright (c) 2011 OpenStack Foundation # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from cinderclient.tests import utils from cinderclient.tests.v1 import fakes cs = fakes.FakeClient() class QuotaClassSetsTest(utils.TestCase): def test_class_quotas_get(self): class_name = 'test' cs.quota_classes.get(class_name) cs.assert_called('GET', '/os-quota-class-sets/%s' % class_name) def test_update_quota(self): q = cs.quota_classes.get('test') q.update(volumes=2, snapshots=2) cs.assert_called('PUT', '/os-quota-class-sets/test') def test_refresh_quota(self): q = cs.quota_classes.get('test') q2 = cs.quota_classes.get('test') self.assertEqual(q.volumes, q2.volumes) q2.volumes = 0 self.assertNotEqual(q.volumes, q2.volumes) q2.get() self.assertEqual(q.volumes, q2.volumes)
apache-2.0
Cloudino/Arduino
arduino-core/src/processing/app/i18n/python/requests/packages/charade/euckrprober.py
2931
1675
######################## BEGIN LICENSE BLOCK ######################## # The Original Code is mozilla.org code. # # The Initial Developer of the Original Code is # Netscape Communications Corporation. # Portions created by the Initial Developer are Copyright (C) 1998 # the Initial Developer. All Rights Reserved. # # Contributor(s): # Mark Pilgrim - port to Python # # This library is free software; you can redistribute it and/or # modify it under the terms of the GNU Lesser General Public # License as published by the Free Software Foundation; either # version 2.1 of the License, or (at your option) any later version. # # This library is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU # Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public # License along with this library; if not, write to the Free Software # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA # 02110-1301 USA ######################### END LICENSE BLOCK ######################### from .mbcharsetprober import MultiByteCharSetProber from .codingstatemachine import CodingStateMachine from .chardistribution import EUCKRDistributionAnalysis from .mbcssm import EUCKRSMModel class EUCKRProber(MultiByteCharSetProber): def __init__(self): MultiByteCharSetProber.__init__(self) self._mCodingSM = CodingStateMachine(EUCKRSMModel) self._mDistributionAnalyzer = EUCKRDistributionAnalysis() self.reset() def get_charset_name(self): return "EUC-KR"
lgpl-2.1
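EUCKRProber above is normally driven by the package's top-level detect() helper rather than instantiated by hand. A hedged usage sketch (the sample string is an arbitrary Korean greeting; very short inputs may be detected with lower confidence or not at all):

import charade  # the chardet fork this file belongs to; plain chardet exposes the same API

sample = u'\uc548\ub155\ud558\uc138\uc694 \uc548\ub155\ud558\uc138\uc694'.encode('euc-kr')
print(charade.detect(sample))
# expected something like {'encoding': 'EUC-KR', 'confidence': 0.99}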
Elneo-group/account-invoicing
account_invoice_line_sort/models/account_invoice.py
26
4940
# -*- coding: utf-8 -*- ############################################################################## # This file is part of account_invoice_line_sort, an Odoo module. # # Copyright (c) 2015 ACSONE SA/NV (<http://acsone.eu>) # # account_invoice_line_sort is free software: you can redistribute it # and/or modify it under the terms of the GNU Affero General Public License # as published by the Free Software Foundation, either version 3 of # the License, or (at your option) any later version. # # account_invoice_line_sort is distributed in the hope that it will # be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the # GNU Affero General Public License # along with account_invoice_line_sort. # If not, see <http://www.gnu.org/licenses/>. # ############################################################################## from openerp import models, fields, api from operator import attrgetter AVAILABLE_SORT_OPTIONS = [ ('sequence', 'Sequence'), ('name', 'Description'), ('price_unit', 'Unit Price'), ('price_subtotal', 'Amount'), ] AVAILABLE_ORDER_OPTIONS = [ ('asc', 'Ascending'), ('desc', 'Descending') ] class account_invoice(models.Model): _inherit = "account.invoice" _sort_trigger_fields = ('line_order', 'line_order_direction') line_order = fields.Selection(AVAILABLE_SORT_OPTIONS, "Sort Lines By", default='sequence') line_order_direction = fields.Selection(AVAILABLE_ORDER_OPTIONS, "Sort Direction", default='asc') @api.model def get_partner_sort_options(self, partner_id): res = {} if partner_id: p = self.env['res.partner'].browse(partner_id) res['line_order'] = p.line_order res['line_order_direction'] = p.line_order_direction return res @api.multi def onchange_partner_id(self, type, partner_id, date_invoice=False, payment_term=False, partner_bank_id=False, company_id=False): res = super(account_invoice, self).onchange_partner_id(type, partner_id, date_invoice=date_invoice, payment_term=payment_term, partner_bank_id=partner_bank_id, company_id=company_id) if partner_id: res['value'].update(self.get_partner_sort_options(partner_id)) return res @api.one def _sort_account_invoice_line(self): if self.invoice_line: sequence = 0 key = attrgetter(self.line_order) reverse = self.line_order_direction == 'desc' for line in self.invoice_line.sorted(key=key, reverse=reverse): sequence += 10 line.sequence = sequence @api.multi def write(self, vals): sort = False fields = [key for key in vals if key in self._sort_trigger_fields] if fields: if [key for key in fields if vals[key] != self[key]]: sort = True res = super(account_invoice, self).write(vals) if sort or 'invoice_line' in vals: self._sort_account_invoice_line() return res @api.model @api.returns('self', lambda value: value.id) def create(self, vals): if not [key for key in vals if key in self._sort_trigger_fields]: partner_id = vals.get('partner_id', False) vals.update(self.get_partner_sort_options(partner_id)) invoice = super(account_invoice, self).create(vals) invoice._sort_account_invoice_line() return invoice class account_invoice_line(models.Model): _inherit = "account.invoice.line" _sort_trigger_fields = ('name', 'quantity', 'price_unit', 'discount') @api.multi def write(self, vals): sort = False fields = [key for key in vals if key in self._sort_trigger_fields] if fields: if [key for key in fields if vals[key] != self[key]]: sort = True res = super(account_invoice_line, 
self).write(vals) if sort: self.invoice_id._sort_account_invoice_line() return res @api.model @api.returns('self', lambda value: value.id) def create(self, vals): line = super(account_invoice_line, self).create(vals) line.invoice_id._sort_account_invoice_line() return line
agpl-3.0
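The core of _sort_account_invoice_line above is plain attrgetter sorting plus resequencing in steps of 10, which works just as well outside Odoo. A minimal sketch with hypothetical line objects:

from operator import attrgetter

class Line(object):
    def __init__(self, name, price_unit):
        self.name, self.price_unit, self.sequence = name, price_unit, 0

lines = [Line('b', 5.0), Line('a', 9.0)]
sequence = 0
# Sort descending by unit price and renumber in steps of 10, as the module does.
for line in sorted(lines, key=attrgetter('price_unit'), reverse=True):
    sequence += 10
    line.sequence = sequence

print([(l.name, l.sequence) for l in lines])  # [('b', 20), ('a', 10)]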
GeraldLoeffler/nupic
tests/unit/nupic/encoders/coordinate_test.py
7
11676
#!/usr/bin/env python # ---------------------------------------------------------------------- # Numenta Platform for Intelligent Computing (NuPIC) # Copyright (C) 2014, Numenta, Inc. Unless you have an agreement # with Numenta, Inc., for a separate license for this software code, the # following terms and conditions apply: # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero Public License version 3 as # published by the Free Software Foundation. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. # See the GNU Affero Public License for more details. # # You should have received a copy of the GNU Affero Public License # along with this program. If not, see http://www.gnu.org/licenses. # # http://numenta.org/licenses/ # ---------------------------------------------------------------------- import capnp # For import hook import numpy as np import tempfile import unittest from mock import patch from nupic.encoders.base import defaultDtype from nupic.encoders.coordinate import CoordinateEncoder from nupic.encoders.coordinate_capnp import CoordinateEncoderProto # Disable warnings about accessing protected members # pylint: disable=W0212 class CoordinateEncoderTest(unittest.TestCase): """Unit tests for CoordinateEncoder class""" def setUp(self): self.encoder = CoordinateEncoder(name="coordinate", n=33, w=3) def testInvalidW(self): # Even args = {"name": "coordinate", "n": 45, "w": 4} self.assertRaises(ValueError, CoordinateEncoder, **args) # 0 args = {"name": "coordinate", "n": 45, "w": 0} self.assertRaises(ValueError, CoordinateEncoder, **args) # Negative args = {"name": "coordinate", "n": 45, "w": -2} self.assertRaises(ValueError, CoordinateEncoder, **args) def testInvalidN(self): # Too small args = {"name": "coordinate", "n": 11, "w": 3} self.assertRaises(ValueError, CoordinateEncoder, **args) def testOrderForCoordinate(self): h1 = self.encoder._orderForCoordinate(np.array([2, 5, 10])) h2 = self.encoder._orderForCoordinate(np.array([2, 5, 11])) h3 = self.encoder._orderForCoordinate(np.array([2497477, -923478])) self.assertTrue(0 <= h1 and h1 < 1) self.assertTrue(0 <= h2 and h2 < 1) self.assertTrue(0 <= h3 and h3 < 1) self.assertTrue(h1 != h2) self.assertTrue(h2 != h3) def testBitForCoordinate(self): n = 1000 b1 = self.encoder._bitForCoordinate(np.array([2, 5, 10]), n) b2 = self.encoder._bitForCoordinate(np.array([2, 5, 11]), n) b3 = self.encoder._bitForCoordinate(np.array([2497477, -923478]), n) self.assertTrue(0 <= b1 and b1 < n) self.assertTrue(0 <= b2 and b2 < n) self.assertTrue(0 <= b3 and b3 < n) self.assertTrue(b1 != b2) self.assertTrue(b2 != b3) # Small n n = 2 b4 = self.encoder._bitForCoordinate(np.array([5, 10]), n) self.assertTrue(0 <= b4 < n) @patch.object(CoordinateEncoder, "_orderForCoordinate") def testTopWCoordinates(self, mockOrderForCoordinate): # Mock orderForCoordinate mockFn = lambda coordinate: np.sum(coordinate) / 5.0 mockOrderForCoordinate.side_effect = mockFn coordinates = np.array([[1], [2], [3], [4], [5]]) top = self.encoder._topWCoordinates(coordinates, 2).tolist() self.assertEqual(len(top), 2) self.assertIn([5], top) self.assertIn([4], top) def testNeighbors1D(self): coordinate = np.array([100]) radius = 5 neighbors = self.encoder._neighbors(coordinate, radius).tolist() self.assertEqual(len(neighbors), 11) self.assertIn([95], neighbors) self.assertIn([100], neighbors) 
self.assertIn([105], neighbors) def testNeighbors2D(self): coordinate = np.array([100, 200]) radius = 5 neighbors = self.encoder._neighbors(coordinate, radius).tolist() self.assertEqual(len(neighbors), 121) self.assertIn([95, 195], neighbors) self.assertIn([95, 205], neighbors) self.assertIn([100, 200], neighbors) self.assertIn([105, 195], neighbors) self.assertIn([105, 205], neighbors) def testNeighbors0Radius(self): coordinate = np.array([100, 200, 300]) radius = 0 neighbors = self.encoder._neighbors(coordinate, radius).tolist() self.assertEqual(len(neighbors), 1) self.assertIn([100, 200, 300], neighbors) def testEncodeIntoArray(self): n = 33 w = 3 encoder = CoordinateEncoder(name="coordinate", n=n, w=w) coordinate = np.array([100, 200]) radius = 5 output1 = encode(encoder, coordinate, radius) self.assertEqual(np.sum(output1), w) # Test that we get the same output for the same input output2 = encode(encoder, coordinate, radius) self.assertTrue(np.array_equal(output2, output1)) def testEncodeSaturateArea(self): n = 1999 w = 25 encoder = CoordinateEncoder(name="coordinate", n=n, w=w) outputA = encode(encoder, np.array([0, 0]), 2) outputB = encode(encoder, np.array([0, 1]), 2) self.assertEqual(overlap(outputA, outputB), 0.8) def testEncodeRelativePositions(self): # As you get farther from a coordinate, the overlap should decrease overlaps = overlapsForRelativeAreas(999, 51, np.array([100, 200]), 10, dPosition=np.array([2, 2]), num=5) self.assertDecreasingOverlaps(overlaps) def testEncodeRelativeRadii(self): # As radius increases, the overlap should decrease overlaps = overlapsForRelativeAreas(999, 25, np.array([100, 200]), 5, dRadius=2, num=5) self.assertDecreasingOverlaps(overlaps) # As radius decreases, the overlap should decrease overlaps = overlapsForRelativeAreas(999, 51, np.array([100, 200]), 20, dRadius=-2, num=5) self.assertDecreasingOverlaps(overlaps) def testEncodeRelativePositionsAndRadii(self): # As radius increases and positions change, the overlap should decrease overlaps = overlapsForRelativeAreas(999, 25, np.array([100, 200]), 5, dPosition=np.array([1, 1]), dRadius=1, num=5) self.assertDecreasingOverlaps(overlaps) def testEncodeUnrelatedAreas(self): """ assert unrelated areas don"t share bits (outside of chance collisions) """ avgThreshold = 0.3 maxThreshold = 0.12 overlaps = overlapsForUnrelatedAreas(1499, 37, 5) self.assertLess(np.max(overlaps), maxThreshold) self.assertLess(np.average(overlaps), avgThreshold) maxThreshold = 0.12 overlaps = overlapsForUnrelatedAreas(1499, 37, 10) self.assertLess(np.max(overlaps), maxThreshold) self.assertLess(np.average(overlaps), avgThreshold) maxThreshold = 0.17 overlaps = overlapsForUnrelatedAreas(999, 25, 10) self.assertLess(np.max(overlaps), maxThreshold) self.assertLess(np.average(overlaps), avgThreshold) maxThreshold = 0.25 overlaps = overlapsForUnrelatedAreas(499, 13, 10) self.assertLess(np.max(overlaps), maxThreshold) self.assertLess(np.average(overlaps), avgThreshold) def testEncodeAdjacentPositions(self, verbose=False): repetitions = 100 n = 999 w = 25 radius = 10 minThreshold = 0.75 avgThreshold = 0.90 allOverlaps = np.empty(repetitions) for i in range(repetitions): overlaps = overlapsForRelativeAreas(n, w, np.array([i * 10, i * 10]), radius, dPosition=np.array([0, 1]), num=1) allOverlaps[i] = overlaps[0] self.assertGreater(np.min(allOverlaps), minThreshold) self.assertGreater(np.average(allOverlaps), avgThreshold) if verbose: print ("===== Adjacent positions overlap " "(n = {0}, w = {1}, radius = {2}) ===").format(n, w, 
radius) print "Max: {0}".format(np.max(allOverlaps)) print "Min: {0}".format(np.min(allOverlaps)) print "Average: {0}".format(np.average(allOverlaps)) def assertDecreasingOverlaps(self, overlaps): self.assertEqual((np.diff(overlaps) >= 0).sum(), 0) def testReadWrite(self): coordinate = np.array([100, 200]) radius = 5 output1 = encode(self.encoder, coordinate, radius) proto1 = CoordinateEncoderProto.new_message() self.encoder.write(proto1) # Write the proto to a temp file and read it back into a new proto with tempfile.TemporaryFile() as f: proto1.write(f) f.seek(0) proto2 = CoordinateEncoderProto.read(f) encoder = CoordinateEncoder.read(proto2) self.assertIsInstance(encoder, CoordinateEncoder) self.assertEqual(encoder.w, self.encoder.w) self.assertEqual(encoder.n, self.encoder.n) self.assertEqual(encoder.name, self.encoder.name) self.assertEqual(encoder.verbosity, self.encoder.verbosity) coordinate = np.array([100, 200]) radius = 5 output2 = encode(encoder, coordinate, radius) self.assertTrue(np.array_equal(output1, output2)) def encode(encoder, coordinate, radius): output = np.zeros(encoder.getWidth(), dtype=defaultDtype) encoder.encodeIntoArray((coordinate, radius), output) return output def overlap(sdr1, sdr2): assert sdr1.size == sdr2.size return float((sdr1 & sdr2).sum()) / sdr1.sum() def overlapsForRelativeAreas(n, w, initPosition, initRadius, dPosition=None, dRadius=0, num=100, verbose=False): """ Return overlaps between an encoding and other encodings relative to it :param n: the size of the encoder output :param w: the number of active bits in the encoder output :param initPosition: the position of the first encoding :param initRadius: the radius of the first encoding :param dPosition: the offset to apply to each subsequent position :param dRadius: the offset to apply to each subsequent radius :param num: the number of encodings to generate :param verbose: whether to print verbose output """ encoder = CoordinateEncoder(name="coordinate", n=n, w=w) overlaps = np.empty(num) outputA = encode(encoder, np.array(initPosition), initRadius) for i in range(num): newPosition = initPosition if dPosition is None else ( initPosition + (i + 1) * dPosition) newRadius = initRadius + (i + 1) * dRadius outputB = encode(encoder, newPosition, newRadius) overlaps[i] = overlap(outputA, outputB) if verbose: print print ("===== Relative encoding overlaps (n = {0}, w = {1}, " "initPosition = {2}, initRadius = {3}, " "dPosition = {4}, dRadius = {5}) =====").format( n, w, initPosition, initRadius, dPosition, dRadius) print "Average: {0}".format(np.average(overlaps)) print "Max: {0}".format(np.max(overlaps)) return overlaps def overlapsForUnrelatedAreas(n, w, radius, repetitions=100, verbose=False): """ Return overlaps between an encoding and other, unrelated encodings """ return overlapsForRelativeAreas(n, w, np.array([0, 0]), radius, dPosition=np.array([0, radius * 10]), num=repetitions, verbose=verbose) if __name__ == "__main__": unittest.main()
agpl-3.0
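The overlap() helper in the test above measures SDR similarity as the fraction of shared active bits. A self-contained sketch with two hypothetical 10-bit SDRs:

import numpy as np

def overlap(sdr1, sdr2):
    # Fraction of sdr1's active bits that are also active in sdr2.
    assert sdr1.size == sdr2.size
    return float((sdr1 & sdr2).sum()) / sdr1.sum()

a = np.array([1, 1, 1, 0, 0, 0, 0, 0, 0, 0], dtype=bool)
b = np.array([1, 1, 0, 1, 0, 0, 0, 0, 0, 0], dtype=bool)
print(overlap(a, b))  # 2 shared bits out of 3 active -> ~0.667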
F-AOSP/platform_external_skia
platform_tools/android/bin/download_utils.py
149
8464
#!/usr/bin/python # Copyright (c) 2012 The Native Client Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """A library to assist automatically downloading files. This library is used by scripts that download tarballs, zipfiles, etc. as part of the build process. """ import hashlib import http_download import os.path import re import shutil import sys import time import urllib2 SOURCE_STAMP = 'SOURCE_URL' HASH_STAMP = 'SOURCE_SHA1' # Designed to handle more general inputs than sys.platform because the platform # name may come from the command line. PLATFORM_COLLAPSE = { 'windows': 'windows', 'win32': 'windows', 'cygwin': 'windows', 'linux': 'linux', 'linux2': 'linux', 'linux3': 'linux', 'darwin': 'mac', 'mac': 'mac', } ARCH_COLLAPSE = { 'i386' : 'x86', 'i686' : 'x86', 'x86_64': 'x86', 'armv7l': 'arm', } class HashError(Exception): def __init__(self, download_url, expected_hash, actual_hash): self.download_url = download_url self.expected_hash = expected_hash self.actual_hash = actual_hash def __str__(self): return 'Got hash "%s" but expected hash "%s" for "%s"' % ( self.actual_hash, self.expected_hash, self.download_url) def PlatformName(name=None): if name is None: name = sys.platform return PLATFORM_COLLAPSE[name] def ArchName(name=None): if name is None: if PlatformName() == 'windows': # TODO(pdox): Figure out how to auto-detect 32-bit vs 64-bit Windows. name = 'i386' else: import platform name = platform.machine() return ARCH_COLLAPSE[name] def EnsureFileCanBeWritten(filename): directory = os.path.dirname(filename) if not os.path.exists(directory): os.makedirs(directory) def WriteData(filename, data): EnsureFileCanBeWritten(filename) f = open(filename, 'wb') f.write(data) f.close() def WriteDataFromStream(filename, stream, chunk_size, verbose=True): EnsureFileCanBeWritten(filename) dst = open(filename, 'wb') try: while True: data = stream.read(chunk_size) if len(data) == 0: break dst.write(data) if verbose: # Indicate that we're still writing. sys.stdout.write('.') sys.stdout.flush() finally: if verbose: sys.stdout.write('\n') dst.close() def DoesStampMatch(stampfile, expected, index): try: f = open(stampfile, 'r') stamp = f.read() f.close() if stamp.split('\n')[index] == expected: return "already up-to-date." elif stamp.startswith('manual'): return "manual override." return False except IOError: return False def WriteStamp(stampfile, data): EnsureFileCanBeWritten(stampfile) f = open(stampfile, 'w') f.write(data) f.close() def StampIsCurrent(path, stamp_name, stamp_contents, min_time=None, index=0): stampfile = os.path.join(path, stamp_name) # Check if the stampfile is older than the minimum last mod time if min_time: try: stamp_time = os.stat(stampfile).st_mtime if stamp_time <= min_time: return False except OSError: return False return DoesStampMatch(stampfile, stamp_contents, index) def WriteSourceStamp(path, url): stampfile = os.path.join(path, SOURCE_STAMP) WriteStamp(stampfile, url) def WriteHashStamp(path, hash_val): hash_stampfile = os.path.join(path, HASH_STAMP) WriteStamp(hash_stampfile, hash_val) def Retry(op, *args): # Windows seems to be prone to having commands that delete files or # directories fail. We currently do not have a complete understanding why, # and as a workaround we simply retry the command a few times. # It appears that file locks are hanging around longer than they should. This # may be a secondary effect of processes hanging around longer than they # should. 
This may be because when we kill a browser sel_ldr does not exit # immediately, etc. # Virus checkers can also accidentally prevent files from being deleted, but # that shouldn't be a problem on the bots. if sys.platform in ('win32', 'cygwin'): count = 0 while True: try: op(*args) break except Exception: sys.stdout.write("FAILED: %s %s\n" % (op.__name__, repr(args))) count += 1 if count < 5: sys.stdout.write("RETRY: %s %s\n" % (op.__name__, repr(args))) time.sleep(pow(2, count)) else: # Don't mask the exception. raise else: op(*args) def MoveDirCleanly(src, dst): RemoveDir(dst) MoveDir(src, dst) def MoveDir(src, dst): Retry(shutil.move, src, dst) def RemoveDir(path): if os.path.exists(path): Retry(shutil.rmtree, path) def RemoveFile(path): if os.path.exists(path): Retry(os.unlink, path) def _HashFileHandle(fh): """sha1 of a file like object. Arguments: fh: file handle like object to hash. Returns: sha1 as a string. """ hasher = hashlib.sha1() try: while True: data = fh.read(4096) if not data: break hasher.update(data) finally: fh.close() return hasher.hexdigest() def HashFile(filename): """sha1 a file on disk. Arguments: filename: filename to hash. Returns: sha1 as a string. """ fh = open(filename, 'rb') return _HashFileHandle(fh) def HashUrlByDownloading(url): """sha1 the data at an url. Arguments: url: url to download from. Returns: sha1 of the data at the url. """ try: fh = urllib2.urlopen(url) except: sys.stderr.write("Failed fetching URL: %s\n" % url) raise return _HashFileHandle(fh) # Attempts to get the SHA1 hash of a file given a URL by looking for # an adjacent file with a ".sha1hash" suffix. This saves having to # download a large tarball just to get its hash. Otherwise, we fall # back to downloading the main file. def HashUrl(url): hash_url = '%s.sha1hash' % url try: fh = urllib2.urlopen(hash_url) data = fh.read(100) fh.close() except urllib2.HTTPError, exn: if exn.code == 404: return HashUrlByDownloading(url) raise else: if not re.match('[0-9a-f]{40}\n?$', data): raise AssertionError('Bad SHA1 hash file: %r' % data) return data.strip() def SyncURL(url, filename=None, stamp_dir=None, min_time=None, hash_val=None, keep=False, verbose=False, stamp_index=0): """Synchronize a destination file with a URL. If the URL does not match the URL stamp, then we must re-download it. Arguments: url: the url to compare against and download filename: the file to create on download path: the download path stamp_dir: the filename containing the URL stamp to check against hash_val: if set, the expected hash which must be matched verbose: prints out status as it runs stamp_index: index within the stamp file to check. Returns: True if the file is replaced False if the file is not replaced Exception: HashError: if the hash does not match """ assert url and filename # If we are not keeping the tarball, or we already have it, we can # skip downloading it for this reason. If we are keeping it, # it must exist. if keep: tarball_ok = os.path.isfile(filename) else: tarball_ok = True # If we don't need the tarball and the stamp_file matches the url, then # we must be up to date. If the URL differs but the recorded hash matches # the one we'll insist the tarball has, then that's good enough too. # TODO(mcgrathr): Download the .sha1sum file first to compare with # the cached hash, in case --file-hash options weren't used. if tarball_ok and stamp_dir is not None: if StampIsCurrent(stamp_dir, SOURCE_STAMP, url, min_time): if verbose: print '%s is already up to date.'
% filename return False if (hash_val is not None and StampIsCurrent(stamp_dir, HASH_STAMP, hash_val, min_time, stamp_index)): if verbose: print '%s is identical to the up to date file.' % filename return False if verbose: print 'Updating %s\n\tfrom %s.' % (filename, url) EnsureFileCanBeWritten(filename) http_download.HttpDownload(url, filename) if hash_val: tar_hash = HashFile(filename) if hash_val != tar_hash: raise HashError(actual_hash=tar_hash, expected_hash=hash_val, download_url=url) return True
bsd-3-clause
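_HashFileHandle in the record above streams a file through hashlib in 4 KB chunks so large tarballs never have to fit in memory. The same pattern as a quick, self-contained sketch (the filename in the commented call is hypothetical):

import hashlib

def sha1_of_file(filename, chunk_size=4096):
    # Stream the file through SHA-1 chunk by chunk to bound memory use.
    hasher = hashlib.sha1()
    with open(filename, 'rb') as fh:
        while True:
            data = fh.read(chunk_size)
            if not data:
                break
            hasher.update(data)
    return hasher.hexdigest()

# print(sha1_of_file('some_tarball.tgz'))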
timduru/platform_external_chromium
net/tools/testserver/device_management.py
66
23833
#!/usr/bin/python2.5 # Copyright (c) 2011 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """A bare-bones test server for testing cloud policy support. This implements a simple cloud policy test server that can be used to test chrome's device management service client. The policy information is read from the file named device_management in the server's data directory. It contains enforced and recommended policies for the device and user scope, and a list of managed users. The format of the file is JSON. The root dictionary contains a list under the key "managed_users". It contains auth tokens for which the server will claim that the user is managed. The token string "*" indicates that all users are claimed to be managed. Other keys in the root dictionary identify request scopes. Each request scope is described by a dictionary that holds two sub-dictionaries: "mandatory" and "recommended". Both these hold the policy definitions as key/value stores, their format is identical to what the Linux implementation reads from /etc. Example: { "chromeos/device": { "mandatory": { "HomepageLocation" : "http://www.chromium.org" }, "recommended": { "JavascriptEnabled": false, }, }, "managed_users": [ "secret123456" ] } """ import cgi import logging import os import random import re import sys import time import tlslite import tlslite.api import tlslite.utils # The name and availability of the json module varies in python versions. try: import simplejson as json except ImportError: try: import json except ImportError: json = None import asn1der import device_management_backend_pb2 as dm import cloud_policy_pb2 as cp import chrome_device_policy_pb2 as dp # ASN.1 object identifier for PKCS#1/RSA. PKCS1_RSA_OID = '\x2a\x86\x48\x86\xf7\x0d\x01\x01\x01' class RequestHandler(object): """Decodes and handles device management requests from clients. The handler implements all the request parsing and protobuf message decoding and encoding. It calls back into the server to lookup, register, and unregister clients. """ def __init__(self, server, path, headers, request): """Initialize the handler. Args: server: The TestServer object to use for (un)registering clients. path: A string containing the request path and query parameters. headers: A rfc822.Message-like object containing HTTP headers. request: The request data received from the client as a string. """ self._server = server self._path = path self._headers = headers self._request = request self._params = None def GetUniqueParam(self, name): """Extracts a unique query parameter from the request. Args: name: Names the parameter to fetch. Returns: The parameter value or None if the parameter doesn't exist or is not unique. """ if not self._params: self._params = cgi.parse_qs(self._path[self._path.find('?') + 1:]) param_list = self._params.get(name, []) if len(param_list) == 1: return param_list[0] return None; def HandleRequest(self): """Handles a request. Parses the data supplied at construction time and returns a pair indicating http status code and response data to be sent back to the client. Returns: A tuple of HTTP status code and response data to send to the client. 
""" rmsg = dm.DeviceManagementRequest() rmsg.ParseFromString(self._request) logging.debug('auth -> ' + self._headers.getheader('Authorization', '')) logging.debug('deviceid -> ' + self.GetUniqueParam('deviceid')) self.DumpMessage('Request', rmsg) request_type = self.GetUniqueParam('request') # Check server side requirements, as defined in # device_management_backend.proto. if (self.GetUniqueParam('devicetype') != '2' or self.GetUniqueParam('apptype') != 'Chrome' or (request_type != 'ping' and len(self.GetUniqueParam('deviceid')) >= 64) or len(self.GetUniqueParam('agent')) >= 64): return (400, 'Invalid request parameter') if request_type == 'register': return self.ProcessRegister(rmsg.register_request) elif request_type == 'unregister': return self.ProcessUnregister(rmsg.unregister_request) elif request_type == 'policy' or request_type == 'ping': return self.ProcessPolicy(rmsg.policy_request, request_type) else: return (400, 'Invalid request parameter') def CheckGoogleLogin(self): """Extracts the GoogleLogin auth token from the HTTP request, and returns it. Returns None if the token is not present. """ match = re.match('GoogleLogin auth=(\\w+)', self._headers.getheader('Authorization', '')) if not match: return None return match.group(1) def ProcessRegister(self, msg): """Handles a register request. Checks the query for authorization and device identifier, registers the device with the server and constructs a response. Args: msg: The DeviceRegisterRequest message received from the client. Returns: A tuple of HTTP status code and response data to send to the client. """ # Check the auth token and device ID. if not self.CheckGoogleLogin(): return (403, 'No authorization') device_id = self.GetUniqueParam('deviceid') if not device_id: return (400, 'Missing device identifier') token_info = self._server.RegisterDevice(device_id, msg.machine_id, msg.type) # Send back the reply. response = dm.DeviceManagementResponse() response.register_response.device_management_token = ( token_info['device_token']) response.register_response.machine_name = token_info['machine_name'] self.DumpMessage('Response', response) return (200, response.SerializeToString()) def ProcessUnregister(self, msg): """Handles a register request. Checks for authorization, unregisters the device and constructs the response. Args: msg: The DeviceUnregisterRequest message received from the client. Returns: A tuple of HTTP status code and response data to send to the client. """ # Check the management token. token, response = self.CheckToken(); if not token: return response # Unregister the device. self._server.UnregisterDevice(token); # Prepare and send the response. response = dm.DeviceManagementResponse() response.unregister_response.CopyFrom(dm.DeviceUnregisterResponse()) self.DumpMessage('Response', response) return (200, response.SerializeToString()) def ProcessInitialPolicy(self, msg): """Handles a 'preregister policy' request. Queries the list of managed users and responds the client if their user is managed or not. Args: msg: The PolicyFetchRequest message received from the client. Returns: A tuple of HTTP status code and response data to send to the client. """ # Check the GAIA token. 
auth = self.CheckGoogleLogin() if not auth: return (403, 'No authorization') chrome_initial_settings = dm.ChromeInitialSettingsProto() if ('*' in self._server.policy['managed_users'] or auth in self._server.policy['managed_users']): chrome_initial_settings.enrollment_provision = ( dm.ChromeInitialSettingsProto.MANAGED); else: chrome_initial_settings.enrollment_provision = ( dm.ChromeInitialSettingsProto.UNMANAGED); policy_data = dm.PolicyData() policy_data.policy_type = msg.policy_type policy_data.policy_value = chrome_initial_settings.SerializeToString() # Prepare and send the response. response = dm.DeviceManagementResponse() fetch_response = response.policy_response.response.add() fetch_response.policy_data = ( policy_data.SerializeToString()) self.DumpMessage('Response', response) return (200, response.SerializeToString()) def ProcessDevicePolicy(self, msg): """Handles a policy request that uses the deprecated protcol. TODO(gfeher): Remove this when we certainly don't need it. Checks for authorization, encodes the policy into protobuf representation and constructs the response. Args: msg: The DevicePolicyRequest message received from the client. Returns: A tuple of HTTP status code and response data to send to the client. """ # Check the management token. token, response = self.CheckToken() if not token: return response # Stuff the policy dictionary into a response message and send it back. response = dm.DeviceManagementResponse() response.policy_response.CopyFrom(dm.DevicePolicyResponse()) # Respond only if the client requested policy for the cros/device scope, # since that's where chrome policy is supposed to live in. if msg.policy_scope == 'chromeos/device': policy = self._server.policy['google/chromeos/user']['mandatory'] setting = response.policy_response.setting.add() setting.policy_key = 'chrome-policy' policy_value = dm.GenericSetting() for (key, value) in policy.iteritems(): entry = policy_value.named_value.add() entry.name = key entry_value = dm.GenericValue() if isinstance(value, bool): entry_value.value_type = dm.GenericValue.VALUE_TYPE_BOOL entry_value.bool_value = value elif isinstance(value, int): entry_value.value_type = dm.GenericValue.VALUE_TYPE_INT64 entry_value.int64_value = value elif isinstance(value, str) or isinstance(value, unicode): entry_value.value_type = dm.GenericValue.VALUE_TYPE_STRING entry_value.string_value = value elif isinstance(value, list): entry_value.value_type = dm.GenericValue.VALUE_TYPE_STRING_ARRAY for list_entry in value: entry_value.string_array.append(str(list_entry)) entry.value.CopyFrom(entry_value) setting.policy_value.CopyFrom(policy_value) self.DumpMessage('Response', response) return (200, response.SerializeToString()) def ProcessPolicy(self, msg, request_type): """Handles a policy request. Checks for authorization, encodes the policy into protobuf representation and constructs the response. Args: msg: The DevicePolicyRequest message received from the client. Returns: A tuple of HTTP status code and response data to send to the client. 
""" if msg.request: for request in msg.request: if request.policy_type == 'google/chromeos/unregistered_user': if request_type != 'ping': return (400, 'Invalid request type') return self.ProcessInitialPolicy(request) elif (request.policy_type in ('google/chromeos/user', 'google/chromeos/device')): if request_type != 'policy': return (400, 'Invalid request type') return self.ProcessCloudPolicy(request) else: return (400, 'Invalid policy_type') else: return self.ProcessDevicePolicy(msg) def SetProtobufMessageField(self, group_message, field, field_value): '''Sets a field in a protobuf message. Args: group_message: The protobuf message. field: The field of the message to set, it shuold be a member of group_message.DESCRIPTOR.fields. field_value: The value to set. ''' if field.label == field.LABEL_REPEATED: assert type(field_value) == list entries = group_message.__getattribute__(field.name) for list_item in field_value: entries.append(list_item) return elif field.type == field.TYPE_BOOL: assert type(field_value) == bool elif field.type == field.TYPE_STRING: assert type(field_value) == str or type(field_value) == unicode elif field.type == field.TYPE_INT64: assert type(field_value) == int elif (field.type == field.TYPE_MESSAGE and field.message_type.name == 'StringList'): assert type(field_value) == list entries = group_message.__getattribute__(field.name).entries for list_item in field_value: entries.append(list_item) return else: raise Exception('Unknown field type %s' % field.type) group_message.__setattr__(field.name, field_value) def GatherDevicePolicySettings(self, settings, policies): '''Copies all the policies from a dictionary into a protobuf of type CloudDeviceSettingsProto. Args: settings: The destination ChromeDeviceSettingsProto protobuf. policies: The source dictionary containing policies in JSON format. ''' for group in settings.DESCRIPTOR.fields: # Create protobuf message for group. group_message = eval('dp.' + group.message_type.name + '()') # Indicates if at least one field was set in |group_message|. got_fields = False # Iterate over fields of the message and feed them from the # policy config file. for field in group_message.DESCRIPTOR.fields: field_value = None if field.name in policies: got_fields = True field_value = policies[field.name] self.SetProtobufMessageField(group_message, field, field_value) if got_fields: settings.__getattribute__(group.name).CopyFrom(group_message) def GatherUserPolicySettings(self, settings, policies): '''Copies all the policies from a dictionary into a protobuf of type CloudPolicySettings. Args: settings: The destination: a CloudPolicySettings protobuf. policies: The source: a dictionary containing policies under keys 'recommended' and 'mandatory'. ''' for group in settings.DESCRIPTOR.fields: # Create protobuf message for group. group_message = eval('cp.' + group.message_type.name + '()') # We assume that this policy group will be recommended, and only switch # it to mandatory if at least one of its members is mandatory. group_message.policy_options.mode = cp.PolicyOptions.RECOMMENDED # Indicates if at least one field was set in |group_message|. got_fields = False # Iterate over fields of the message and feed them from the # policy config file. 
for field in group_message.DESCRIPTOR.fields: field_value = None if field.name in policies['mandatory']: group_message.policy_options.mode = cp.PolicyOptions.MANDATORY field_value = policies['mandatory'][field.name] elif field.name in policies['recommended']: field_value = policies['recommended'][field.name] if field_value != None: got_fields = True self.SetProtobufMessageField(group_message, field, field_value) if got_fields: settings.__getattribute__(group.name).CopyFrom(group_message) def ProcessCloudPolicy(self, msg): """Handles a cloud policy request. (New protocol for policy requests.) Checks for authorization, encodes the policy into protobuf representation, signs it and constructs the repsonse. Args: msg: The CloudPolicyRequest message received from the client. Returns: A tuple of HTTP status code and response data to send to the client. """ token_info, error = self.CheckToken() if not token_info: return error # Response is only given if the scope is specified in the config file. # Normally 'google/chromeos/device' and 'google/chromeos/user' should be # accepted. policy_value = '' if (msg.policy_type in token_info['allowed_policy_types'] and msg.policy_type in self._server.policy): if msg.policy_type == 'google/chromeos/user': settings = cp.CloudPolicySettings() self.GatherUserPolicySettings(settings, self._server.policy[msg.policy_type]) policy_value = settings.SerializeToString() elif msg.policy_type == 'google/chromeos/device': settings = dp.ChromeDeviceSettingsProto() self.GatherDevicePolicySettings(settings, self._server.policy[msg.policy_type]) policy_value = settings.SerializeToString() # Figure out the key we want to use. If multiple keys are configured, the # server will rotate through them in a round-robin fashion. signing_key = None req_key = None key_version = 1 nkeys = len(self._server.keys) if msg.signature_type == dm.PolicyFetchRequest.SHA1_RSA and nkeys > 0: if msg.public_key_version in range(1, nkeys + 1): # requested key exists, use for signing and rotate. req_key = self._server.keys[msg.public_key_version - 1]['private_key'] key_version = (msg.public_key_version % nkeys) + 1 signing_key = self._server.keys[key_version - 1] # Fill the policy data protobuf. policy_data = dm.PolicyData() policy_data.policy_type = msg.policy_type policy_data.timestamp = int(time.time() * 1000) policy_data.request_token = token_info['device_token']; policy_data.policy_value = policy_value policy_data.machine_name = token_info['machine_name'] if signing_key: policy_data.public_key_version = key_version policy_data.username = self._server.username policy_data.device_id = token_info['device_id'] signed_data = policy_data.SerializeToString() response = dm.DeviceManagementResponse() fetch_response = response.policy_response.response.add() fetch_response.policy_data = signed_data if signing_key: fetch_response.policy_data_signature = ( signing_key['private_key'].hashAndSign(signed_data).tostring()) if msg.public_key_version != key_version: fetch_response.new_public_key = signing_key['public_key'] if req_key: fetch_response.new_public_key_signature = ( req_key.hashAndSign(fetch_response.new_public_key).tostring()) self.DumpMessage('Response', response) return (200, response.SerializeToString()) def CheckToken(self): """Helper for checking whether the client supplied a valid DM token. Extracts the token from the request and passed to the server in order to look up the client. Returns: A pair of token information record and error response. 
If the first element is None, then the second contains an error code to send back to the client. Otherwise the first element is the same structure that is returned by LookupToken(). """ error = None dmtoken = None request_device_id = self.GetUniqueParam('deviceid') match = re.match('GoogleDMToken token=(\\w+)', self._headers.getheader('Authorization', '')) if match: dmtoken = match.group(1) if not dmtoken: error = dm.DeviceManagementResponse.DEVICE_MANAGEMENT_TOKEN_INVALID else: token_info = self._server.LookupToken(dmtoken) if (not token_info or not request_device_id or token_info['device_id'] != request_device_id): error = dm.DeviceManagementResponse.DEVICE_NOT_FOUND else: return (token_info, None) response = dm.DeviceManagementResponse() response.error = error self.DumpMessage('Response', response) return (None, (200, response.SerializeToString())) def DumpMessage(self, label, msg): """Helper for logging an ASCII dump of a protobuf message.""" logging.debug('%s\n%s' % (label, str(msg))) class TestServer(object): """Handles requests and keeps global service state.""" def __init__(self, policy_path, private_key_paths, policy_user): """Initializes the server. Args: policy_path: Names the file to read JSON-formatted policy from. private_key_paths: List of paths to read private keys from. """ self._registered_tokens = {} self.policy = {} # There is no way to for the testserver to know the user name belonging to # the GAIA auth token we received (short of actually talking to GAIA). To # address this, we have a command line parameter to set the username that # the server should report to the client. self.username = policy_user if json is None: print 'No JSON module, cannot parse policy information' else : try: self.policy = json.loads(open(policy_path).read()) except IOError: print 'Failed to load policy from %s' % policy_path self.keys = [] if private_key_paths: # Load specified keys from the filesystem. for key_path in private_key_paths: try: key = tlslite.api.parsePEMKey(open(key_path).read(), private=True) except IOError: print 'Failed to load private key from %s' % key_path continue assert key != None self.keys.append({ 'private_key' : key }) else: # Generate a key if none were specified. key = tlslite.api.generateRSAKey(1024) assert key != None self.keys.append({ 'private_key' : key }) # Derive the public keys from the loaded private keys. for entry in self.keys: key = entry['private_key'] algorithm = asn1der.Sequence( [ asn1der.Data(asn1der.OBJECT_IDENTIFIER, PKCS1_RSA_OID), asn1der.Data(asn1der.NULL, '') ]) rsa_pubkey = asn1der.Sequence([ asn1der.Integer(key.n), asn1der.Integer(key.e) ]) pubkey = asn1der.Sequence([ algorithm, asn1der.Bitstring(rsa_pubkey) ]) entry['public_key'] = pubkey; def HandleRequest(self, path, headers, request): """Handles a request. Args: path: The request path and query parameters received from the client. headers: A rfc822.Message-like object containing HTTP headers. request: The request data received from the client as a string. Returns: A pair of HTTP status code and response data to send to the client. """ handler = RequestHandler(self, path, headers, request) return handler.HandleRequest() def RegisterDevice(self, device_id, machine_id, type): """Registers a device or user and generates a DM token for it. Args: device_id: The device identifier provided by the client. Returns: The newly generated device token for the device. 
""" dmtoken_chars = [] while len(dmtoken_chars) < 32: dmtoken_chars.append(random.choice('0123456789abcdef')) dmtoken = ''.join(dmtoken_chars) allowed_policy_types = { dm.DeviceRegisterRequest.USER: ['google/chromeos/user'], dm.DeviceRegisterRequest.DEVICE: ['google/chromeos/device'], dm.DeviceRegisterRequest.TT: ['google/chromeos/user'], } self._registered_tokens[dmtoken] = { 'device_id': device_id, 'device_token': dmtoken, 'allowed_policy_types': allowed_policy_types[type], 'machine_name': 'chromeos-' + machine_id, } return self._registered_tokens[dmtoken] def LookupToken(self, dmtoken): """Looks up a device or a user by DM token. Args: dmtoken: The device management token provided by the client. Returns: A dictionary with information about a device or user that is registered by dmtoken, or None if the token is not found. """ return self._registered_tokens.get(dmtoken, None) def UnregisterDevice(self, dmtoken): """Unregisters a device identified by the given DM token. Args: dmtoken: The device management token provided by the client. """ if dmtoken in self._registered_tokens.keys(): del self._registered_tokens[dmtoken]
bsd-3-clause
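The test server above reads its policy from a JSON file whose layout is spelled out in the module docstring. A minimal sketch that writes such a file (the scope key, policy values and auth token are hypothetical examples; which scope names are honored depends on the protocol branch the client exercises):

import json

policy = {
    'google/chromeos/user': {
        'mandatory': {'HomepageLocation': 'http://www.chromium.org'},
        'recommended': {'JavascriptEnabled': False},
    },
    'managed_users': ['secret123456'],
}
# Write the file that TestServer.__init__ expects to find at policy_path.
with open('device_management', 'w') as f:
    json.dump(policy, f, indent=2)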
nrwahl2/ansible
lib/ansible/modules/cloud/cloudstack/cs_resourcelimit.py
33
6063
#!/usr/bin/python # -*- coding: utf-8 -*- # # (c) 2016, René Moser <mail@renemoser.net> # # This file is part of Ansible # # Ansible is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Ansible is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Ansible. If not, see <http://www.gnu.org/licenses/>. ANSIBLE_METADATA = {'metadata_version': '1.1', 'status': ['stableinterface'], 'supported_by': 'community'} DOCUMENTATION = ''' --- module: cs_resourcelimit short_description: Manages resource limits on Apache CloudStack based clouds. description: - Manage limits of resources for domains, accounts and projects. version_added: "2.1" author: "René Moser (@resmo)" options: resource_type: description: - Type of the resource. required: true choices: - instance - ip_address - volume - snapshot - template - network - vpc - cpu - memory - primary_storage - secondary_storage aliases: [ 'type' ] limit: description: - Maximum number of the resource. - Default is unlimited C(-1). required: false default: -1 aliases: [ 'max' ] domain: description: - Domain the resource is related to. required: false default: null account: description: - Account the resource is related to. required: false default: null project: description: - Name of the project the resource is related to. required: false default: null extends_documentation_fragment: cloudstack ''' EXAMPLES = ''' # Update a resource limit for instances of a domain local_action: module: cs_resourcelimit type: instance limit: 10 domain: customers # Update a resource limit for instances of an account local_action: module: cs_resourcelimit type: instance limit: 12 account: moserre domain: customers ''' RETURN = ''' --- recource_type: description: Type of the resource returned: success type: string sample: instance limit: description: Maximum number of the resource. returned: success type: int sample: -1 domain: description: Domain the resource is related to. returned: success type: string sample: example domain account: description: Account the resource is related to. returned: success type: string sample: example account project: description: Project the resource is related to. 
returned: success type: string sample: example project ''' # import cloudstack common from ansible.module_utils.basic import AnsibleModule from ansible.module_utils.cloudstack import ( AnsibleCloudStack, cs_required_together, cs_argument_spec ) RESOURCE_TYPES = { 'instance': 0, 'ip_address': 1, 'volume': 2, 'snapshot': 3, 'template': 4, 'network': 6, 'vpc': 7, 'cpu': 8, 'memory': 9, 'primary_storage': 10, 'secondary_storage': 11, } class AnsibleCloudStackResourceLimit(AnsibleCloudStack): def __init__(self, module): super(AnsibleCloudStackResourceLimit, self).__init__(module) self.returns = { 'max': 'limit', } def get_resource_type(self): resource_type = self.module.params.get('resource_type') return RESOURCE_TYPES.get(resource_type) def get_resource_limit(self): args = { 'account': self.get_account(key='name'), 'domainid': self.get_domain(key='id'), 'projectid': self.get_project(key='id'), 'resourcetype': self.get_resource_type() } resource_limit = self.cs.listResourceLimits(**args) if resource_limit: if 'limit' in resource_limit['resourcelimit'][0]: resource_limit['resourcelimit'][0]['limit'] = int(resource_limit['resourcelimit'][0]['limit']) return resource_limit['resourcelimit'][0] self.module.fail_json(msg="Resource limit type '%s' not found." % self.module.params.get('resource_type')) def update_resource_limit(self): resource_limit = self.get_resource_limit() args = { 'account': self.get_account(key='name'), 'domainid': self.get_domain(key='id'), 'projectid': self.get_project(key='id'), 'resourcetype': self.get_resource_type(), 'max': self.module.params.get('limit', -1) } if self.has_changed(args, resource_limit): self.result['changed'] = True if not self.module.check_mode: res = self.cs.updateResourceLimit(**args) resource_limit = res['resourcelimit'] return resource_limit def get_result(self, resource_limit): self.result = super(AnsibleCloudStackResourceLimit, self).get_result(resource_limit) self.result['resource_type'] = self.module.params.get('resource_type') return self.result def main(): argument_spec = cs_argument_spec() argument_spec.update(dict( resource_type=dict(required=True, choices=RESOURCE_TYPES.keys(), aliases=['type']), limit=dict(default=-1, aliases=['max'], type='int'), domain=dict(), account=dict(), project=dict(), )) module = AnsibleModule( argument_spec=argument_spec, required_together=cs_required_together(), supports_check_mode=True ) acs_resource_limit = AnsibleCloudStackResourceLimit(module) resource_limit = acs_resource_limit.update_resource_limit() result = acs_resource_limit.get_result(resource_limit) module.exit_json(**result) if __name__ == '__main__': main()
gpl-3.0
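get_resource_type() in the module above just maps a friendly name onto CloudStack's numeric resource type codes. A quick sketch of the same lookup (the gap at 5 appears to correspond to CloudStack's deprecated "project" resource type, though that reading is an assumption; the module itself simply skips the number):

RESOURCE_TYPES = {
    'instance': 0, 'ip_address': 1, 'volume': 2, 'snapshot': 3,
    'template': 4, 'network': 6, 'vpc': 7, 'cpu': 8,
    'memory': 9, 'primary_storage': 10, 'secondary_storage': 11,
}
print(RESOURCE_TYPES.get('cpu'))      # 8
print(RESOURCE_TYPES.get('unknown'))  # None, same as the module's behaviour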
Fiedzia/Django-facebook
docs/docs_env/Lib/encodings/raw_unicode_escape.py
852
1208
""" Python 'raw-unicode-escape' Codec Written by Marc-Andre Lemburg (mal@lemburg.com). (c) Copyright CNRI, All Rights Reserved. NO WARRANTY. """ import codecs ### Codec APIs class Codec(codecs.Codec): # Note: Binding these as C functions will result in the class not # converting them to methods. This is intended. encode = codecs.raw_unicode_escape_encode decode = codecs.raw_unicode_escape_decode class IncrementalEncoder(codecs.IncrementalEncoder): def encode(self, input, final=False): return codecs.raw_unicode_escape_encode(input, self.errors)[0] class IncrementalDecoder(codecs.IncrementalDecoder): def decode(self, input, final=False): return codecs.raw_unicode_escape_decode(input, self.errors)[0] class StreamWriter(Codec,codecs.StreamWriter): pass class StreamReader(Codec,codecs.StreamReader): pass ### encodings module API def getregentry(): return codecs.CodecInfo( name='raw-unicode-escape', encode=Codec.encode, decode=Codec.decode, incrementalencoder=IncrementalEncoder, incrementaldecoder=IncrementalDecoder, streamwriter=StreamWriter, streamreader=StreamReader, )
bsd-3-clause
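The raw-unicode-escape codec above is the stdlib implementation reached through the normal str/bytes API rather than imported directly; it writes Latin-1-range characters as single bytes and escapes everything else as \uXXXX sequences. For example (Python 3 syntax shown; on Python 2 the source would be a unicode literal):

s = 'snowman: \u2603'
encoded = s.encode('raw_unicode_escape')
print(encoded)                                    # b'snowman: \\u2603'
print(encoded.decode('raw_unicode_escape') == s)  # True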
chaen/DIRAC
DataManagementSystem/Client/CmdDirCompletion/AbstractFileSystem.py
4
2491
#!/usr/bin/env python # -*- coding:utf-8 -*- # author: lintao from __future__ import print_function class HelperReadOnly(object): def __init__(self, val): self.val = val def __get__(self, obj, objtype): return self.val def __set__(self, obj, val): raise AttributeError("can't modify attribute") class AbsFileSystem(object): fs_name = HelperReadOnly("AbsFileSystem") seq = HelperReadOnly("/") def list_dir(self, path): raise NotImplementedError def is_dir(self, path): raise NotImplementedError import os import os.path class UnixLikeFileSystem(AbsFileSystem): fs_name = HelperReadOnly("UnixLikeFileSystem") seq = HelperReadOnly("/") def list_dir(self, path): if not self.is_dir(path): return for entry in os.listdir(path): if self.is_dir( os.path.join(path, entry) ): entry += self.seq yield entry def is_dir(self, path): return os.path.isdir(path) class DFCFileSystem(AbsFileSystem): fs_name = HelperReadOnly("DFCFileSystem") seq = HelperReadOnly("/") def __init__(self, fc): self.fc = fc def list_dir(self, path): if path.endswith('/'): path = path.replace('//', '/') path = os.path.normpath(path) if not self.is_dir(path): print("Not a directory") return result = self.fc.listDirectory(path, False) if not result['OK']: print("Error listing directory") return content = result['Value']['Successful'].get(path, False) if not content: return if content['Files']: for fn in content['Files']: yield self.gen_no_prefix_content(fn, path) if content['SubDirs']: for dn in content['SubDirs']: yield self.gen_no_prefix_content(dn, path) + "/" def gen_no_prefix_content(self, dn, parent_dn): subdn = dn if dn.startswith(parent_dn): # remove the prefix subdn = dn[ len(parent_dn): ] if subdn.startswith("/"): subdn = subdn[1:] return subdn def is_dir(self, path): if path.endswith('/'): path = path.replace('//', '/') path = os.path.normpath(path) result = self.fc.isDirectory(path) if not result['OK']: return False return result['Value']['Successful'].get(path, False) if __name__ == "__main__": ulfs = UnixLikeFileSystem() print("FS", ulfs.fs_name) print("SEQ", ulfs.seq) print(list(ulfs.list_dir("/"))) print(list(ulfs.list_dir("/bad")))
gpl-3.0
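To show how the AbsFileSystem interface above is meant to be extended, here is an illustrative in-memory subclass. It reuses AbsFileSystem and HelperReadOnly from the module above; the nested-dict layout is an assumption made for the example, not part of the original code.

class DictFileSystem(AbsFileSystem):
    fs_name = HelperReadOnly("DictFileSystem")
    seq = HelperReadOnly("/")

    def __init__(self, tree):
        # e.g. {"home": {"user": {}}, "README": None}
        self.tree = tree

    def _node(self, path):
        node = self.tree
        for part in filter(None, path.split("/")):
            if not isinstance(node, dict):
                return None
            node = node.get(part)
        return node

    def is_dir(self, path):
        return isinstance(self._node(path), dict)

    def list_dir(self, path):
        node = self._node(path)
        if not isinstance(node, dict):
            return
        for name, child in node.items():
            # directories get the separator appended, like UnixLikeFileSystem
            yield name + (self.seq if isinstance(child, dict) else "")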
bloomberg/phabricator-tools
py/phl/phlsys_compressedlogging__t.py
4
4675
"""Test suite for phlsys_compressedlogging.""" # ============================================================================= # TEST PLAN # ----------------------------------------------------------------------------- # Here we detail the things we are concerned to test and specify which tests # cover those concerns. # # Concerns: # [ A] CompressedRotatingFileHandler can be initialzed # [ A] log file is created after initialization # [ A] the debug handler can be added to logger # [ A] no extra files are created by the debug handler # [ B] current log can be rotated to compressed one # [ B] current log is deleted after rotation # [ C] number of compressed files do not exceed backupCount # [ C] existing files are rotated correctly # ----------------------------------------------------------------------------- # Tests: # [ A] test_A_filehandler_breathing # [ B] test_B_rotation # [ C] test_C_existing_files_rotation # ============================================================================= from __future__ import absolute_import from __future__ import division from __future__ import print_function import gzip import logging import os import unittest import phlsys_compressedlogging import phlsys_fs _LOGGER = logging.getLogger(__name__) _MAKE_HANDLER = phlsys_compressedlogging.CompressedRotatingFileHandler class Test(unittest.TestCase): def setUp(self): pass def tearDown(self): pass def test_A_filehandler_breathing(self): with phlsys_fs.chtmpdir_context(): # [ A] CompressedRotatingFileHandler can be initialzed debug_handler = _MAKE_HANDLER( 'testfile', maxBytes=10 * 1024, backupCount=2) debug_handler.setLevel(logging.DEBUG) # [ A] log file is created after initialization self.assertTrue(os.path.exists('testfile')) # [ A] the debug handler can be added to logger logging.getLogger().addHandler(debug_handler) for _ in xrange(1000): _LOGGER.debug('Hello World, this is conetent for debug log.') # [ A] no extra files are created by the debug handler expected_files = ['testfile', 'testfile.1.gz', 'testfile.2.gz'] self.assertItemsEqual(expected_files, os.listdir('.')) def test_B_rotation(self): with phlsys_fs.chtmpdir_context(): debug_handler = _MAKE_HANDLER( 'testfile') content = 'Hello World, this is a test for the rotator.' phlsys_fs.write_text_file('testfile', content) debug_handler.rotator('testfile', 'dest.gz') with gzip.open('dest.gz', 'rb') as dest: # [ B] current log can be rotated to compressed one self.assertEqual(content, dest.read()) # [ B] current log is deleted after rotation self.assertFalse(os.path.exists('testfile')) def test_C_existing_files_rotation(self): with phlsys_fs.chtmpdir_context(): debug_handler = _MAKE_HANDLER( 'testfile', backupCount=2) content1 = 'Hello World, this is conetent for testfile1.' content2 = 'Hello World, this is conetent for testfile2.' with gzip.open('testfile.1.gz', 'wb') as f: f.write(content1) with gzip.open('testfile.2.gz', 'wb') as f: f.write(content2) debug_handler.rotate_existing_files() # [ C] number of compressed files do not exceed backupCount self.assertFalse(os.path.exists('testfile.3.gz')) # [ C] existing files are rotated correctly self.assertFalse(os.path.exists('testfile.1.gz')) with gzip.open('testfile.2.gz', 'rb') as f: self.assertEqual(content1, f.read()) # ----------------------------------------------------------------------------- # Copyright (C) 2015 Bloomberg Finance L.P. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ------------------------------ END-OF-FILE ----------------------------------
apache-2.0
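For context, a sketch of the rotator contract that test_B_rotation above exercises: gzip the live log into dest and delete the uncompressed source. This is an assumed shape, not phlsys_compressedlogging's actual implementation.

import gzip
import os
import shutil

def rotator(source, dest):
    # compress the current log file into dest...
    with open(source, 'rb') as f_in:
        with gzip.open(dest, 'wb') as f_out:
            shutil.copyfileobj(f_in, f_out)
    # ...and drop the uncompressed original
    os.remove(source)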
mancoast/CPythonPyc_test
fail/310_test_uuid.py
3
21025
from unittest import TestCase from test import support import uuid def importable(name): try: __import__(name) return True except: return False class TestUUID(TestCase): last_node = None source2node = {} def test_UUID(self): equal = self.assertEqual ascending = [] for (string, curly, hex, bytes, bytes_le, fields, integer, urn, time, clock_seq, variant, version) in [ ('00000000-0000-0000-0000-000000000000', '{00000000-0000-0000-0000-000000000000}', '00000000000000000000000000000000', b'\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0', b'\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0', (0, 0, 0, 0, 0, 0), 0, 'urn:uuid:00000000-0000-0000-0000-000000000000', 0, 0, uuid.RESERVED_NCS, None), ('00010203-0405-0607-0809-0a0b0c0d0e0f', '{00010203-0405-0607-0809-0a0b0c0d0e0f}', '000102030405060708090a0b0c0d0e0f', b'\0\x01\x02\x03\x04\x05\x06\x07\x08\t\n\x0b\x0c\x0d\x0e\x0f', b'\x03\x02\x01\0\x05\x04\x07\x06\x08\t\n\x0b\x0c\x0d\x0e\x0f', (0x00010203, 0x0405, 0x0607, 8, 9, 0x0a0b0c0d0e0f), 0x000102030405060708090a0b0c0d0e0f, 'urn:uuid:00010203-0405-0607-0809-0a0b0c0d0e0f', 0x607040500010203, 0x809, uuid.RESERVED_NCS, None), ('02d9e6d5-9467-382e-8f9b-9300a64ac3cd', '{02d9e6d5-9467-382e-8f9b-9300a64ac3cd}', '02d9e6d59467382e8f9b9300a64ac3cd', b'\x02\xd9\xe6\xd5\x94\x67\x38\x2e\x8f\x9b\x93\x00\xa6\x4a\xc3\xcd', b'\xd5\xe6\xd9\x02\x67\x94\x2e\x38\x8f\x9b\x93\x00\xa6\x4a\xc3\xcd', (0x02d9e6d5, 0x9467, 0x382e, 0x8f, 0x9b, 0x9300a64ac3cd), 0x02d9e6d59467382e8f9b9300a64ac3cd, 'urn:uuid:02d9e6d5-9467-382e-8f9b-9300a64ac3cd', 0x82e946702d9e6d5, 0xf9b, uuid.RFC_4122, 3), ('12345678-1234-5678-1234-567812345678', '{12345678-1234-5678-1234-567812345678}', '12345678123456781234567812345678', b'\x12\x34\x56\x78'*4, b'\x78\x56\x34\x12\x34\x12\x78\x56\x12\x34\x56\x78\x12\x34\x56\x78', (0x12345678, 0x1234, 0x5678, 0x12, 0x34, 0x567812345678), 0x12345678123456781234567812345678, 'urn:uuid:12345678-1234-5678-1234-567812345678', 0x678123412345678, 0x1234, uuid.RESERVED_NCS, None), ('6ba7b810-9dad-11d1-80b4-00c04fd430c8', '{6ba7b810-9dad-11d1-80b4-00c04fd430c8}', '6ba7b8109dad11d180b400c04fd430c8', b'\x6b\xa7\xb8\x10\x9d\xad\x11\xd1\x80\xb4\x00\xc0\x4f\xd4\x30\xc8', b'\x10\xb8\xa7\x6b\xad\x9d\xd1\x11\x80\xb4\x00\xc0\x4f\xd4\x30\xc8', (0x6ba7b810, 0x9dad, 0x11d1, 0x80, 0xb4, 0x00c04fd430c8), 0x6ba7b8109dad11d180b400c04fd430c8, 'urn:uuid:6ba7b810-9dad-11d1-80b4-00c04fd430c8', 0x1d19dad6ba7b810, 0xb4, uuid.RFC_4122, 1), ('6ba7b811-9dad-11d1-80b4-00c04fd430c8', '{6ba7b811-9dad-11d1-80b4-00c04fd430c8}', '6ba7b8119dad11d180b400c04fd430c8', b'\x6b\xa7\xb8\x11\x9d\xad\x11\xd1\x80\xb4\x00\xc0\x4f\xd4\x30\xc8', b'\x11\xb8\xa7\x6b\xad\x9d\xd1\x11\x80\xb4\x00\xc0\x4f\xd4\x30\xc8', (0x6ba7b811, 0x9dad, 0x11d1, 0x80, 0xb4, 0x00c04fd430c8), 0x6ba7b8119dad11d180b400c04fd430c8, 'urn:uuid:6ba7b811-9dad-11d1-80b4-00c04fd430c8', 0x1d19dad6ba7b811, 0xb4, uuid.RFC_4122, 1), ('6ba7b812-9dad-11d1-80b4-00c04fd430c8', '{6ba7b812-9dad-11d1-80b4-00c04fd430c8}', '6ba7b8129dad11d180b400c04fd430c8', b'\x6b\xa7\xb8\x12\x9d\xad\x11\xd1\x80\xb4\x00\xc0\x4f\xd4\x30\xc8', b'\x12\xb8\xa7\x6b\xad\x9d\xd1\x11\x80\xb4\x00\xc0\x4f\xd4\x30\xc8', (0x6ba7b812, 0x9dad, 0x11d1, 0x80, 0xb4, 0x00c04fd430c8), 0x6ba7b8129dad11d180b400c04fd430c8, 'urn:uuid:6ba7b812-9dad-11d1-80b4-00c04fd430c8', 0x1d19dad6ba7b812, 0xb4, uuid.RFC_4122, 1), ('6ba7b814-9dad-11d1-80b4-00c04fd430c8', '{6ba7b814-9dad-11d1-80b4-00c04fd430c8}', '6ba7b8149dad11d180b400c04fd430c8', b'\x6b\xa7\xb8\x14\x9d\xad\x11\xd1\x80\xb4\x00\xc0\x4f\xd4\x30\xc8', b'\x14\xb8\xa7\x6b\xad\x9d\xd1\x11\x80\xb4\x00\xc0\x4f\xd4\x30\xc8', (0x6ba7b814, 
0x9dad, 0x11d1, 0x80, 0xb4, 0x00c04fd430c8), 0x6ba7b8149dad11d180b400c04fd430c8, 'urn:uuid:6ba7b814-9dad-11d1-80b4-00c04fd430c8', 0x1d19dad6ba7b814, 0xb4, uuid.RFC_4122, 1), ('7d444840-9dc0-11d1-b245-5ffdce74fad2', '{7d444840-9dc0-11d1-b245-5ffdce74fad2}', '7d4448409dc011d1b2455ffdce74fad2', b'\x7d\x44\x48\x40\x9d\xc0\x11\xd1\xb2\x45\x5f\xfd\xce\x74\xfa\xd2', b'\x40\x48\x44\x7d\xc0\x9d\xd1\x11\xb2\x45\x5f\xfd\xce\x74\xfa\xd2', (0x7d444840, 0x9dc0, 0x11d1, 0xb2, 0x45, 0x5ffdce74fad2), 0x7d4448409dc011d1b2455ffdce74fad2, 'urn:uuid:7d444840-9dc0-11d1-b245-5ffdce74fad2', 0x1d19dc07d444840, 0x3245, uuid.RFC_4122, 1), ('e902893a-9d22-3c7e-a7b8-d6e313b71d9f', '{e902893a-9d22-3c7e-a7b8-d6e313b71d9f}', 'e902893a9d223c7ea7b8d6e313b71d9f', b'\xe9\x02\x89\x3a\x9d\x22\x3c\x7e\xa7\xb8\xd6\xe3\x13\xb7\x1d\x9f', b'\x3a\x89\x02\xe9\x22\x9d\x7e\x3c\xa7\xb8\xd6\xe3\x13\xb7\x1d\x9f', (0xe902893a, 0x9d22, 0x3c7e, 0xa7, 0xb8, 0xd6e313b71d9f), 0xe902893a9d223c7ea7b8d6e313b71d9f, 'urn:uuid:e902893a-9d22-3c7e-a7b8-d6e313b71d9f', 0xc7e9d22e902893a, 0x27b8, uuid.RFC_4122, 3), ('eb424026-6f54-4ef8-a4d0-bb658a1fc6cf', '{eb424026-6f54-4ef8-a4d0-bb658a1fc6cf}', 'eb4240266f544ef8a4d0bb658a1fc6cf', b'\xeb\x42\x40\x26\x6f\x54\x4e\xf8\xa4\xd0\xbb\x65\x8a\x1f\xc6\xcf', b'\x26\x40\x42\xeb\x54\x6f\xf8\x4e\xa4\xd0\xbb\x65\x8a\x1f\xc6\xcf', (0xeb424026, 0x6f54, 0x4ef8, 0xa4, 0xd0, 0xbb658a1fc6cf), 0xeb4240266f544ef8a4d0bb658a1fc6cf, 'urn:uuid:eb424026-6f54-4ef8-a4d0-bb658a1fc6cf', 0xef86f54eb424026, 0x24d0, uuid.RFC_4122, 4), ('f81d4fae-7dec-11d0-a765-00a0c91e6bf6', '{f81d4fae-7dec-11d0-a765-00a0c91e6bf6}', 'f81d4fae7dec11d0a76500a0c91e6bf6', b'\xf8\x1d\x4f\xae\x7d\xec\x11\xd0\xa7\x65\x00\xa0\xc9\x1e\x6b\xf6', b'\xae\x4f\x1d\xf8\xec\x7d\xd0\x11\xa7\x65\x00\xa0\xc9\x1e\x6b\xf6', (0xf81d4fae, 0x7dec, 0x11d0, 0xa7, 0x65, 0x00a0c91e6bf6), 0xf81d4fae7dec11d0a76500a0c91e6bf6, 'urn:uuid:f81d4fae-7dec-11d0-a765-00a0c91e6bf6', 0x1d07decf81d4fae, 0x2765, uuid.RFC_4122, 1), ('fffefdfc-fffe-fffe-fffe-fffefdfcfbfa', '{fffefdfc-fffe-fffe-fffe-fffefdfcfbfa}', 'fffefdfcfffefffefffefffefdfcfbfa', b'\xff\xfe\xfd\xfc\xff\xfe\xff\xfe\xff\xfe\xff\xfe\xfd\xfc\xfb\xfa', b'\xfc\xfd\xfe\xff\xfe\xff\xfe\xff\xff\xfe\xff\xfe\xfd\xfc\xfb\xfa', (0xfffefdfc, 0xfffe, 0xfffe, 0xff, 0xfe, 0xfffefdfcfbfa), 0xfffefdfcfffefffefffefffefdfcfbfa, 'urn:uuid:fffefdfc-fffe-fffe-fffe-fffefdfcfbfa', 0xffefffefffefdfc, 0x3ffe, uuid.RESERVED_FUTURE, None), ('ffffffff-ffff-ffff-ffff-ffffffffffff', '{ffffffff-ffff-ffff-ffff-ffffffffffff}', 'ffffffffffffffffffffffffffffffff', b'\xff'*16, b'\xff'*16, (0xffffffff, 0xffff, 0xffff, 0xff, 0xff, 0xffffffffffff), 0xffffffffffffffffffffffffffffffff, 'urn:uuid:ffffffff-ffff-ffff-ffff-ffffffffffff', 0xfffffffffffffff, 0x3fff, uuid.RESERVED_FUTURE, None), ]: equivalents = [] # Construct each UUID in several different ways. for u in [uuid.UUID(string), uuid.UUID(curly), uuid.UUID(hex), uuid.UUID(bytes=bytes), uuid.UUID(bytes_le=bytes_le), uuid.UUID(fields=fields), uuid.UUID(int=integer), uuid.UUID(urn)]: # Test all conversions and properties of the UUID object. 
equal(str(u), string) equal(int(u), integer) equal(u.bytes, bytes) equal(u.bytes_le, bytes_le) equal(u.fields, fields) equal(u.time_low, fields[0]) equal(u.time_mid, fields[1]) equal(u.time_hi_version, fields[2]) equal(u.clock_seq_hi_variant, fields[3]) equal(u.clock_seq_low, fields[4]) equal(u.node, fields[5]) equal(u.hex, hex) equal(u.int, integer) equal(u.urn, urn) equal(u.time, time) equal(u.clock_seq, clock_seq) equal(u.variant, variant) equal(u.version, version) equivalents.append(u) # Different construction methods should give the same UUID. for u in equivalents: for v in equivalents: equal(u, v) ascending.append(u) # Test comparison of UUIDs. for i in range(len(ascending)): for j in range(len(ascending)): equal(i < j, ascending[i] < ascending[j]) equal(i <= j, ascending[i] <= ascending[j]) equal(i == j, ascending[i] == ascending[j]) equal(i > j, ascending[i] > ascending[j]) equal(i >= j, ascending[i] >= ascending[j]) equal(i != j, ascending[i] != ascending[j]) # Test sorting of UUIDs (above list is in ascending order). resorted = ascending[:] resorted.reverse() resorted.sort() equal(ascending, resorted) def test_exceptions(self): badvalue = lambda f: self.assertRaises(ValueError, f) badtype = lambda f: self.assertRaises(TypeError, f) # Badly formed hex strings. badvalue(lambda: uuid.UUID('')) badvalue(lambda: uuid.UUID('abc')) badvalue(lambda: uuid.UUID('1234567812345678123456781234567')) badvalue(lambda: uuid.UUID('123456781234567812345678123456789')) badvalue(lambda: uuid.UUID('123456781234567812345678z2345678')) # Badly formed bytes. badvalue(lambda: uuid.UUID(bytes='abc')) badvalue(lambda: uuid.UUID(bytes='\0'*15)) badvalue(lambda: uuid.UUID(bytes='\0'*17)) # Badly formed bytes_le. badvalue(lambda: uuid.UUID(bytes_le='abc')) badvalue(lambda: uuid.UUID(bytes_le='\0'*15)) badvalue(lambda: uuid.UUID(bytes_le='\0'*17)) # Badly formed fields. badvalue(lambda: uuid.UUID(fields=(1,))) badvalue(lambda: uuid.UUID(fields=(1, 2, 3, 4, 5))) badvalue(lambda: uuid.UUID(fields=(1, 2, 3, 4, 5, 6, 7))) # Field values out of range. badvalue(lambda: uuid.UUID(fields=(-1, 0, 0, 0, 0, 0))) badvalue(lambda: uuid.UUID(fields=(0x100000000, 0, 0, 0, 0, 0))) badvalue(lambda: uuid.UUID(fields=(0, -1, 0, 0, 0, 0))) badvalue(lambda: uuid.UUID(fields=(0, 0x10000, 0, 0, 0, 0))) badvalue(lambda: uuid.UUID(fields=(0, 0, -1, 0, 0, 0))) badvalue(lambda: uuid.UUID(fields=(0, 0, 0x10000, 0, 0, 0))) badvalue(lambda: uuid.UUID(fields=(0, 0, 0, -1, 0, 0))) badvalue(lambda: uuid.UUID(fields=(0, 0, 0, 0x100, 0, 0))) badvalue(lambda: uuid.UUID(fields=(0, 0, 0, 0, -1, 0))) badvalue(lambda: uuid.UUID(fields=(0, 0, 0, 0, 0x100, 0))) badvalue(lambda: uuid.UUID(fields=(0, 0, 0, 0, 0, -1))) badvalue(lambda: uuid.UUID(fields=(0, 0, 0, 0, 0, 0x1000000000000))) # Version number out of range. badvalue(lambda: uuid.UUID('00'*16, version=0)) badvalue(lambda: uuid.UUID('00'*16, version=6)) # Integer value out of range. badvalue(lambda: uuid.UUID(int=-1)) badvalue(lambda: uuid.UUID(int=1<<128)) # Must supply exactly one of hex, bytes, fields, int. h, b, f, i = '00'*16, b'\0'*16, (0, 0, 0, 0, 0, 0), 0 uuid.UUID(h) uuid.UUID(hex=h) uuid.UUID(bytes=b) uuid.UUID(bytes_le=b) uuid.UUID(fields=f) uuid.UUID(int=i) # Wrong number of arguments (positional). badtype(lambda: uuid.UUID()) badtype(lambda: uuid.UUID(h, b)) badtype(lambda: uuid.UUID(h, b, b)) badtype(lambda: uuid.UUID(h, b, b, f)) badtype(lambda: uuid.UUID(h, b, b, f, i)) # Duplicate arguments. 
for hh in [[], [('hex', h)]]: for bb in [[], [('bytes', b)]]: for bble in [[], [('bytes_le', b)]]: for ii in [[], [('int', i)]]: for ff in [[], [('fields', f)]]: args = dict(hh + bb + bble + ii + ff) if len(args) != 0: badtype(lambda: uuid.UUID(h, **args)) if len(args) != 1: badtype(lambda: uuid.UUID(**args)) # Immutability. u = uuid.UUID(h) badtype(lambda: setattr(u, 'hex', h)) badtype(lambda: setattr(u, 'bytes', b)) badtype(lambda: setattr(u, 'bytes_le', b)) badtype(lambda: setattr(u, 'fields', f)) badtype(lambda: setattr(u, 'int', i)) badtype(lambda: setattr(u, 'time_low', 0)) badtype(lambda: setattr(u, 'time_mid', 0)) badtype(lambda: setattr(u, 'time_hi_version', 0)) badtype(lambda: setattr(u, 'time_hi_version', 0)) badtype(lambda: setattr(u, 'clock_seq_hi_variant', 0)) badtype(lambda: setattr(u, 'clock_seq_low', 0)) badtype(lambda: setattr(u, 'node', 0)) def check_node(self, node, source): individual_group_bit = (node >> 40) & 1 universal_local_bit = (node >> 40) & 2 message = "%012x doesn't look like a real MAC address" % node self.assertEqual(individual_group_bit, 0, message) self.assertEqual(universal_local_bit, 0, message) self.assertNotEqual(node, 0, message) self.assertNotEqual(node, 0xffffffffffff, message) self.assert_(0 <= node, message) self.assert_(node < (1 << 48), message) TestUUID.source2node[source] = node if TestUUID.last_node: if TestUUID.last_node != node: msg = "different sources disagree on node:\n" for s, n in TestUUID.source2node.items(): msg += " from source %r, node was %012x\n" % (s, n) # There's actually no reason to expect the MAC addresses # to agree across various methods -- e.g., a box may have # multiple network interfaces, and different ways of getting # a MAC address may favor different HW. ##self.fail(msg) else: TestUUID.last_node = node def test_ifconfig_getnode(self): import sys print(""" WARNING: uuid._ifconfig_getnode is unreliable on many platforms. It is disabled until the code and/or test can be fixed properly.""", file=sys.__stdout__) return import os if os.name == 'posix': node = uuid._ifconfig_getnode() if node is not None: self.check_node(node, 'ifconfig') def test_ipconfig_getnode(self): import os if os.name == 'nt': node = uuid._ipconfig_getnode() if node is not None: self.check_node(node, 'ipconfig') def test_netbios_getnode(self): if importable('win32wnet') and importable('netbios'): self.check_node(uuid._netbios_getnode(), 'netbios') def test_random_getnode(self): node = uuid._random_getnode() self.assert_(0 <= node) self.assert_(node < (1 <<48)) def test_unixdll_getnode(self): import sys print(""" WARNING: uuid._unixdll_getnode is unreliable on many platforms. It is disabled until the code and/or test can be fixed properly.""", file=sys.__stdout__) return import os if importable('ctypes') and os.name == 'posix': self.check_node(uuid._unixdll_getnode(), 'unixdll') def test_windll_getnode(self): import os if importable('ctypes') and os.name == 'nt': self.check_node(uuid._windll_getnode(), 'windll') def test_getnode(self): import sys print(""" WARNING: uuid.getnode is unreliable on many platforms. It is disabled until the code and/or test can be fixed properly.""", file=sys.__stdout__) return node1 = uuid.getnode() self.check_node(node1, "getnode1") # Test it again to ensure consistency. node2 = uuid.getnode() self.check_node(node2, "getnode2") self.assertEqual(node1, node2) def test_uuid1(self): # uuid1 requires ctypes. 
try: import ctypes except ImportError: return equal = self.assertEqual # Make sure uuid1() generates UUIDs that are actually version 1. for u in [uuid.uuid1() for i in range(10)]: equal(u.variant, uuid.RFC_4122) equal(u.version, 1) # Make sure the generated UUIDs are actually unique. uuids = {} for u in [uuid.uuid1() for i in range(1000)]: uuids[u] = 1 equal(len(uuids.keys()), 1000) # Make sure the supplied node ID appears in the UUID. u = uuid.uuid1(0) equal(u.node, 0) u = uuid.uuid1(0x123456789abc) equal(u.node, 0x123456789abc) u = uuid.uuid1(0xffffffffffff) equal(u.node, 0xffffffffffff) # Make sure the supplied clock sequence appears in the UUID. u = uuid.uuid1(0x123456789abc, 0) equal(u.node, 0x123456789abc) equal(((u.clock_seq_hi_variant & 0x3f) << 8) | u.clock_seq_low, 0) u = uuid.uuid1(0x123456789abc, 0x1234) equal(u.node, 0x123456789abc) equal(((u.clock_seq_hi_variant & 0x3f) << 8) | u.clock_seq_low, 0x1234) u = uuid.uuid1(0x123456789abc, 0x3fff) equal(u.node, 0x123456789abc) equal(((u.clock_seq_hi_variant & 0x3f) << 8) | u.clock_seq_low, 0x3fff) def test_uuid3(self): equal = self.assertEqual # Test some known version-3 UUIDs. for u, v in [(uuid.uuid3(uuid.NAMESPACE_DNS, 'python.org'), '6fa459ea-ee8a-3ca4-894e-db77e160355e'), (uuid.uuid3(uuid.NAMESPACE_URL, 'http://python.org/'), '9fe8e8c4-aaa8-32a9-a55c-4535a88b748d'), (uuid.uuid3(uuid.NAMESPACE_OID, '1.3.6.1'), 'dd1a1cef-13d5-368a-ad82-eca71acd4cd1'), (uuid.uuid3(uuid.NAMESPACE_X500, 'c=ca'), '658d3002-db6b-3040-a1d1-8ddd7d189a4d'), ]: equal(u.variant, uuid.RFC_4122) equal(u.version, 3) equal(u, uuid.UUID(v)) equal(str(u), v) def test_uuid4(self): # uuid4 requires ctypes. try: import ctypes except ImportError: return equal = self.assertEqual # Make sure uuid4() generates UUIDs that are actually version 4. for u in [uuid.uuid4() for i in range(10)]: equal(u.variant, uuid.RFC_4122) equal(u.version, 4) # Make sure the generated UUIDs are actually unique. uuids = {} for u in [uuid.uuid4() for i in range(1000)]: uuids[u] = 1 equal(len(uuids.keys()), 1000) def test_uuid5(self): equal = self.assertEqual # Test some known version-5 UUIDs. for u, v in [(uuid.uuid5(uuid.NAMESPACE_DNS, 'python.org'), '886313e1-3b8a-5372-9b90-0c9aee199e5d'), (uuid.uuid5(uuid.NAMESPACE_URL, 'http://python.org/'), '4c565f0d-3f5a-5890-b41b-20cf47701c5e'), (uuid.uuid5(uuid.NAMESPACE_OID, '1.3.6.1'), '1447fa61-5277-5fef-a9b3-fbc6e44f4af3'), (uuid.uuid5(uuid.NAMESPACE_X500, 'c=ca'), 'cc957dd1-a972-5349-98cd-874190002798'), ]: equal(u.variant, uuid.RFC_4122) equal(u.version, 5) equal(u, uuid.UUID(v)) equal(str(u), v) def test_main(): support.run_unittest(TestUUID) if __name__ == '__main__': test_main()
gpl-3.0
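Side note on the fixed expectations in the test above: name-based UUIDs are deterministic, so values like uuid3(NAMESPACE_DNS, 'python.org') can be re-derived by hand. RFC 4122 defines uuid3 as the MD5 of the namespace bytes plus the name, with the version and variant bits forced afterwards:

import hashlib
import uuid

digest = hashlib.md5(uuid.NAMESPACE_DNS.bytes + b'python.org').digest()
manual = uuid.UUID(bytes=digest[:16], version=3)  # version= forces the version/variant bits
assert manual == uuid.uuid3(uuid.NAMESPACE_DNS, 'python.org')
assert str(manual) == '6fa459ea-ee8a-3ca4-894e-db77e160355e'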
DanielSantoyo/SmallBarrel
source/lib_grows/genesi.py
1
13689
####################################################
#Software License:
#
#--------------------------------------------------#
#genesi.py, a pygame library to create the images of the game.
#Copyright (C) 2016 Daniel Santoyo Gomez
#This library is free software; you can redistribute it and/or
#modify it under the terms of the GNU Lesser General Public
#License as published by the Free Software Foundation; either
#version 2.1 of the License, or (at your option) any later version.
#This library is distributed in the hope that it will be useful,
#but WITHOUT ANY WARRANTY; without even the implied warranty of
#MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
#Lesser General Public License for more details.
#You should have received a copy of the GNU Lesser General Public
#License along with this library; if not, write to the Free Software
#Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#contact: daniel.santoyo@gmx.com

from random import randint
import os.path
import threading
from perlin_noise import PerlinNoise
import cPickle as pickle

from pygame import image as Image
from pygame import Surface

from lib_scribi import nomi_lingue
from lib_grafica.game_constants import *


def miner_random(n):
    # EDIT: needs to be cleaned up
    n.nome = nomi_lingue.nome_random(n.sesso)
    n.cognome = nomi_lingue.cognome_random()
    n.eta = randint(1,50)
    vitalita = [1, 1, 1, 1, 2, 2, 3, 4, 5, 6, 6, 7, 8, 9, 10, 11, 12, 12, 13, 14, 14, 15, 16, 16, 17, 17, 18, 18, 18, 19, 19, 19, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 19, 19, 19, 19, 19, 18, 18, 18, 18, 18, 17, 17, 17, 16, 16, 16, 16, 15, 15, 15, 14, 14, 14, 13, 13, 13, 13, 12, 12, 12, 11, 11, 11, 11, 10, 10, 10, 9, 9, 9, 9, 9, 8, 8, 8, 8, 7, 7, 7, 7, 7, 6, 6, 6, 6]
    n.vitalita = vitalita[n.eta]+randint(1,3)
    n.salute = n.vitalita
    forza = [1, 1, 1, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 19, 20, 20, 21, 21, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 21, 21, 21, 20, 20, 20, 19, 19, 18, 18, 17, 17, 16, 16, 15, 15, 14, 14, 13, 13, 12, 12, 11, 11, 10, 10, 9, 9, 9, 8, 8, 7, 7, 7, 6, 6, 5, 5, 5, 5, 4, 4, 4, 3, 3, 3, 3, 2, 2, 2, 2, 2, 2, 1, 1, 1, 1, 1, 1, 1, 1]
    n.forza = forza[n.eta]+randint(1,3)
    t = 0
    for i in range(n.eta):
        if(i%2)==0:
            t += randint(0,2)
    n.intelligenza = t
    t = 0
    for i in range(n.eta):
        if(i%3)==0:
            t += randint(0,2)
    if t > 20:
        t = 20
    n.saggezza = t
    t = 0
    for i in range(3):
        t += randint(1,7)
    n.carisma = t
    # psychological traits
    t = 0
    for i in range(3):
        t += randint(1,7)
    n.umilta = t
    t = 0
    for i in range(3):
        t += randint(1,7)
    n.compassione = t
    t = 0
    for i in range(3):
        t += randint(1,7)
    n.carattere = t  # modifies speed
    return n

#----------------------------------------------------------------------#

class World_maker(threading.Thread):

    def __init__(self,name):
        threading.Thread.__init__(self)
        self.tiles = []
        self.matrix = {}
        self.name = name
        self.pkl_name = ""

    def open_textual_matrix(self, filename):
        if os.path.isfile(filename+'.pkl'):
            print "[WORLD MAKER]: load", filename, "\t"
            self.matrix = pickle.load( open( filename+".pkl", "rb" ) )
            load = True
            self.pkl_name = filename+'.pkl'
        else:
            '''Opens a textual matrix file of space-separated integers,
            strips the trailing \n from each row and converts every value
            to int. Returns an array.'''
            if filename.endswith('.txt'):
                filename = filename[:-4]
            with open(filename+'.txt', "r") as file:
                matr = map(lambda v: v[:-1].split(), file.readlines() )
                matr = reduce(lambda x,y: x+y, matr)
                matr = map(lambda x: int(x),matr)
            self.pkl_name = filename+'.pkl'
            for id in range(max(matr)):
                self.matrix[id] = [ [] for _ in range(H_WORLD)]
                for y in range(H_WORLD):
                    t = []
                    for x in range(W_WORLD):
                        value = T_VOID
                        if matr[x+y*W_WORLD] >= id:
                            value = T_ERBA
                        t.append(value)
                    self.matrix[id][y] = t
            self.dimz = len(self.matrix)
            self.dimy = len(self.matrix[0])
            self.dimx = len(self.matrix[0][0])
            load = False
        # track the number of z-levels for image generation
        self.maximum = len(self.matrix)
        return load

    def get_dims(self):
        self.dimz = len(self.matrix)
        self.dimy = len(self.matrix[0])
        self.dimx = len(self.matrix[0][0])

    def make_mountain(self, W_WORLD,H_WORLD, nome = ""):
        '''Loads a random hill from disk and prepares the matrix'''
        if nome == "":
            # check whether an already-created mountain exists in graphics/results
            nome = r"lib_grows/perlin_maps/collina"+str(randint(0,19))
            for i in xrange(20):
                filename = r"../graphics/results/collina"+str(i)
                if os.path.isfile(filename+".pkl"):
                    nome = filename
                    print "[WORLD MAKER]: found "+filename+".pkl in memory... load\t"
                    break
        if not self.open_textual_matrix(nome):
            # adjust the chunk: handle the grass and sand tiles
            for z in xrange(self.dimz):
                for y in xrange(self.dimy):
                    for x in xrange(self.dimx):
                        if self.matrix[z][y][x] == T_VOID:
                            if z <= Z_SEA:
                                self.matrix[z][y][x] = T_ACQUA
                        if self.matrix[z][y][x] == T_ERBA:
                            if z < self.dimz-1:
                                if y < self.dimy-1 and x < self.dimx -1:
                                    if self.matrix[z+1][y][x] == T_ERBA:
                                        if self.matrix[z][y+1][x] == T_ERBA and self.matrix[z][y][x+1] == T_ERBA:
                                            self.matrix[z][y][x] = T_HIDE
                                            continue
                                if z == Z_SEA:
                                    if y < self.dimy-1:
                                        if self.matrix[z][y+1][x] == T_VOID:
                                            self.matrix[z][y][x] = T_SABBIA
                                            continue
                                    if x < self.dimx-1:
                                        if self.matrix[z][y][x+1] == T_VOID:
                                            self.matrix[z][y][x] = T_SABBIA
                                            continue
                                    if x > 0:
                                        if self.matrix[z][y][x-1] == T_VOID:
                                            self.matrix[z][y][x] = T_SABBIA
                                            continue
                                    if y > 0:
                                        if self.matrix[z][y-1][x] == T_VOID:
                                            self.matrix[z][y][x] = T_SABBIA
                                            continue
                                if z < Z_SEA and self.matrix[z+1][y][x] == T_VOID:
                                    self.matrix[z][y][x] = T_SABBIA
                                    continue
                                if self.matrix[z+1][y][x] != T_VOID:
                                    self.matrix[z][y][x] = T_TERRA
                        if z >= Z_SNOW:  # handle the snow
                            if self.matrix[z][y][x] == T_ERBA:
                                self.matrix[z][y][x] = T_NEVE
                                border_type = 0
                                if self.matrix[z][y][x-1] == T_VOID:
                                    border_type = border_type | 1
                                if self.matrix[z][y-1][x] == T_VOID:
                                    border_type = border_type | 2
                                self.matrix[z][y][x] += '_'+str(border_type)
        else:
            self.get_dims()

    def conservatore(self, num):
        # "pickler" for the textual matrices
        print "pickle writing in progress"
        for i in range(num):
            print "%3.2f %%" % (float(i*100.0/num)),"\r",
            nome = r"lib_grows/perlin_maps/collina"+str(i)+".txt"
            self.matrix = {}
            self.make_mountain(W_WORLD,H_WORLD, nome)
            pickle.dump( self.matrix, open( nome.split('.')[0]+'.pkl', "wb" ) )

    def load_tile(self,element,z, side = 1):
        # side = 1: bottom, side = 0: top
        if element == T_VOID:
            return False
        else:
            tile = Image.load(r"../graphics/tile"+element+".png")
            if element == T_ACQUA:
                if side == 0:
                    return tile
                else:
                    return False
            tile = tile.subsurface(0,side*(BLOCCOY/2),BLOCCOX,BLOCCOY/2)  # left top width height
            return tile

    def genera_immagini_chunk(self):
        filename = r"../graphics/results/"+ self.pkl_name.split('/')[-1]
        if os.path.isfile(filename):
            print "[WORLD MAKER]: nothing to save..."
            return
        z_max = self.maximum
        dz_height = int((z_max)*(BLOCCOY-2*DY))
        height = int(2*DY*(self.dimy-1)+BLOCCOY + dz_height)
        width = int(BLOCCOX*(self.dimx))
        print "[WORLD MAKER]: generation of chunk images\t"
        background_final = Surface((width, height))
        background_final.set_colorkey(TRANSPARENCY)
        background_final.fill(TRANSPARENCY)
        #sea_background = Surface((width, height))
        #sea_background.set_colorkey(TRANSPARENCY)
        #sea_background.fill(TRANSPARENCY)
        for z in range(self.dimz):
            background = Surface((width, height))  # image with the bottom tiles
            foreground = Surface((width, height))  # image with the top tiles
            background.fill(TRANSPARENCY)
            foreground.fill(TRANSPARENCY)
            background.set_colorkey(TRANSPARENCY)
            foreground.set_colorkey(TRANSPARENCY)
            for y in range(self.dimy):
                for x in range(self.dimx):
                    t_type = self.matrix[z][y][x]
                    tile = self.load_tile(t_type,z,1)
                    tile_up = self.load_tile(t_type,z,0)
                    if tile:
                        xo = width/2 + (x-y-1)*DX
                        yo = (x+y)*DY - z*DZ + dz_height
                        tileRect = tile.get_rect()
                        tileRect.topleft = (int(xo),int(yo)+BLOCCOY/2)
                        background.blit(tile,tileRect)
                    if tile_up:
                        xo = width/2 + (x-y-1)*DX
                        yo = (x+y)*DY - z*DZ + dz_height
                        tileRect = tile_up.get_rect()
                        tileRect.topleft = (int(xo),int(yo))
                        #if t_type == T_ACQUA:
                        #    sea_background.blit(tile_up,tileRect)
                        #else:
                        foreground.blit(tile_up,tileRect)
            background_final.blit(background,background.get_rect())
            background_final.blit(foreground,background.get_rect())
            data = Image.tostring(background, "RGBA")
            surf = Image.fromstring(data, (width, height), 'RGBA', False)
            Image.save(surf,r"../graphics/results/hill_"+str(z)+"_d.png")
            data = Image.tostring(foreground, "RGBA")
            surf = Image.fromstring(data, (width, height), 'RGBA', False)
            Image.save(surf,r"../graphics/results/hill_"+str(z)+"_u.png")
        #data = Image.tostring(sea_background, "RGBA")
        #surf = Image.fromstring(data, (width, height), 'RGBA', False)
        #Image.save(surf,r"../graphics/results/sea.png")
        Image.save(background_final,r"../graphics/results/all_hill.png")
        pickle.dump( self.matrix, open( r"../graphics/results/"+self.pkl_name.split('/')[-1], "wb" ) )

    def make_perlin_mountains(self,num):
        # (slow) map-creation process using PerlinNoise
        print "[WORLD MAKER]: generation of perlin_maps\t"
        for k in xrange(num):
            print "%3.2f %%" % (float(k*100.0/num)),"\r",
            matr = []
            P = PerlinNoise()
            matr = P.run(W_WORLD, H_WORLD, 0.25,[0.065,0.125,0.25,0.5,0.7,1,1.6,1.8,2],D_WORLD+randint(-2,2))
            t = ''
            for i in range(W_WORLD):
                for j in range(H_WORLD):
                    t += str(matr[i+j*W_WORLD]).ljust(2,' ')+ ' '
                t += '\n'
            with open(r"lib_grows/perlin_maps/collina"+str(k)+".txt","w") as out:
                out.write(t)

    def start(self):  # start Thread
        print self.name + ": start"
        self.make_mountain(W_WORLD,H_WORLD)
        self.genera_immagini_chunk()
        return self.matrix

    def __del__(self):
        print self.name + ": complete creation\t"
artistic-2.0
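The blit loop in genera_immagini_chunk above uses a standard isometric grid-to-screen mapping; here it is isolated as a pure function for clarity (DX, DY, DZ and dz_height stand in for the game_constants values and are assumed, not taken from the original):

def iso_project(x, y, z, width, DX, DY, DZ, dz_height):
    # columns move the tile right, rows move it left...
    xo = width // 2 + (x - y - 1) * DX
    # ...while depth stacks downward and altitude lifts the tile up
    yo = (x + y) * DY - z * DZ + dz_height
    return xo, yo

# cells on the same x+y diagonal share a screen row:
a = iso_project(2, 1, 0, 640, 32, 16, 24, 0)
b = iso_project(1, 2, 0, 640, 32, 16, 24, 0)
assert a[1] == b[1]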
cypsun/FreeCAD
src/Mod/Path/PathScripts/PathProject.py
6
6281
# -*- coding: utf-8 -*-
#***************************************************************************
#*                                                                         *
#*   Copyright (c) 2014 Yorik van Havre <yorik@uncreated.net>              *
#*                                                                         *
#*   This program is free software; you can redistribute it and/or modify *
#*   it under the terms of the GNU Lesser General Public License (LGPL)   *
#*   as published by the Free Software Foundation; either version 2 of    *
#*   the License, or (at your option) any later version.                  *
#*   for detail see the LICENCE text file.                                *
#*                                                                         *
#*   This program is distributed in the hope that it will be useful,      *
#*   but WITHOUT ANY WARRANTY; without even the implied warranty of       *
#*   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the        *
#*   GNU Library General Public License for more details.                 *
#*                                                                         *
#*   You should have received a copy of the GNU Library General Public    *
#*   License along with this program; if not, write to the Free Software  *
#*   Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307  *
#*   USA                                                                   *
#*                                                                         *
#***************************************************************************

import FreeCAD,FreeCADGui,Path,PathGui
from PySide import QtCore,QtGui

"""Path Project object and FreeCAD command"""

# Qt translation handling
try:
    _encoding = QtGui.QApplication.UnicodeUTF8

    def translate(context, text, disambig=None):
        return QtGui.QApplication.translate(context, text, disambig, _encoding)
except AttributeError:
    def translate(context, text, disambig=None):
        return QtGui.QApplication.translate(context, text, disambig)


class ObjectPathProject:

    def __init__(self,obj):
#        obj.addProperty("App::PropertyFile", "PostProcessor", "CodeOutput", translate("PostProcessor","Select the Post Processor file for this project"))
        obj.addProperty("App::PropertyFile", "OutputFile", "CodeOutput", translate("OutputFile","The NC output file for this project"))
        obj.setEditorMode("OutputFile",0)  # set to default mode
#        obj.addProperty("App::PropertyBool","Editor","CodeOutput",translate("Show Editor","Show G-Code in simple editor after posting code"))
#        obj.addProperty("Path::PropertyTooltable","Tooltable", "Path",translate("PathProject","The tooltable of this feature"))
        obj.addProperty("App::PropertyString", "Description","Path",translate("PathProject","An optional description for this project"))
        obj.Proxy = self

    def __getstate__(self):
        return None

    def __setstate__(self,state):
        return None

    def onChanged(self,obj,prop):
        pass

    def execute(self,obj):
        cmds = []
        for child in obj.Group:
            if child.isDerivedFrom("Path::Feature"):
                cmds.extend(child.Path.Commands)
        if cmds:
            path = Path.Path(cmds)
            obj.Path = path


class ViewProviderProject:

    def __init__(self,vobj):
        vobj.Proxy = self
        mode = 2
        vobj.setEditorMode('LineWidth',mode)
        vobj.setEditorMode('MarkerColor',mode)
        vobj.setEditorMode('NormalColor',mode)
#        vobj.setEditorMode('ShowFirstRapid',mode)
        vobj.setEditorMode('BoundingBox',mode)
        vobj.setEditorMode('DisplayMode',mode)
        vobj.setEditorMode('Selectable',mode)
        vobj.setEditorMode('ShapeColor',mode)
        vobj.setEditorMode('Transparency',mode)
        vobj.setEditorMode('Visibility',mode)

    def __getstate__(self):  # mandatory
        return None

    def __setstate__(self,state):  # mandatory
        return None

    def getIcon(self):
        return ":/icons/Path-Project.svg"

    def onChanged(self,vobj,prop):
        mode = 2
        vobj.setEditorMode('LineWidth',mode)
        vobj.setEditorMode('MarkerColor',mode)
        vobj.setEditorMode('NormalColor',mode)
#        vobj.setEditorMode('ShowFirstRapid',mode)
        vobj.setEditorMode('BoundingBox',mode)
        vobj.setEditorMode('DisplayMode',mode)
        vobj.setEditorMode('Selectable',mode)
        vobj.setEditorMode('ShapeColor',mode)
        vobj.setEditorMode('Transparency',mode)
        vobj.setEditorMode('Visibility',mode)


class CommandProject:

    def GetResources(self):
        return {'Pixmap': 'Path-Project',
                'MenuText': QtCore.QT_TRANSLATE_NOOP("PathProject","Project"),
                'Accel': "P, P",
                'ToolTip': QtCore.QT_TRANSLATE_NOOP("PathProject","Creates a Path Project object")}

    def IsActive(self):
        return FreeCAD.ActiveDocument is not None

    def Activated(self):
        incl = []
        sel = FreeCADGui.Selection.getSelection()
        for obj in sel:
            if obj.isDerivedFrom("Path::Feature"):
                incl.append(obj)
        FreeCAD.ActiveDocument.openTransaction(translate("PathProject","Create Project"))
        CommandProject.Create(incl)
        FreeCAD.ActiveDocument.commitTransaction()
        FreeCAD.ActiveDocument.recompute()

    @staticmethod
    def Create(pathChildren = []):
        """Code to create a project"""
        #FreeCADGui.addModule("PathScripts.PathProject")
        obj = FreeCAD.ActiveDocument.addObject("Path::FeatureCompoundPython","Project")
        ObjectPathProject(obj)
        if pathChildren:
            # the previous code appended the new project object to pathChildren
            # while iterating over it, which never terminates; the selection
            # just needs to become the project's Group
            obj.Group = pathChildren
        ViewProviderProject(obj.ViewObject)
        # create a machine obj
        import PathScripts
        PathScripts.PathMachine.CommandPathMachine.Create()
        return obj


if FreeCAD.GuiUp:
    # register the FreeCAD command
    FreeCADGui.addCommand('Path_Project',CommandProject())

FreeCAD.Console.PrintLog("Loading PathProject... done\n")
lgpl-2.1
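For readers new to the pattern used by ObjectPathProject above: FreeCAD scripted objects attach a plain Python proxy whose execute() runs on document recompute. A minimal sketch (only meaningful inside a running FreeCAD session; the "Note" property and function names are made up for the example):

import FreeCAD

class MinimalProxy:
    def __init__(self, obj):
        obj.addProperty("App::PropertyString", "Note", "Base", "demo property")
        obj.Proxy = self

    def execute(self, obj):
        # called by FreeCAD whenever the document recomputes
        FreeCAD.Console.PrintLog("recomputed %s\n" % obj.Name)

def make_minimal():
    obj = FreeCAD.ActiveDocument.addObject("App::FeaturePython", "Demo")
    MinimalProxy(obj)
    return obj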
MaxiTalenti/IA
Trabajos practicos 2016/probar_entrega_1_local.py
1
5608
# coding: utf-8
from datetime import datetime
import inspect
import os
import sys

problemas = []
recomendaciones = []
advertencias = []


def validar_tiempo(inicio, fin, tope, listado, mensaje):
    diferencia = (fin - inicio).total_seconds()
    if diferencia > tope:
        listado.append(mensaje)


def probar_codigo(interactivo=False, saltear_errores=False):
    # dependencies
    try:
        from simpleai.search.models import SearchNode
    except ImportError:
        problemas.append(u'No se pudo importar SimpleAI. Se encuentra instalado?')
        return

    # try to import the submission
    print u'Importando la entrega...'
    try:
        inicio = datetime.now()
        import entrega_1_local
        fin = datetime.now()
    except ImportError:
        problemas.append(u'No se pudo encontrar el código python. Probablemente el nombre del archivo .py no es correcto, o no está en la raiz del repositorio.')
        return

    validar_tiempo(inicio, fin, 5, problemas, u'El import de la entrega demora demasiado tiempo, probablemente están haciendo búsqueda en el import. Hagan lo del if __name__ ... que se recomienda en la consigna.')

    # try to extract and validate the resolver function
    print u'Extrayendo la función resolver...'
    resolver = getattr(entrega_1_local, 'resolver', None)
    if resolver is None:
        problemas.append(u'El módulo python no define la función resolver.')
        return
    if inspect.getargspec(resolver)[0] != ['metodo_busqueda', 'iteraciones', 'haz', 'reinicios']:
        print resolver.__code__.co_varnames
        problemas.append(u'La función resolver no recibe los parámetros definidos en la entrega.')
        return

    # validate the behavior of the resolver function and the problem setup in general
    print u'Probando la resolución de problemas...'

    # metodo_busqueda, posicion_rey, graph_search, limite_largo_camino, limite_tiempo
    pruebas = (
        ('hill_climbing', dict(iteraciones=50)),
        ('hill_climbing_stochastic', dict(iteraciones=50)),
        ('beam', dict(iteraciones=50, haz=5)),
        ('hill_climbing_random_restarts', dict(iteraciones=50, reinicios=5)),
        ('simulated_annealing', dict(iteraciones=50)),
    )
    parametros_default = dict(reinicios=None, haz=None)

    for numero_prueba, (metodo_busqueda, parametros_pisados) in enumerate(pruebas):
        parametros = parametros_default.copy()
        parametros.update(parametros_pisados)

        print u'  Prueba', numero_prueba, ':', \
              metodo_busqueda, \
              u'con parametros', parametros

        if not interactivo or raw_input('ejecutar? (Y/n)').strip() in ('y', ''):
            try:
                inicio = datetime.now()
                resultado = resolver(metodo_busqueda=metodo_busqueda, **parametros)
                fin = datetime.now()

                if not isinstance(resultado, SearchNode):
                    problemas.append(u'El resultado devuelto por la función resolver en la prueba %i no es un nodo de búsqueda. Puede que la función resolver no esté devolviendo el nodo resultante, o que el problema no esté encontrando solución como debería.' % numero_prueba)
                else:
                    print u'    meta:', repr(resultado.state)
                    print u'    valor:', repr(resultado.value)
            except Exception as err:
                if saltear_errores:
                    problemas.append(u'Error al ejecutar %s (%s)' % (metodo_busqueda, str(err)))
                else:
                    raise


def probar_estadisticas():
    # open the statistics file
    print u'Abriendo estadísticas...'
    nombre_archivo = 'entrega_1_local.txt'
    if not os.path.exists(nombre_archivo):
        problemas.append(u'No se pudo encontrar el archivo de estadísticas. Probablemente el nombre del archivo no es correcto, o no está en la raiz del repositorio.')
        return

    with open(nombre_archivo) as archivo_stats:
        lineas_stats = archivo_stats.readlines()

    # validate contents
    casos = range(1, 6)
    casos_pendientes = casos[:]

    for linea in lineas_stats:
        linea = linea.strip()
        if linea:
            try:
                caso, valor = map(int, linea.split(':'))
                if caso not in casos:
                    problemas.append(u'Caso desconocido en archivo de estadísticas: %i' % caso)
                elif caso not in casos_pendientes:
                    problemas.append(u'Caso repetido en archivo de estadísticas: %i' % caso)
                else:
                    print u"  Encontrado caso", caso
                    casos_pendientes.remove(caso)
            except:
                problemas.append(u'La siguiente linea de estadísticas no respeta el formato definido: %s' % linea)

    if casos_pendientes:
        problemas.append(u'No se incluyeron las estadísticas de los siguientes casos: %s' % repr(casos_pendientes))


def imprimir_resultados():
    def listar_cosas(titulo, cosas):
        if cosas:
            print titulo + ':'
            for cosa in cosas:
                print '*', cosa

    listar_cosas(u'Problemas que es necesario corregir', problemas)
    listar_cosas(u'Advertencias (cosas que pueden ser un problema, aunque no siempre)', advertencias)
    listar_cosas(u'Recomendaciones', recomendaciones)


if __name__ == '__main__':
    print
    probar_codigo()
    print
    probar_estadisticas()
    print
    print u'Pruebas automáticas terminadas!'
    print
    imprimir_resultados()
apache-2.0
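The script above gates everything on an exact signature match. Here is the same check shown standalone with Python 2's inspect.getargspec (the function the script itself uses; it was removed in Python 3.11):

import inspect

def resolver(metodo_busqueda, iteraciones, haz, reinicios):
    pass

expected = ['metodo_busqueda', 'iteraciones', 'haz', 'reinicios']
# getargspec(...)[0] is the list of positional argument names
assert inspect.getargspec(resolver)[0] == expected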
borysiasty/QGIS
tests/src/python/test_qgsratiolockbutton.py
30
4394
# -*- coding: utf-8 -*-
"""QGIS Unit tests for QgsRatioLockButton

.. note:: This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
"""
__author__ = 'Nyall Dawson'
__date__ = '18/07/2017'
__copyright__ = 'Copyright 2017, The QGIS Project'

import qgis  # NOQA

from qgis.gui import QgsRatioLockButton
from qgis.PyQt.QtWidgets import QDoubleSpinBox
from qgis.testing import start_app, unittest

start_app()


class TestQgsRatioLockButton(unittest.TestCase):

    def testLinkedWidgets(self):
        """ test linking spin boxes to the ratio lock button """
        w = qgis.gui.QgsRatioLockButton()
        spin_width = QDoubleSpinBox()
        spin_width.setMaximum(100000)
        spin_height = QDoubleSpinBox()
        spin_height.setMaximum(100000)

        w.setWidthSpinBox(spin_width)
        spin_width.setValue(1000)
        self.assertEqual(spin_width.value(), 1000)
        w.setLocked(True)
        spin_width.setValue(2000)
        self.assertEqual(spin_width.value(), 2000)
        w.setLocked(False)

        w.setHeightSpinBox(spin_height)
        spin_width.setValue(1000)
        self.assertEqual(spin_width.value(), 1000)
        self.assertEqual(spin_height.value(), 0)
        w.setLocked(True)
        spin_width.setValue(2000)
        self.assertEqual(spin_width.value(), 2000)
        self.assertEqual(spin_height.value(), 0)
        spin_height.setValue(1000)
        self.assertEqual(spin_width.value(), 2000)
        self.assertEqual(spin_height.value(), 1000)

        # ok, that was all setup tests... let's check the real thing now
        spin_width.setValue(1000)
        self.assertEqual(spin_width.value(), 1000)
        self.assertEqual(spin_height.value(), 500)
        spin_height.setValue(1000)
        self.assertEqual(spin_width.value(), 2000)
        self.assertEqual(spin_height.value(), 1000)

        w.setLocked(False)
        spin_width.setValue(1000)
        self.assertEqual(spin_width.value(), 1000)
        self.assertEqual(spin_height.value(), 1000)
        spin_height.setValue(2000)
        self.assertEqual(spin_width.value(), 1000)
        self.assertEqual(spin_height.value(), 2000)

        w.setLocked(True)
        spin_height.setValue(1000)
        self.assertEqual(spin_width.value(), 500)
        self.assertEqual(spin_height.value(), 1000)

        # setting to 0 should "break" the lock
        spin_height.setValue(0)
        self.assertEqual(spin_width.value(), 500)
        self.assertEqual(spin_height.value(), 0)
        spin_width.setValue(1000)
        self.assertEqual(spin_width.value(), 1000)
        self.assertEqual(spin_height.value(), 0)
        spin_height.setValue(100)
        self.assertEqual(spin_width.value(), 1000)
        self.assertEqual(spin_height.value(), 100)

        spin_width.setValue(0)
        self.assertEqual(spin_width.value(), 0)
        self.assertEqual(spin_height.value(), 100)
        spin_height.setValue(1000)
        self.assertEqual(spin_width.value(), 0)
        self.assertEqual(spin_height.value(), 1000)
        spin_width.setValue(200)
        self.assertEqual(spin_width.value(), 200)
        self.assertEqual(spin_height.value(), 1000)

    def testResetRatio(self):
        w = qgis.gui.QgsRatioLockButton()
        spin_width = QDoubleSpinBox()
        spin_width.setMaximum(100000)
        spin_height = QDoubleSpinBox()
        spin_height.setMaximum(100000)

        spin_width.setValue(1000)
        w.setWidthSpinBox(spin_width)
        spin_height.setValue(500)
        w.setHeightSpinBox(spin_height)
        w.setLocked(True)

        spin_width.setValue(2000)
        self.assertEqual(spin_height.value(), 1000)

        spin_width.blockSignals(True)
        spin_width.setValue(1000)
        spin_width.blockSignals(False)
        spin_height.setValue(2000)
        self.assertEqual(spin_width.value(), 4000)  # signals were blocked, so the ratio wasn't updated

        spin_width.blockSignals(True)
        spin_width.setValue(2000)
        spin_width.blockSignals(False)
        w.resetRatio()  # since signals were blocked, we need to manually reset the ratio
        spin_height.setValue(1000)
        self.assertEqual(spin_width.value(), 1000)


if __name__ == '__main__':
    unittest.main()
gpl-2.0
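Stripped of Qt, the invariant asserted by the tests above looks like this: while both values are non-zero, editing one side rescales the other by the current ratio, and a zero on either side suspends the lock. A plain-Python sketch, not QGIS code:

class RatioLock:
    def __init__(self, width, height):
        self.width, self.height = width, height

    def set_width(self, w):
        if self.width and self.height and w:  # lock is active only when all values are non-zero
            self.height = self.height * w / self.width
        self.width = w

    def set_height(self, h):
        if self.width and self.height and h:
            self.width = self.width * h / self.height
        self.height = h

r = RatioLock(1000.0, 500.0)
r.set_width(2000.0)
assert r.height == 1000.0   # 2:1 ratio preserved
r.set_height(0.0)           # a zero suspends the lock
r.set_width(700.0)
assert r.height == 0.0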
harmy/kbengine
kbe/res/scripts/common/Lib/test/test_urlparse.py
2
40955
#! /usr/bin/env python3 from test import support import unittest import urllib.parse RFC1808_BASE = "http://a/b/c/d;p?q#f" RFC2396_BASE = "http://a/b/c/d;p?q" RFC3986_BASE = 'http://a/b/c/d;p?q' SIMPLE_BASE = 'http://a/b/c/d' # A list of test cases. Each test case is a two-tuple that contains # a string with the query and a dictionary with the expected result. parse_qsl_test_cases = [ ("", []), ("&", []), ("&&", []), ("=", [('', '')]), ("=a", [('', 'a')]), ("a", [('a', '')]), ("a=", [('a', '')]), ("a=", [('a', '')]), ("&a=b", [('a', 'b')]), ("a=a+b&b=b+c", [('a', 'a b'), ('b', 'b c')]), ("a=1&a=2", [('a', '1'), ('a', '2')]), (b"", []), (b"&", []), (b"&&", []), (b"=", [(b'', b'')]), (b"=a", [(b'', b'a')]), (b"a", [(b'a', b'')]), (b"a=", [(b'a', b'')]), (b"a=", [(b'a', b'')]), (b"&a=b", [(b'a', b'b')]), (b"a=a+b&b=b+c", [(b'a', b'a b'), (b'b', b'b c')]), (b"a=1&a=2", [(b'a', b'1'), (b'a', b'2')]), ] class UrlParseTestCase(unittest.TestCase): def checkRoundtrips(self, url, parsed, split): result = urllib.parse.urlparse(url) self.assertEqual(result, parsed) t = (result.scheme, result.netloc, result.path, result.params, result.query, result.fragment) self.assertEqual(t, parsed) # put it back together and it should be the same result2 = urllib.parse.urlunparse(result) self.assertEqual(result2, url) self.assertEqual(result2, result.geturl()) # the result of geturl() is a fixpoint; we can always parse it # again to get the same result: result3 = urllib.parse.urlparse(result.geturl()) self.assertEqual(result3.geturl(), result.geturl()) self.assertEqual(result3, result) self.assertEqual(result3.scheme, result.scheme) self.assertEqual(result3.netloc, result.netloc) self.assertEqual(result3.path, result.path) self.assertEqual(result3.params, result.params) self.assertEqual(result3.query, result.query) self.assertEqual(result3.fragment, result.fragment) self.assertEqual(result3.username, result.username) self.assertEqual(result3.password, result.password) self.assertEqual(result3.hostname, result.hostname) self.assertEqual(result3.port, result.port) # check the roundtrip using urlsplit() as well result = urllib.parse.urlsplit(url) self.assertEqual(result, split) t = (result.scheme, result.netloc, result.path, result.query, result.fragment) self.assertEqual(t, split) result2 = urllib.parse.urlunsplit(result) self.assertEqual(result2, url) self.assertEqual(result2, result.geturl()) # check the fixpoint property of re-parsing the result of geturl() result3 = urllib.parse.urlsplit(result.geturl()) self.assertEqual(result3.geturl(), result.geturl()) self.assertEqual(result3, result) self.assertEqual(result3.scheme, result.scheme) self.assertEqual(result3.netloc, result.netloc) self.assertEqual(result3.path, result.path) self.assertEqual(result3.query, result.query) self.assertEqual(result3.fragment, result.fragment) self.assertEqual(result3.username, result.username) self.assertEqual(result3.password, result.password) self.assertEqual(result3.hostname, result.hostname) self.assertEqual(result3.port, result.port) def test_qsl(self): for orig, expect in parse_qsl_test_cases: result = urllib.parse.parse_qsl(orig, keep_blank_values=True) self.assertEqual(result, expect, "Error parsing %r" % orig) expect_without_blanks = [v for v in expect if len(v[1])] result = urllib.parse.parse_qsl(orig, keep_blank_values=False) self.assertEqual(result, expect_without_blanks, "Error parsing %r" % orig) def test_roundtrips(self): str_cases = [ ('file:///tmp/junk.txt', ('file', '', '/tmp/junk.txt', '', '', ''), ('file', '', 
'/tmp/junk.txt', '', '')), ('imap://mail.python.org/mbox1', ('imap', 'mail.python.org', '/mbox1', '', '', ''), ('imap', 'mail.python.org', '/mbox1', '', '')), ('mms://wms.sys.hinet.net/cts/Drama/09006251100.asf', ('mms', 'wms.sys.hinet.net', '/cts/Drama/09006251100.asf', '', '', ''), ('mms', 'wms.sys.hinet.net', '/cts/Drama/09006251100.asf', '', '')), ('nfs://server/path/to/file.txt', ('nfs', 'server', '/path/to/file.txt', '', '', ''), ('nfs', 'server', '/path/to/file.txt', '', '')), ('svn+ssh://svn.zope.org/repos/main/ZConfig/trunk/', ('svn+ssh', 'svn.zope.org', '/repos/main/ZConfig/trunk/', '', '', ''), ('svn+ssh', 'svn.zope.org', '/repos/main/ZConfig/trunk/', '', '')), ('git+ssh://git@github.com/user/project.git', ('git+ssh', 'git@github.com','/user/project.git', '','',''), ('git+ssh', 'git@github.com','/user/project.git', '', '')), ] def _encode(t): return (t[0].encode('ascii'), tuple(x.encode('ascii') for x in t[1]), tuple(x.encode('ascii') for x in t[2])) bytes_cases = [_encode(x) for x in str_cases] for url, parsed, split in str_cases + bytes_cases: self.checkRoundtrips(url, parsed, split) def test_http_roundtrips(self): # urllib.parse.urlsplit treats 'http:' as an optimized special case, # so we test both 'http:' and 'https:' in all the following. # Three cheers for white box knowledge! str_cases = [ ('://www.python.org', ('www.python.org', '', '', '', ''), ('www.python.org', '', '', '')), ('://www.python.org#abc', ('www.python.org', '', '', '', 'abc'), ('www.python.org', '', '', 'abc')), ('://www.python.org?q=abc', ('www.python.org', '', '', 'q=abc', ''), ('www.python.org', '', 'q=abc', '')), ('://www.python.org/#abc', ('www.python.org', '/', '', '', 'abc'), ('www.python.org', '/', '', 'abc')), ('://a/b/c/d;p?q#f', ('a', '/b/c/d', 'p', 'q', 'f'), ('a', '/b/c/d;p', 'q', 'f')), ] def _encode(t): return (t[0].encode('ascii'), tuple(x.encode('ascii') for x in t[1]), tuple(x.encode('ascii') for x in t[2])) bytes_cases = [_encode(x) for x in str_cases] str_schemes = ('http', 'https') bytes_schemes = (b'http', b'https') str_tests = str_schemes, str_cases bytes_tests = bytes_schemes, bytes_cases for schemes, test_cases in (str_tests, bytes_tests): for scheme in schemes: for url, parsed, split in test_cases: url = scheme + url parsed = (scheme,) + parsed split = (scheme,) + split self.checkRoundtrips(url, parsed, split) def checkJoin(self, base, relurl, expected): str_components = (base, relurl, expected) self.assertEqual(urllib.parse.urljoin(base, relurl), expected) bytes_components = baseb, relurlb, expectedb = [ x.encode('ascii') for x in str_components] self.assertEqual(urllib.parse.urljoin(baseb, relurlb), expectedb) def test_unparse_parse(self): str_cases = ['Python', './Python','x-newscheme://foo.com/stuff','x://y','x:/y','x:/','/',] bytes_cases = [x.encode('ascii') for x in str_cases] for u in str_cases + bytes_cases: self.assertEqual(urllib.parse.urlunsplit(urllib.parse.urlsplit(u)), u) self.assertEqual(urllib.parse.urlunparse(urllib.parse.urlparse(u)), u) def test_RFC1808(self): # "normal" cases from RFC 1808: self.checkJoin(RFC1808_BASE, 'g:h', 'g:h') self.checkJoin(RFC1808_BASE, 'g', 'http://a/b/c/g') self.checkJoin(RFC1808_BASE, './g', 'http://a/b/c/g') self.checkJoin(RFC1808_BASE, 'g/', 'http://a/b/c/g/') self.checkJoin(RFC1808_BASE, '/g', 'http://a/g') self.checkJoin(RFC1808_BASE, '//g', 'http://g') self.checkJoin(RFC1808_BASE, 'g?y', 'http://a/b/c/g?y') self.checkJoin(RFC1808_BASE, 'g?y/./x', 'http://a/b/c/g?y/./x') self.checkJoin(RFC1808_BASE, '#s', 'http://a/b/c/d;p?q#s') 
self.checkJoin(RFC1808_BASE, 'g#s', 'http://a/b/c/g#s') self.checkJoin(RFC1808_BASE, 'g#s/./x', 'http://a/b/c/g#s/./x') self.checkJoin(RFC1808_BASE, 'g?y#s', 'http://a/b/c/g?y#s') self.checkJoin(RFC1808_BASE, 'g;x', 'http://a/b/c/g;x') self.checkJoin(RFC1808_BASE, 'g;x?y#s', 'http://a/b/c/g;x?y#s') self.checkJoin(RFC1808_BASE, '.', 'http://a/b/c/') self.checkJoin(RFC1808_BASE, './', 'http://a/b/c/') self.checkJoin(RFC1808_BASE, '..', 'http://a/b/') self.checkJoin(RFC1808_BASE, '../', 'http://a/b/') self.checkJoin(RFC1808_BASE, '../g', 'http://a/b/g') self.checkJoin(RFC1808_BASE, '../..', 'http://a/') self.checkJoin(RFC1808_BASE, '../../', 'http://a/') self.checkJoin(RFC1808_BASE, '../../g', 'http://a/g') # "abnormal" cases from RFC 1808: self.checkJoin(RFC1808_BASE, '', 'http://a/b/c/d;p?q#f') self.checkJoin(RFC1808_BASE, '../../../g', 'http://a/../g') self.checkJoin(RFC1808_BASE, '../../../../g', 'http://a/../../g') self.checkJoin(RFC1808_BASE, '/./g', 'http://a/./g') self.checkJoin(RFC1808_BASE, '/../g', 'http://a/../g') self.checkJoin(RFC1808_BASE, 'g.', 'http://a/b/c/g.') self.checkJoin(RFC1808_BASE, '.g', 'http://a/b/c/.g') self.checkJoin(RFC1808_BASE, 'g..', 'http://a/b/c/g..') self.checkJoin(RFC1808_BASE, '..g', 'http://a/b/c/..g') self.checkJoin(RFC1808_BASE, './../g', 'http://a/b/g') self.checkJoin(RFC1808_BASE, './g/.', 'http://a/b/c/g/') self.checkJoin(RFC1808_BASE, 'g/./h', 'http://a/b/c/g/h') self.checkJoin(RFC1808_BASE, 'g/../h', 'http://a/b/c/h') # RFC 1808 and RFC 1630 disagree on these (according to RFC 1808), # so we'll not actually run these tests (which expect 1808 behavior). #self.checkJoin(RFC1808_BASE, 'http:g', 'http:g') #self.checkJoin(RFC1808_BASE, 'http:', 'http:') def test_RFC2368(self): # Issue 11467: path that starts with a number is not parsed correctly self.assertEqual(urllib.parse.urlparse('mailto:1337@example.org'), ('mailto', '', '1337@example.org', '', '', '')) def test_RFC2396(self): # cases from RFC 2396 self.checkJoin(RFC2396_BASE, 'g:h', 'g:h') self.checkJoin(RFC2396_BASE, 'g', 'http://a/b/c/g') self.checkJoin(RFC2396_BASE, './g', 'http://a/b/c/g') self.checkJoin(RFC2396_BASE, 'g/', 'http://a/b/c/g/') self.checkJoin(RFC2396_BASE, '/g', 'http://a/g') self.checkJoin(RFC2396_BASE, '//g', 'http://g') self.checkJoin(RFC2396_BASE, 'g?y', 'http://a/b/c/g?y') self.checkJoin(RFC2396_BASE, '#s', 'http://a/b/c/d;p?q#s') self.checkJoin(RFC2396_BASE, 'g#s', 'http://a/b/c/g#s') self.checkJoin(RFC2396_BASE, 'g?y#s', 'http://a/b/c/g?y#s') self.checkJoin(RFC2396_BASE, 'g;x', 'http://a/b/c/g;x') self.checkJoin(RFC2396_BASE, 'g;x?y#s', 'http://a/b/c/g;x?y#s') self.checkJoin(RFC2396_BASE, '.', 'http://a/b/c/') self.checkJoin(RFC2396_BASE, './', 'http://a/b/c/') self.checkJoin(RFC2396_BASE, '..', 'http://a/b/') self.checkJoin(RFC2396_BASE, '../', 'http://a/b/') self.checkJoin(RFC2396_BASE, '../g', 'http://a/b/g') self.checkJoin(RFC2396_BASE, '../..', 'http://a/') self.checkJoin(RFC2396_BASE, '../../', 'http://a/') self.checkJoin(RFC2396_BASE, '../../g', 'http://a/g') self.checkJoin(RFC2396_BASE, '', RFC2396_BASE) self.checkJoin(RFC2396_BASE, '../../../g', 'http://a/../g') self.checkJoin(RFC2396_BASE, '../../../../g', 'http://a/../../g') self.checkJoin(RFC2396_BASE, '/./g', 'http://a/./g') self.checkJoin(RFC2396_BASE, '/../g', 'http://a/../g') self.checkJoin(RFC2396_BASE, 'g.', 'http://a/b/c/g.') self.checkJoin(RFC2396_BASE, '.g', 'http://a/b/c/.g') self.checkJoin(RFC2396_BASE, 'g..', 'http://a/b/c/g..') self.checkJoin(RFC2396_BASE, '..g', 'http://a/b/c/..g') 
self.checkJoin(RFC2396_BASE, './../g', 'http://a/b/g') self.checkJoin(RFC2396_BASE, './g/.', 'http://a/b/c/g/') self.checkJoin(RFC2396_BASE, 'g/./h', 'http://a/b/c/g/h') self.checkJoin(RFC2396_BASE, 'g/../h', 'http://a/b/c/h') self.checkJoin(RFC2396_BASE, 'g;x=1/./y', 'http://a/b/c/g;x=1/y') self.checkJoin(RFC2396_BASE, 'g;x=1/../y', 'http://a/b/c/y') self.checkJoin(RFC2396_BASE, 'g?y/./x', 'http://a/b/c/g?y/./x') self.checkJoin(RFC2396_BASE, 'g?y/../x', 'http://a/b/c/g?y/../x') self.checkJoin(RFC2396_BASE, 'g#s/./x', 'http://a/b/c/g#s/./x') self.checkJoin(RFC2396_BASE, 'g#s/../x', 'http://a/b/c/g#s/../x') def test_RFC3986(self): # Test cases from RFC3986 self.checkJoin(RFC3986_BASE, '?y','http://a/b/c/d;p?y') self.checkJoin(RFC2396_BASE, ';x', 'http://a/b/c/;x') self.checkJoin(RFC3986_BASE, 'g:h','g:h') self.checkJoin(RFC3986_BASE, 'g','http://a/b/c/g') self.checkJoin(RFC3986_BASE, './g','http://a/b/c/g') self.checkJoin(RFC3986_BASE, 'g/','http://a/b/c/g/') self.checkJoin(RFC3986_BASE, '/g','http://a/g') self.checkJoin(RFC3986_BASE, '//g','http://g') self.checkJoin(RFC3986_BASE, '?y','http://a/b/c/d;p?y') self.checkJoin(RFC3986_BASE, 'g?y','http://a/b/c/g?y') self.checkJoin(RFC3986_BASE, '#s','http://a/b/c/d;p?q#s') self.checkJoin(RFC3986_BASE, 'g#s','http://a/b/c/g#s') self.checkJoin(RFC3986_BASE, 'g?y#s','http://a/b/c/g?y#s') self.checkJoin(RFC3986_BASE, ';x','http://a/b/c/;x') self.checkJoin(RFC3986_BASE, 'g;x','http://a/b/c/g;x') self.checkJoin(RFC3986_BASE, 'g;x?y#s','http://a/b/c/g;x?y#s') self.checkJoin(RFC3986_BASE, '','http://a/b/c/d;p?q') self.checkJoin(RFC3986_BASE, '.','http://a/b/c/') self.checkJoin(RFC3986_BASE, './','http://a/b/c/') self.checkJoin(RFC3986_BASE, '..','http://a/b/') self.checkJoin(RFC3986_BASE, '../','http://a/b/') self.checkJoin(RFC3986_BASE, '../g','http://a/b/g') self.checkJoin(RFC3986_BASE, '../..','http://a/') self.checkJoin(RFC3986_BASE, '../../','http://a/') self.checkJoin(RFC3986_BASE, '../../g','http://a/g') #Abnormal Examples # The 'abnormal scenarios' are incompatible with RFC3986 parsing # Tests are here for reference. 
#self.checkJoin(RFC3986_BASE, '../../../g','http://a/g') #self.checkJoin(RFC3986_BASE, '../../../../g','http://a/g') #self.checkJoin(RFC3986_BASE, '/./g','http://a/g') #self.checkJoin(RFC3986_BASE, '/../g','http://a/g') self.checkJoin(RFC3986_BASE, 'g.','http://a/b/c/g.') self.checkJoin(RFC3986_BASE, '.g','http://a/b/c/.g') self.checkJoin(RFC3986_BASE, 'g..','http://a/b/c/g..') self.checkJoin(RFC3986_BASE, '..g','http://a/b/c/..g') self.checkJoin(RFC3986_BASE, './../g','http://a/b/g') self.checkJoin(RFC3986_BASE, './g/.','http://a/b/c/g/') self.checkJoin(RFC3986_BASE, 'g/./h','http://a/b/c/g/h') self.checkJoin(RFC3986_BASE, 'g/../h','http://a/b/c/h') self.checkJoin(RFC3986_BASE, 'g;x=1/./y','http://a/b/c/g;x=1/y') self.checkJoin(RFC3986_BASE, 'g;x=1/../y','http://a/b/c/y') self.checkJoin(RFC3986_BASE, 'g?y/./x','http://a/b/c/g?y/./x') self.checkJoin(RFC3986_BASE, 'g?y/../x','http://a/b/c/g?y/../x') self.checkJoin(RFC3986_BASE, 'g#s/./x','http://a/b/c/g#s/./x') self.checkJoin(RFC3986_BASE, 'g#s/../x','http://a/b/c/g#s/../x') #self.checkJoin(RFC3986_BASE, 'http:g','http:g') # strict parser self.checkJoin(RFC3986_BASE, 'http:g','http://a/b/c/g') #relaxed parser # Test for issue9721 self.checkJoin('http://a/b/c/de', ';x','http://a/b/c/;x') def test_urljoins(self): self.checkJoin(SIMPLE_BASE, 'g:h','g:h') self.checkJoin(SIMPLE_BASE, 'http:g','http://a/b/c/g') self.checkJoin(SIMPLE_BASE, 'http:','http://a/b/c/d') self.checkJoin(SIMPLE_BASE, 'g','http://a/b/c/g') self.checkJoin(SIMPLE_BASE, './g','http://a/b/c/g') self.checkJoin(SIMPLE_BASE, 'g/','http://a/b/c/g/') self.checkJoin(SIMPLE_BASE, '/g','http://a/g') self.checkJoin(SIMPLE_BASE, '//g','http://g') self.checkJoin(SIMPLE_BASE, '?y','http://a/b/c/d?y') self.checkJoin(SIMPLE_BASE, 'g?y','http://a/b/c/g?y') self.checkJoin(SIMPLE_BASE, 'g?y/./x','http://a/b/c/g?y/./x') self.checkJoin(SIMPLE_BASE, '.','http://a/b/c/') self.checkJoin(SIMPLE_BASE, './','http://a/b/c/') self.checkJoin(SIMPLE_BASE, '..','http://a/b/') self.checkJoin(SIMPLE_BASE, '../','http://a/b/') self.checkJoin(SIMPLE_BASE, '../g','http://a/b/g') self.checkJoin(SIMPLE_BASE, '../..','http://a/') self.checkJoin(SIMPLE_BASE, '../../g','http://a/g') self.checkJoin(SIMPLE_BASE, '../../../g','http://a/../g') self.checkJoin(SIMPLE_BASE, './../g','http://a/b/g') self.checkJoin(SIMPLE_BASE, './g/.','http://a/b/c/g/') self.checkJoin(SIMPLE_BASE, '/./g','http://a/./g') self.checkJoin(SIMPLE_BASE, 'g/./h','http://a/b/c/g/h') self.checkJoin(SIMPLE_BASE, 'g/../h','http://a/b/c/h') self.checkJoin(SIMPLE_BASE, 'http:g','http://a/b/c/g') self.checkJoin(SIMPLE_BASE, 'http:','http://a/b/c/d') self.checkJoin(SIMPLE_BASE, 'http:?y','http://a/b/c/d?y') self.checkJoin(SIMPLE_BASE, 'http:g?y','http://a/b/c/g?y') self.checkJoin(SIMPLE_BASE, 'http:g?y/./x','http://a/b/c/g?y/./x') self.checkJoin('http:///', '..','http:///') self.checkJoin('', 'http://a/b/c/g?y/./x','http://a/b/c/g?y/./x') self.checkJoin('', 'http://a/./g', 'http://a/./g') self.checkJoin('svn://pathtorepo/dir1', 'dir2', 'svn://pathtorepo/dir2') self.checkJoin('svn+ssh://pathtorepo/dir1', 'dir2', 'svn+ssh://pathtorepo/dir2') def test_RFC2732(self): str_cases = [ ('http://Test.python.org:5432/foo/', 'test.python.org', 5432), ('http://12.34.56.78:5432/foo/', '12.34.56.78', 5432), ('http://[::1]:5432/foo/', '::1', 5432), ('http://[dead:beef::1]:5432/foo/', 'dead:beef::1', 5432), ('http://[dead:beef::]:5432/foo/', 'dead:beef::', 5432), ('http://[dead:beef:cafe:5417:affe:8FA3:deaf:feed]:5432/foo/', 'dead:beef:cafe:5417:affe:8fa3:deaf:feed', 
5432), ('http://[::12.34.56.78]:5432/foo/', '::12.34.56.78', 5432), ('http://[::ffff:12.34.56.78]:5432/foo/', '::ffff:12.34.56.78', 5432), ('http://Test.python.org/foo/', 'test.python.org', None), ('http://12.34.56.78/foo/', '12.34.56.78', None), ('http://[::1]/foo/', '::1', None), ('http://[dead:beef::1]/foo/', 'dead:beef::1', None), ('http://[dead:beef::]/foo/', 'dead:beef::', None), ('http://[dead:beef:cafe:5417:affe:8FA3:deaf:feed]/foo/', 'dead:beef:cafe:5417:affe:8fa3:deaf:feed', None), ('http://[::12.34.56.78]/foo/', '::12.34.56.78', None), ('http://[::ffff:12.34.56.78]/foo/', '::ffff:12.34.56.78', None), ] def _encode(t): return t[0].encode('ascii'), t[1].encode('ascii'), t[2] bytes_cases = [_encode(x) for x in str_cases] for url, hostname, port in str_cases + bytes_cases: urlparsed = urllib.parse.urlparse(url) self.assertEqual((urlparsed.hostname, urlparsed.port) , (hostname, port)) str_cases = [ 'http://::12.34.56.78]/', 'http://[::1/foo/', 'ftp://[::1/foo/bad]/bad', 'http://[::1/foo/bad]/bad', 'http://[::ffff:12.34.56.78'] bytes_cases = [x.encode('ascii') for x in str_cases] for invalid_url in str_cases + bytes_cases: self.assertRaises(ValueError, urllib.parse.urlparse, invalid_url) def test_urldefrag(self): str_cases = [ ('http://python.org#frag', 'http://python.org', 'frag'), ('http://python.org', 'http://python.org', ''), ('http://python.org/#frag', 'http://python.org/', 'frag'), ('http://python.org/', 'http://python.org/', ''), ('http://python.org/?q#frag', 'http://python.org/?q', 'frag'), ('http://python.org/?q', 'http://python.org/?q', ''), ('http://python.org/p#frag', 'http://python.org/p', 'frag'), ('http://python.org/p?q', 'http://python.org/p?q', ''), (RFC1808_BASE, 'http://a/b/c/d;p?q', 'f'), (RFC2396_BASE, 'http://a/b/c/d;p?q', ''), ] def _encode(t): return type(t)(x.encode('ascii') for x in t) bytes_cases = [_encode(x) for x in str_cases] for url, defrag, frag in str_cases + bytes_cases: result = urllib.parse.urldefrag(url) self.assertEqual(result.geturl(), url) self.assertEqual(result, (defrag, frag)) self.assertEqual(result.url, defrag) self.assertEqual(result.fragment, frag) def test_urlsplit_attributes(self): url = "HTTP://WWW.PYTHON.ORG/doc/#frag" p = urllib.parse.urlsplit(url) self.assertEqual(p.scheme, "http") self.assertEqual(p.netloc, "WWW.PYTHON.ORG") self.assertEqual(p.path, "/doc/") self.assertEqual(p.query, "") self.assertEqual(p.fragment, "frag") self.assertEqual(p.username, None) self.assertEqual(p.password, None) self.assertEqual(p.hostname, "www.python.org") self.assertEqual(p.port, None) # geturl() won't return exactly the original URL in this case # since the scheme is always case-normalized # We handle this by ignoring the first 4 characters of the URL self.assertEqual(p.geturl()[4:], url[4:]) url = "http://User:Pass@www.python.org:080/doc/?query=yes#frag" p = urllib.parse.urlsplit(url) self.assertEqual(p.scheme, "http") self.assertEqual(p.netloc, "User:Pass@www.python.org:080") self.assertEqual(p.path, "/doc/") self.assertEqual(p.query, "query=yes") self.assertEqual(p.fragment, "frag") self.assertEqual(p.username, "User") self.assertEqual(p.password, "Pass") self.assertEqual(p.hostname, "www.python.org") self.assertEqual(p.port, 80) self.assertEqual(p.geturl(), url) # Addressing issue1698, which suggests Username can contain # "@" characters. Though not RFC compliant, many ftp sites allow # and request email addresses as usernames. 
url = "http://User@example.com:Pass@www.python.org:080/doc/?query=yes#frag" p = urllib.parse.urlsplit(url) self.assertEqual(p.scheme, "http") self.assertEqual(p.netloc, "User@example.com:Pass@www.python.org:080") self.assertEqual(p.path, "/doc/") self.assertEqual(p.query, "query=yes") self.assertEqual(p.fragment, "frag") self.assertEqual(p.username, "User@example.com") self.assertEqual(p.password, "Pass") self.assertEqual(p.hostname, "www.python.org") self.assertEqual(p.port, 80) self.assertEqual(p.geturl(), url) # And check them all again, only with bytes this time url = b"HTTP://WWW.PYTHON.ORG/doc/#frag" p = urllib.parse.urlsplit(url) self.assertEqual(p.scheme, b"http") self.assertEqual(p.netloc, b"WWW.PYTHON.ORG") self.assertEqual(p.path, b"/doc/") self.assertEqual(p.query, b"") self.assertEqual(p.fragment, b"frag") self.assertEqual(p.username, None) self.assertEqual(p.password, None) self.assertEqual(p.hostname, b"www.python.org") self.assertEqual(p.port, None) self.assertEqual(p.geturl()[4:], url[4:]) url = b"http://User:Pass@www.python.org:080/doc/?query=yes#frag" p = urllib.parse.urlsplit(url) self.assertEqual(p.scheme, b"http") self.assertEqual(p.netloc, b"User:Pass@www.python.org:080") self.assertEqual(p.path, b"/doc/") self.assertEqual(p.query, b"query=yes") self.assertEqual(p.fragment, b"frag") self.assertEqual(p.username, b"User") self.assertEqual(p.password, b"Pass") self.assertEqual(p.hostname, b"www.python.org") self.assertEqual(p.port, 80) self.assertEqual(p.geturl(), url) url = b"http://User@example.com:Pass@www.python.org:080/doc/?query=yes#frag" p = urllib.parse.urlsplit(url) self.assertEqual(p.scheme, b"http") self.assertEqual(p.netloc, b"User@example.com:Pass@www.python.org:080") self.assertEqual(p.path, b"/doc/") self.assertEqual(p.query, b"query=yes") self.assertEqual(p.fragment, b"frag") self.assertEqual(p.username, b"User@example.com") self.assertEqual(p.password, b"Pass") self.assertEqual(p.hostname, b"www.python.org") self.assertEqual(p.port, 80) self.assertEqual(p.geturl(), url) def test_attributes_bad_port(self): """Check handling of non-integer ports.""" p = urllib.parse.urlsplit("http://www.example.net:foo") self.assertEqual(p.netloc, "www.example.net:foo") self.assertRaises(ValueError, lambda: p.port) p = urllib.parse.urlparse("http://www.example.net:foo") self.assertEqual(p.netloc, "www.example.net:foo") self.assertRaises(ValueError, lambda: p.port) # Once again, repeat ourselves to test bytes p = urllib.parse.urlsplit(b"http://www.example.net:foo") self.assertEqual(p.netloc, b"www.example.net:foo") self.assertRaises(ValueError, lambda: p.port) p = urllib.parse.urlparse(b"http://www.example.net:foo") self.assertEqual(p.netloc, b"www.example.net:foo") self.assertRaises(ValueError, lambda: p.port) def test_attributes_without_netloc(self): # This example is straight from RFC 3261. It looks like it # should allow the username, hostname, and port to be filled # in, but doesn't. Since it's a URI and doesn't use the # scheme://netloc syntax, the netloc and related attributes # should be left empty. 
uri = "sip:alice@atlanta.com;maddr=239.255.255.1;ttl=15" p = urllib.parse.urlsplit(uri) self.assertEqual(p.netloc, "") self.assertEqual(p.username, None) self.assertEqual(p.password, None) self.assertEqual(p.hostname, None) self.assertEqual(p.port, None) self.assertEqual(p.geturl(), uri) p = urllib.parse.urlparse(uri) self.assertEqual(p.netloc, "") self.assertEqual(p.username, None) self.assertEqual(p.password, None) self.assertEqual(p.hostname, None) self.assertEqual(p.port, None) self.assertEqual(p.geturl(), uri) # You guessed it, repeating the test with bytes input uri = b"sip:alice@atlanta.com;maddr=239.255.255.1;ttl=15" p = urllib.parse.urlsplit(uri) self.assertEqual(p.netloc, b"") self.assertEqual(p.username, None) self.assertEqual(p.password, None) self.assertEqual(p.hostname, None) self.assertEqual(p.port, None) self.assertEqual(p.geturl(), uri) p = urllib.parse.urlparse(uri) self.assertEqual(p.netloc, b"") self.assertEqual(p.username, None) self.assertEqual(p.password, None) self.assertEqual(p.hostname, None) self.assertEqual(p.port, None) self.assertEqual(p.geturl(), uri) def test_noslash(self): # Issue 1637: http://foo.com?query is legal self.assertEqual(urllib.parse.urlparse("http://example.com?blahblah=/foo"), ('http', 'example.com', '', '', 'blahblah=/foo', '')) self.assertEqual(urllib.parse.urlparse(b"http://example.com?blahblah=/foo"), (b'http', b'example.com', b'', b'', b'blahblah=/foo', b'')) def test_withoutscheme(self): # Test urlparse without scheme # Issue 754016: urlparse goes wrong with IP:port without scheme # RFC 1808 specifies that netloc should start with //, urlparse expects # the same, otherwise it classifies the portion of url as path. self.assertEqual(urllib.parse.urlparse("path"), ('','','path','','','')) self.assertEqual(urllib.parse.urlparse("//www.python.org:80"), ('','www.python.org:80','','','','')) self.assertEqual(urllib.parse.urlparse("http://www.python.org:80"), ('http','www.python.org:80','','','','')) # Repeat for bytes input self.assertEqual(urllib.parse.urlparse(b"path"), (b'',b'',b'path',b'',b'',b'')) self.assertEqual(urllib.parse.urlparse(b"//www.python.org:80"), (b'',b'www.python.org:80',b'',b'',b'',b'')) self.assertEqual(urllib.parse.urlparse(b"http://www.python.org:80"), (b'http',b'www.python.org:80',b'',b'',b'',b'')) def test_portseparator(self): # Issue 754016 makes changes for port separator ':' from scheme separator self.assertEqual(urllib.parse.urlparse("path:80"), ('','','path:80','','','')) self.assertEqual(urllib.parse.urlparse("http:"),('http','','','','','')) self.assertEqual(urllib.parse.urlparse("https:"),('https','','','','','')) self.assertEqual(urllib.parse.urlparse("http://www.python.org:80"), ('http','www.python.org:80','','','','')) # As usual, need to check bytes input as well self.assertEqual(urllib.parse.urlparse(b"path:80"), (b'',b'',b'path:80',b'',b'',b'')) self.assertEqual(urllib.parse.urlparse(b"http:"),(b'http',b'',b'',b'',b'',b'')) self.assertEqual(urllib.parse.urlparse(b"https:"),(b'https',b'',b'',b'',b'',b'')) self.assertEqual(urllib.parse.urlparse(b"http://www.python.org:80"), (b'http',b'www.python.org:80',b'',b'',b'',b'')) def test_usingsys(self): # Issue 3314: sys module is used in the error self.assertRaises(TypeError, urllib.parse.urlencode, "foo") def test_anyscheme(self): # Issue 7904: s3://foo.com/stuff has netloc "foo.com". 
self.assertEqual(urllib.parse.urlparse("s3://foo.com/stuff"), ('s3', 'foo.com', '/stuff', '', '', '')) self.assertEqual(urllib.parse.urlparse("x-newscheme://foo.com/stuff"), ('x-newscheme', 'foo.com', '/stuff', '', '', '')) # And for bytes... self.assertEqual(urllib.parse.urlparse(b"s3://foo.com/stuff"), (b's3', b'foo.com', b'/stuff', b'', b'', b'')) self.assertEqual(urllib.parse.urlparse(b"x-newscheme://foo.com/stuff"), (b'x-newscheme', b'foo.com', b'/stuff', b'', b'', b'')) def test_mixed_types_rejected(self): # Several functions that process either strings or ASCII encoded bytes # accept multiple arguments. Check they reject mixed type input with self.assertRaisesRegex(TypeError, "Cannot mix str"): urllib.parse.urlparse("www.python.org", b"http") with self.assertRaisesRegex(TypeError, "Cannot mix str"): urllib.parse.urlparse(b"www.python.org", "http") with self.assertRaisesRegex(TypeError, "Cannot mix str"): urllib.parse.urlsplit("www.python.org", b"http") with self.assertRaisesRegex(TypeError, "Cannot mix str"): urllib.parse.urlsplit(b"www.python.org", "http") with self.assertRaisesRegex(TypeError, "Cannot mix str"): urllib.parse.urlunparse(( b"http", "www.python.org","","","","")) with self.assertRaisesRegex(TypeError, "Cannot mix str"): urllib.parse.urlunparse(("http", b"www.python.org","","","","")) with self.assertRaisesRegex(TypeError, "Cannot mix str"): urllib.parse.urlunsplit((b"http", "www.python.org","","","")) with self.assertRaisesRegex(TypeError, "Cannot mix str"): urllib.parse.urlunsplit(("http", b"www.python.org","","","")) with self.assertRaisesRegex(TypeError, "Cannot mix str"): urllib.parse.urljoin("http://python.org", b"http://python.org") with self.assertRaisesRegex(TypeError, "Cannot mix str"): urllib.parse.urljoin(b"http://python.org", "http://python.org") def _check_result_type(self, str_type): num_args = len(str_type._fields) bytes_type = str_type._encoded_counterpart self.assertIs(bytes_type._decoded_counterpart, str_type) str_args = ('',) * num_args bytes_args = (b'',) * num_args str_result = str_type(*str_args) bytes_result = bytes_type(*bytes_args) encoding = 'ascii' errors = 'strict' self.assertEqual(str_result, str_args) self.assertEqual(bytes_result.decode(), str_args) self.assertEqual(bytes_result.decode(), str_result) self.assertEqual(bytes_result.decode(encoding), str_args) self.assertEqual(bytes_result.decode(encoding), str_result) self.assertEqual(bytes_result.decode(encoding, errors), str_args) self.assertEqual(bytes_result.decode(encoding, errors), str_result) self.assertEqual(bytes_result, bytes_args) self.assertEqual(str_result.encode(), bytes_args) self.assertEqual(str_result.encode(), bytes_result) self.assertEqual(str_result.encode(encoding), bytes_args) self.assertEqual(str_result.encode(encoding), bytes_result) self.assertEqual(str_result.encode(encoding, errors), bytes_args) self.assertEqual(str_result.encode(encoding, errors), bytes_result) def test_result_pairs(self): # Check encoding and decoding between result pairs result_types = [ urllib.parse.DefragResult, urllib.parse.SplitResult, urllib.parse.ParseResult, ] for result_type in result_types: self._check_result_type(result_type) def test_parse_qs_encoding(self): result = urllib.parse.parse_qs("key=\u0141%E9", encoding="latin-1") self.assertEqual(result, {'key': ['\u0141\xE9']}) result = urllib.parse.parse_qs("key=\u0141%C3%A9", encoding="utf-8") self.assertEqual(result, {'key': ['\u0141\xE9']}) result = urllib.parse.parse_qs("key=\u0141%C3%A9", encoding="ascii") 
self.assertEqual(result, {'key': ['\u0141\ufffd\ufffd']}) result = urllib.parse.parse_qs("key=\u0141%E9-", encoding="ascii") self.assertEqual(result, {'key': ['\u0141\ufffd-']}) result = urllib.parse.parse_qs("key=\u0141%E9-", encoding="ascii", errors="ignore") self.assertEqual(result, {'key': ['\u0141-']}) def test_parse_qsl_encoding(self): result = urllib.parse.parse_qsl("key=\u0141%E9", encoding="latin-1") self.assertEqual(result, [('key', '\u0141\xE9')]) result = urllib.parse.parse_qsl("key=\u0141%C3%A9", encoding="utf-8") self.assertEqual(result, [('key', '\u0141\xE9')]) result = urllib.parse.parse_qsl("key=\u0141%C3%A9", encoding="ascii") self.assertEqual(result, [('key', '\u0141\ufffd\ufffd')]) result = urllib.parse.parse_qsl("key=\u0141%E9-", encoding="ascii") self.assertEqual(result, [('key', '\u0141\ufffd-')]) result = urllib.parse.parse_qsl("key=\u0141%E9-", encoding="ascii", errors="ignore") self.assertEqual(result, [('key', '\u0141-')]) def test_splitnport(self): # Normal cases are exercised by other tests; ensure that we also # catch cases with no port specified. (testcase ensuring coverage) result = urllib.parse.splitnport('parrot:88') self.assertEqual(result, ('parrot', 88)) result = urllib.parse.splitnport('parrot') self.assertEqual(result, ('parrot', -1)) result = urllib.parse.splitnport('parrot', 55) self.assertEqual(result, ('parrot', 55)) result = urllib.parse.splitnport('parrot:') self.assertEqual(result, ('parrot', None)) def test_splitquery(self): # Normal cases are exercised by other tests; ensure that we also # catch cases with no port specified (testcase ensuring coverage) result = urllib.parse.splitquery('http://python.org/fake?foo=bar') self.assertEqual(result, ('http://python.org/fake', 'foo=bar')) result = urllib.parse.splitquery('http://python.org/fake?foo=bar?') self.assertEqual(result, ('http://python.org/fake?foo=bar', '')) result = urllib.parse.splitquery('http://python.org/fake') self.assertEqual(result, ('http://python.org/fake', None)) def test_splitvalue(self): # Normal cases are exercised by other tests; test pathological cases # with no key/value pairs. (testcase ensuring coverage) result = urllib.parse.splitvalue('foo=bar') self.assertEqual(result, ('foo', 'bar')) result = urllib.parse.splitvalue('foo=') self.assertEqual(result, ('foo', '')) result = urllib.parse.splitvalue('foobar') self.assertEqual(result, ('foobar', None)) def test_to_bytes(self): result = urllib.parse.to_bytes('http://www.python.org') self.assertEqual(result, 'http://www.python.org') self.assertRaises(UnicodeError, urllib.parse.to_bytes, 'http://www.python.org/medi\u00e6val') def test_urlencode_sequences(self): # Other tests incidentally urlencode things; test non-covered cases: # Sequence and object values. 
result = urllib.parse.urlencode({'a': [1, 2], 'b': (3, 4, 5)}, True) self.assertEqual(result, 'a=1&a=2&b=3&b=4&b=5') class Trivial: def __str__(self): return 'trivial' result = urllib.parse.urlencode({'a': Trivial()}, True) self.assertEqual(result, 'a=trivial') def test_quote_from_bytes(self): self.assertRaises(TypeError, urllib.parse.quote_from_bytes, 'foo') result = urllib.parse.quote_from_bytes(b'archaeological arcana') self.assertEqual(result, 'archaeological%20arcana') result = urllib.parse.quote_from_bytes(b'') self.assertEqual(result, '') def test_unquote_to_bytes(self): result = urllib.parse.unquote_to_bytes('abc%20def') self.assertEqual(result, b'abc def') result = urllib.parse.unquote_to_bytes('') self.assertEqual(result, b'') def test_quote_errors(self): self.assertRaises(TypeError, urllib.parse.quote, b'foo', encoding='utf-8') self.assertRaises(TypeError, urllib.parse.quote, b'foo', errors='strict') def test_main(): support.run_unittest(UrlParseTestCase) if __name__ == "__main__": test_main()
lgpl-3.0
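A quick orientation for the test file above: everything it exercises is public urllib.parse API, so the behaviour can be reproduced in a few standard-library calls. A minimal sketch (the base URL is the same RFC 3986 reference base the tests use):

from urllib.parse import urljoin, urlsplit, urldefrag

BASE = 'http://a/b/c/d;p?q'  # RFC 3986 reference base, as in the tests above

# Relative-reference resolution (RFC 3986 section 5.4)
assert urljoin(BASE, 'g') == 'http://a/b/c/g'
assert urljoin(BASE, '../g') == 'http://a/b/g'
assert urljoin(BASE, '//g') == 'http://g'

# Component access on a split result: credentials are stripped from
# .hostname, and a zero-padded port still parses as an int
parts = urlsplit('http://User:Pass@www.python.org:080/doc/?query=yes#frag')
assert parts.hostname == 'www.python.org'
assert parts.port == 80

# Fragment removal; the DefragResult compares equal to a plain 2-tuple
assert urldefrag('http://python.org/#frag') == ('http://python.org/', 'frag')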
krull/docker-zenoss4
init_fs/usr/local/zenoss/ZenPacks/ZenPacks.zenoss.Microsoft.Windows-2.6.9.egg/ZenPacks/zenoss/Microsoft/Windows/tests/testSoftware.py
1
3354
#!/usr/bin/env python # coding=utf-8 ############################################################################## # # Copyright (C) Zenoss, Inc. 2014, all rights reserved. # # This content is made available according to terms specified in # License.zenoss under the directory where your Zenoss product is installed. # ############################################################################## from ZenPacks.zenoss.Microsoft.Windows.tests.mock import Mock from ZenPacks.zenoss.Microsoft.Windows.tests.utils import StringAttributeObject from Products.ZenTestCase.BaseTestCase import BaseTestCase from ZenPacks.zenoss.Microsoft.Windows.modeler.plugins.zenoss.winrm.Software import Software class TestProcesses(BaseTestCase): def setUp(self): self.plugin = Software() self.results = dict(software=Mock(stdout=['DisplayName=;InstallDate=;Vendor=|', 'DisplayName=?????????? Microsoft Report Viewer ??? Visual Studio 2013;' 'InstallDate=20150710;Vendor=Microsoft Corporation |', 'DisplayName=Visual Studio 2013? Microsoft Report Viewer ?? ??;' 'InstallDate=20150710;Vendor=Microsoft Corporation |', 'DisplayName=Soft x86 - 1.0.0;' 'InstallDate=19700101;' 'Vendor=Sunway Systems|', 'DisplayName=Soft x86 - 1.0.0;' 'InstallDate=19700101;' 'Vendor=Вендор|', 'DisplayName=;xxx;yyy|', 'DisplayName=Software;InstallDate=;Vendor=SoftCorp' ])) self.device = StringAttributeObject() def test_process(self): data = self.plugin.process(self.device, self.results, Mock()) self.assertEquals(data.maps[0].id, 'Microsoft Report Viewer _ Visual Studio 2013') self.assertEquals(data.maps[0].setProductKey.args, ('Microsoft Report Viewer _ Visual Studio 2013', 'Microsoft Corporation')) self.assertEquals(data.maps[1].id, 'Visual Studio 2013_ Microsoft Report Viewer') self.assertEquals(data.maps[1].setProductKey.args, ('Visual Studio 2013_ Microsoft Report Viewer', 'Microsoft Corporation')) self.assertEquals(data.maps[2].id, 'Soft x86 - 1.0.0') self.assertEquals(data.maps[2].setInstallDate, '1970/01/01 00:00:00') self.assertTupleEqual(data.maps[2].setProductKey.args, ('Soft x86 - 1.0.0', 'Sunway Systems')) self.assertTupleEqual(data.maps[3].setProductKey.args, ('Soft x86 - 1.0.0', 'Unknown')) self.assertFalse(hasattr(data.maps[4], 'setInstallDate')) self.assertEquals(len(data.maps), 5) def test_suite(): from unittest import TestSuite, makeSuite suite = TestSuite() suite.addTest(makeSuite(TestProcesses)) return suite if __name__ == "__main__": from zope.testrunner.runner import Runner runner = Runner(found_suites=[test_suite()]) runner.run()
gpl-3.0
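A note on the mocked stdout above: each software entry arrives as 'key=value' pairs joined by ';' and terminated by '|'. The real parsing lives in the ZenPack's Software modeler plugin (not shown here); the helper below is only an illustrative sketch of that record format, with the function name invented for the example.

def parse_software_record(line):
    # Split one 'DisplayName=...;InstallDate=...;Vendor=...|' record into a
    # dict, skipping malformed fragments that lack an '=' (as in the
    # 'DisplayName=;xxx;yyy|' test input above).
    fields = {}
    for part in line.rstrip('|').split(';'):
        if '=' in part:
            key, _, value = part.partition('=')
            fields[key.strip()] = value.strip()
    return fields

record = parse_software_record(
    'DisplayName=Soft x86 - 1.0.0;InstallDate=19700101;Vendor=Sunway Systems|')
assert record['Vendor'] == 'Sunway Systems'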
w1kke/pylearn2
pylearn2/packaged_dependencies/theano_linear/imaging.py
44
7922
""" .. todo:: WRITEME """ import logging import sys import numpy from theano.compat.six.moves import xrange from pylearn2.utils.image import Image, ensure_Image logger = logging.getLogger(__name__) def scale_to_unit_interval(ndar,eps=1e-8): """ .. todo:: WRITEME """ ndar = ndar.copy() ndar -= ndar.min() ndar *= 1.0 / max(ndar.max(),eps) return ndar def tile_raster_images(X, img_shape, tile_shape=None, tile_spacing=(1,1), scale_rows_to_unit_interval=True, output_pixel_vals=True, min_dynamic_range=1e-4, ): """ Transform an array with one flattened image per row, into an array in which images are reshaped and layed out like tiles on a floor. This function is useful for visualizing datasets whose rows are images, and also columns of matrices for transforming those rows (such as the first layer of a neural net). Parameters ---------- X : numpy.ndarray or tuple of 4 channels or None A 2-D array in which every row is a flattened image. img_shape : tuple The original shape of each image tile_shape: tuple The number of images to tile (rows, cols). Defaults to a square-ish \ shape with the right area for the number of images. min_dynamic_range: float, positive Dynamic range of each image is used in scaling to the unit interval, \ but images with less dynamic range than this will be scaled as if \ this were the dynamic range. Returns ------- out_array : 2D array with same dtype as X Array suitable for viewing as an image (See:`PIL.Image.fromarray`). """ # This is premature when tile_slices_to_image is not documented at all yet, # but ultimately true: #print >> sys.stderr, "WARN: tile_raster_images sucks, use tile_slices_to_image" if len(img_shape)==3 and img_shape[2]==3: # make this save an rgb image if scale_rows_to_unit_interval: logger.warning("tile_raster_images' scaling routine " "messes up colour - try tile_slices_to_image") return tile_raster_images( (X[:,0::3], X[:,1::3], X[:,2::3], None), img_shape=img_shape[:2], tile_shape=tile_shape, tile_spacing=tile_spacing, scale_rows_to_unit_interval=scale_rows_to_unit_interval, output_pixel_vals=output_pixel_vals, min_dynamic_range=min_dynamic_range) if isinstance(X, tuple): n_images_in_x = X[0].shape[0] else: n_images_in_x = X.shape[0] if tile_shape is None: tile_shape = most_square_shape(n_images_in_x) assert len(img_shape) == 2 assert len(tile_shape) == 2 assert len(tile_spacing) == 2 #out_shape is the shape in pixels of the returned image array out_shape = [(ishp + tsp) * tshp - tsp for ishp, tshp, tsp in zip(img_shape, tile_shape, tile_spacing)] if isinstance(X, tuple): if scale_rows_to_unit_interval: raise NotImplementedError() assert len(X) == 4 if output_pixel_vals: out_array = numpy.zeros((out_shape[0], out_shape[1], 4), dtype='uint8') else: out_array = numpy.zeros((out_shape[0], out_shape[1], 4), dtype=X.dtype) #colors default to 0, alpha defaults to 1 (opaque) if output_pixel_vals: channel_defaults = [0,0,0,255] else: channel_defaults = [0.,0.,0.,1.] 
for i in xrange(4): if X[i] is None: out_array[:,:,i] = numpy.zeros(out_shape, dtype='uint8' if output_pixel_vals else out_array.dtype )+channel_defaults[i] else: out_array[:,:,i] = tile_raster_images(X[i], img_shape, tile_shape, tile_spacing, scale_rows_to_unit_interval, output_pixel_vals) return out_array else: H, W = img_shape Hs, Ws = tile_spacing out_scaling = 1 if output_pixel_vals and str(X.dtype).startswith('float'): out_scaling = 255 out_array = numpy.zeros(out_shape, dtype='uint8' if output_pixel_vals else X.dtype) for tile_row in xrange(tile_shape[0]): for tile_col in xrange(tile_shape[1]): if tile_row * tile_shape[1] + tile_col < X.shape[0]: if scale_rows_to_unit_interval: try: this_img = scale_to_unit_interval( X[tile_row * tile_shape[1] + tile_col].reshape(img_shape), eps=min_dynamic_range) except ValueError: raise ValueError('Failed to reshape array of shape %s to shape %s' % ( X[tile_row*tile_shape[1] + tile_col].shape , img_shape )) else: this_img = X[tile_row * tile_shape[1] + tile_col].reshape(img_shape) out_array[ tile_row * (H+Hs):tile_row*(H+Hs)+H, tile_col * (W+Ws):tile_col*(W+Ws)+W ] \ = this_img * out_scaling return out_array def most_square_shape(N): """ Return a rectangle (height, width) with area N that is closest to square. Parameters ---------- N : int WRITEME Returns ------- WRITEME """ for i in xrange(int(numpy.sqrt(N)),0, -1): if 0 == N % i: return (i, N/i) def save_tiled_raster_images(tiled_img, filename): """ Save a a return value from `tile_raster_images` to `filename`. Returns ------- img : WRITEME The PIL image that was saved """ if tiled_img.ndim==2: ensure_Image() img = Image.fromarray( tiled_img, 'L') elif tiled_img.ndim==3: ensure_Image() img = Image.fromarray(tiled_img, 'RGBA') else: raise TypeError('bad ndim', tiled_img) img.save(filename) return img def tile_slices_to_image_uint8(X, tile_shape=None): """ .. todo:: WRITEME """ if str(X.dtype) != 'uint8': raise TypeError(X) if tile_shape is None: #how many tile rows and cols (TR, TC) = most_square_shape(X.shape[0]) H, W = X.shape[1], X.shape[2] Hs = H+1 #spacing between tiles Ws = W+1 #spacing between tiles trows, tcols= most_square_shape(X.shape[0]) outrows = trows * Hs - 1 outcols = tcols * Ws - 1 out = numpy.zeros((outrows, outcols,3), dtype='uint8') tr_stride= 1+X.shape[1] for tr in range(trows): for tc in range(tcols): Xrc = X[tr*tcols+tc] if Xrc.ndim==2: # if no color channel make it broadcast Xrc=Xrc[:,:,None] #print Xrc.shape #print out[tr*Hs:tr*Hs+H,tc*Ws:tc*Ws+W].shape out[tr*Hs:tr*Hs+H,tc*Ws:tc*Ws+W] = Xrc ensure_Image() img = Image.fromarray(out, 'RGB') return img def tile_slices_to_image(X, tile_shape=None, scale_each=True, min_dynamic_range=1e-4): """ .. todo:: WRITEME """ #always returns an RGB image def scale_0_255(x): xmin = x.min() xmax = x.max() return numpy.asarray( 255 * (x - xmin) / max(xmax - xmin, min_dynamic_range), dtype='uint8') if scale_each: uintX = numpy.empty(X.shape, dtype='uint8') for i, Xi in enumerate(X): uintX[i] = scale_0_255(Xi) X = uintX else: X = scale_0_255(X) return tile_slices_to_image_uint8(X, tile_shape=tile_shape)
bsd-3-clause
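As a usage illustration for the module above: tiling a batch of flattened grayscale patches takes one call, and the output size follows the (img + spacing) * tiles - spacing formula in the code. The import path below matches this record's location in pylearn2, and saving requires PIL/Pillow; the array sizes are made up for the example.

import numpy
from pylearn2.packaged_dependencies.theano_linear.imaging import (
    tile_raster_images, save_tiled_raster_images)

rng = numpy.random.RandomState(0)
X = rng.rand(20, 64)                # 20 flattened 8x8 patches, one per row
tiles = tile_raster_images(X, img_shape=(8, 8), tile_shape=(4, 5))
assert tiles.dtype == numpy.uint8   # output_pixel_vals=True by default
assert tiles.shape == (35, 44)      # (8+1)*4-1 rows, (8+1)*5-1 columns
save_tiled_raster_images(tiles, 'patches.png')  # writes a PIL image to disk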
paxapy/zulip
zerver/management/commands/remove_users_from_stream.py
2
2713
from __future__ import absolute_import from __future__ import print_function from typing import Any from argparse import ArgumentParser from optparse import make_option from django.core.management.base import BaseCommand, CommandParser from zerver.lib.actions import bulk_remove_subscriptions from zerver.models import Realm, UserProfile, get_realm, get_stream, \ get_user_profile_by_email class Command(BaseCommand): help = """Remove some or all users in a realm from a stream.""" def add_arguments(self, parser): # type: (CommandParser) -> None parser.add_argument('-d', '--domain', dest='domain', type=str, help='The name of the realm in which you are ' 'removing people.') parser.add_argument('-s', '--stream', dest='stream', type=str, help='A stream name.') parser.add_argument('-u', '--users', dest='users', type=str, help='A comma-separated list of email addresses.') parser.add_argument('-a', '--all-users', dest='all_users', action="store_true", default=False, help='Remove all users in this realm from this stream.') def handle(self, **options): # type: (*Any, **Any) -> None if options["domain"] is None or options["stream"] is None or \ (options["users"] is None and options["all_users"] is None): self.print_help("python manage.py", "remove_users_from_stream") exit(1) realm = get_realm(options["domain"]) stream_name = options["stream"].strip() stream = get_stream(stream_name, realm) if options["all_users"]: user_profiles = UserProfile.objects.filter(realm=realm) else: emails = set([email.strip() for email in options["users"].split(",")]) user_profiles = [] for email in emails: user_profiles.append(get_user_profile_by_email(email)) result = bulk_remove_subscriptions(user_profiles, [stream]) not_subscribed = result[1] not_subscribed_users = {tup[0] for tup in not_subscribed} for user_profile in user_profiles: if user_profile in not_subscribed_users: print("%s was not subscribed" % (user_profile.email,)) else: print("Removed %s from %s" % (user_profile.email, stream_name))
apache-2.0
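A hedged usage sketch for the command above: besides the shell invocation, Django's call_command can drive it from code, passing the parser's dest names as keyword arguments. The realm domain, stream, and addresses below are placeholders.

from django.core.management import call_command

# Equivalent to:
#   python manage.py remove_users_from_stream -d example.com -s denmark \
#       -u iago@example.com,hamlet@example.com
call_command(
    'remove_users_from_stream',
    domain='example.com',
    stream='denmark',
    users='iago@example.com,hamlet@example.com',
)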
tri2sing/LinearAlgebraPython
test_hw2.py
1
1728
from GF2 import one from vec import Vec from vec import scalar_mul import itertools ## Problem 1 D = {'a','b','c'} v0 = Vec(D, {}) v1 = Vec(D, {'a': 1}) v2 = Vec(D, {'a': 0, 'b': 1}) v3 = Vec(D, { 'b': 2}) v4 = Vec(D, {'a': 10, 'b': 10}) def vec_select(veclist, k): return [Vec(v.D, v.f) for v in veclist if v[k]==0] def vec_sum(veclist, D): return sum(veclist, Vec(D, {})) def vec_select_sum(veclist, k, D): return vec_sum(vec_select(veclist, k), D) def scale_vecs(vecdict): return [scalar_mul(vecdict[k], 1.0/k) for k in vecdict] def GF2_span(D, L): result = [Vec(D, {})] if not L: return result indices = [k for k in range(len(L))] for num in range (1,len(indices)+1): for tuples in itertools.combinations(indices, num): vectorslist = [L[i] for i in list(tuples)] combination = vec_sum(vectorslist, D) result.append(combination) return result print(vec_select([v1, v2, v3, v4], 'a') == [Vec(D,{'b': 1}), Vec(D,{'b': 2})]) print(vec_sum([v1, v2, v3, v4], D) == Vec(D, {'b': 13, 'a': 11})) print(vec_sum([], D) == v0) print(vec_select_sum([v1, v2, v3, v4], 'a', D) == Vec(D, {'b': 3})) v5 = Vec({1,2,3}, {2: 9}) v6 = Vec({1,2,4}, {1: 1, 2: 2, 4: 8}) print(scale_vecs({3: v5, 5: v6}) == [Vec({1,2,3},{2: 3.0}), Vec({1,2,4},{1: 0.2, 2: 0.4, 4: 1.6})]) D = {'a', 'b', 'c'} L = [Vec(D, {'a': one, 'c': one}), Vec(D, {'b': one})] GF2Combs = GF2_span(D, L) print (len(GF2Combs) == 4) print (Vec(D, {}) in GF2Combs) print (Vec(D, {'b': one}) in GF2Combs) print (Vec(D, {'a':one, 'c':one}) in GF2Combs) print (Vec(D, {x:one for x in D}) in GF2Combs)
apache-2.0
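One step worth spelling out from GF2_span above: over GF(2) every coefficient is 0 or 1, so the span of a list L is the set of sums over subsets of L -- 2**len(L) combinations before deduplication, which is why the test expects 4 elements from 2 independent vectors. A standalone sketch of that subset enumeration:

import itertools

def all_subsets(L):
    # Every subset of L, including the empty one (the zero vector's
    # coefficient pattern) -- the same indices GF2_span iterates over.
    return [subset
            for size in range(len(L) + 1)
            for subset in itertools.combinations(L, size)]

subsets = all_subsets(['u', 'v', 'w'])
assert len(subsets) == 2 ** 3   # 8 subsets, hence at most 8 span elements
assert () in subsets            # the empty subset corresponds to the zero vector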
lakshayg/tensorflow
tensorflow/python/eager/tape_test.py
19
6001
# Copyright 2017 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== """Basic tests for gradients.""" from __future__ import absolute_import from __future__ import division from __future__ import print_function from tensorflow.python.eager import backprop from tensorflow.python.eager import context from tensorflow.python.eager import custom_gradient from tensorflow.python.eager import test from tensorflow.python.framework import constant_op from tensorflow.python.framework import dtypes from tensorflow.python.ops import array_ops from tensorflow.python.ops import gradients_impl from tensorflow.python.ops import math_ops # Importing nn_grad for the registration functions. from tensorflow.python.ops import nn_grad # pylint: disable=unused-import from tensorflow.python.ops import nn_ops @custom_gradient.custom_gradient def two_outputs(a, b): mm = math_ops.matmul(a, b) r = math_ops.reduce_sum(mm) def grad(dmm, dr): return [ math_ops.matmul(dmm, b, transpose_b=True) + math_ops.matmul(array_ops.ones_like(b * dr), b, transpose_b=True), math_ops.matmul(a, dmm, transpose_b=True) + math_ops.matmul(a, array_ops.ones_like(a) * dr, transpose_b=True) ] return [mm, r], grad @custom_gradient.custom_gradient def gradient_is_constant(x): result = x * x def grad(dr): return [dr] return result, grad class TapeTest(test.TestCase): def testMultiOutput(self): def fn(x, y): c = x + y # Multiple outputs from split. 
d, f = array_ops.split(c, 2) return d + f a = constant_op.constant([[1., 0.], [0., 1.]]) b = constant_op.constant([[1., 2.], [3., 4.]]) da, db = backprop.gradients_function(fn, [0, 1])(a, b) with context.graph_mode(), self.test_session(): tf_a = constant_op.constant([[1, 0], [0, 1]], dtype=dtypes.float32) tf_b = constant_op.constant([[1, 2], [3, 4]], dtype=dtypes.float32) tf_c = tf_a + tf_b tf_d, tf_f = array_ops.split(tf_c, 2, axis=1) tf_e = tf_d + tf_f tf_da, tf_db = gradients_impl.gradients(tf_e, [tf_a, tf_b]) self.assertAllEqual(da, tf_da.eval()) self.assertAllEqual(db, tf_db.eval()) def testBasicFunctional(self): def forward(a, b): mm = math_ops.matmul(a, b) return math_ops.reduce_sum(mm) aa = constant_op.constant([[1., 0.], [0., 1.]]) bb = constant_op.constant([[1., 2.], [3., 4.]]) da, = backprop.gradients_function(forward, ['a'])(aa, bb) self.assertAllEqual(da, math_ops.matmul( array_ops.ones_like(aa), array_ops.transpose(bb)).numpy()) def testBasicFunctionalPositionalArg(self): def forward(a, b): mm = math_ops.matmul(a, b) return math_ops.reduce_sum(mm) aa = constant_op.constant([[1., 0.], [0., 1.]]) bb = constant_op.constant([[1., 2.], [3., 4.]]) da, = backprop.gradients_function(forward, [0])(aa, bb) self.assertAllEqual(da, math_ops.matmul( array_ops.ones_like(aa), array_ops.transpose(bb)).numpy()) def testBasicFunctionalWithValue(self): def forward(a, b): mm = math_ops.matmul(a, b) return math_ops.reduce_sum(mm) aa = constant_op.constant([[1., 0.], [0., 1.]]) bb = constant_op.constant([[1., 2.], [3., 4.]]) val, (da,) = backprop.val_and_grad_function(forward, ['a'])(aa, bb) self.assertAllEqual(da, math_ops.matmul( array_ops.ones_like(aa), array_ops.transpose(bb))) self.assertAllEqual(val, forward(aa, bb)) def testTwoOutputs(self): def fn(x, y): mm, r = two_outputs(x, y) return r + math_ops.reduce_sum(mm) a = constant_op.constant([[1., 0.], [0., 1.]]) b = constant_op.constant([[1., 2.], [3., 4.]]) da, db = backprop.gradients_function(fn, [0, 1])(a, b) with context.graph_mode(), self.test_session(): tf_a = constant_op.constant([[1, 0], [0, 1]], dtype=dtypes.float32) tf_b = constant_op.constant([[1, 2], [3, 4]], dtype=dtypes.float32) tf_mm = math_ops.matmul(tf_a, tf_b) tf_rr = 2 * math_ops.reduce_sum(tf_mm) tf_da, tf_db = gradients_impl.gradients(tf_rr, [tf_a, tf_b]) self.assertAllEqual(da, tf_da.eval()) self.assertAllEqual(db, tf_db.eval()) def testGcTwoOutputs(self): def fn(x, y): return nn_ops.sparse_softmax_cross_entropy_with_logits(logits=x, labels=y)[0] labels = constant_op.constant([0]) logits = constant_op.constant([[0.0]]) grad, = backprop.gradients_function(fn, [0])(logits, labels) self.assertAllEqual(grad, [[0.0]]) def testTfTensor(self): def fn(x): return x t = constant_op.constant(1.0) g, = backprop.gradients_function(fn, [0])(t) self.assertAllEqual(g, 1.0) def testCustomGradientGraphMode(self): with context.graph_mode(), self.test_session(): @custom_gradient.custom_gradient def f(x): def grad(dresult): return dresult * 10.0 return x, grad inp = constant_op.constant(1.0) grad = gradients_impl.gradients(f(inp), inp) self.assertAllEqual(grad[0].eval(), 10.0) if __name__ == '__main__': test.main()
apache-2.0
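The test above reaches into TensorFlow-internal eager modules; the equivalent public surface is tf.custom_gradient with tf.GradientTape. A minimal sketch under TF 2.x, mirroring the testCustomGradientGraphMode case (identity forward pass, gradient scaled by 10 on the backward pass):

import tensorflow as tf

@tf.custom_gradient
def scale_grad(x):
    def grad(dy):
        # Backward pass: multiply the incoming gradient by 10.
        return dy * 10.0
    return tf.identity(x), grad

x = tf.constant(1.0)
with tf.GradientTape() as tape:
    tape.watch(x)               # constants are not watched automatically
    y = scale_grad(x)
print(tape.gradient(y, x))      # tf.Tensor(10.0, shape=(), dtype=float32)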
ProfessorX/Config
.PyCharm30/system/python_stubs/-1247971765/PyKDE4/kdecore/KService.py
1
4297
# encoding: utf-8 # module PyKDE4.kdecore # from /usr/lib/python3/dist-packages/PyKDE4/kdecore.cpython-34m-x86_64-linux-gnu.so # by generator 1.135 # no doc # imports import PyQt4.QtCore as __PyQt4_QtCore import PyQt4.QtNetwork as __PyQt4_QtNetwork from .KSycocaEntry import KSycocaEntry class KService(KSycocaEntry): # no doc def actions(self, *args, **kwargs): # real signature unknown pass def allowAsDefault(self, *args, **kwargs): # real signature unknown pass def allowMultipleFiles(self, *args, **kwargs): # real signature unknown pass def allServices(self, *args, **kwargs): # real signature unknown pass def categories(self, *args, **kwargs): # real signature unknown pass def comment(self, *args, **kwargs): # real signature unknown pass def dbusStartupType(self, *args, **kwargs): # real signature unknown pass def desktopEntryName(self, *args, **kwargs): # real signature unknown pass def desktopEntryPath(self, *args, **kwargs): # real signature unknown pass def docPath(self, *args, **kwargs): # real signature unknown pass def exec_(self, *args, **kwargs): # real signature unknown pass def genericName(self, *args, **kwargs): # real signature unknown pass def hasMimeType(self, *args, **kwargs): # real signature unknown pass def hasServiceType(self, *args, **kwargs): # real signature unknown pass def icon(self, *args, **kwargs): # real signature unknown pass def initialPreference(self, *args, **kwargs): # real signature unknown pass def isApplication(self, *args, **kwargs): # real signature unknown pass def keywords(self, *args, **kwargs): # real signature unknown pass def library(self, *args, **kwargs): # real signature unknown pass def locateLocal(self, *args, **kwargs): # real signature unknown pass def menuId(self, *args, **kwargs): # real signature unknown pass def mimeTypes(self, *args, **kwargs): # real signature unknown pass def newServicePath(self, *args, **kwargs): # real signature unknown pass def noDisplay(self, *args, **kwargs): # real signature unknown pass def parentApp(self, *args, **kwargs): # real signature unknown pass def path(self, *args, **kwargs): # real signature unknown pass def pluginKeyword(self, *args, **kwargs): # real signature unknown pass def property(self, *args, **kwargs): # real signature unknown pass def serviceByDesktopName(self, *args, **kwargs): # real signature unknown pass def serviceByDesktopPath(self, *args, **kwargs): # real signature unknown pass def serviceByMenuId(self, *args, **kwargs): # real signature unknown pass def serviceByName(self, *args, **kwargs): # real signature unknown pass def serviceByStorageId(self, *args, **kwargs): # real signature unknown pass def serviceTypes(self, *args, **kwargs): # real signature unknown pass def setMenuId(self, *args, **kwargs): # real signature unknown pass def setTerminal(self, *args, **kwargs): # real signature unknown pass def setTerminalOptions(self, *args, **kwargs): # real signature unknown pass def showInKDE(self, *args, **kwargs): # real signature unknown pass def storageId(self, *args, **kwargs): # real signature unknown pass def substituteUid(self, *args, **kwargs): # real signature unknown pass def terminal(self, *args, **kwargs): # real signature unknown pass def terminalOptions(self, *args, **kwargs): # real signature unknown pass def type(self, *args, **kwargs): # real signature unknown pass def untranslatedGenericName(self, *args, **kwargs): # real signature unknown pass def username(self, *args, **kwargs): # real signature unknown pass def __init__(self, *args, **kwargs): # real signature 
unknown
        pass

    DBusMulti = 2
    DBusNone = 0
    DBusStartupType = None  # (!) real value is ''
    DBusUnique = 1
    DBusWait = 3
gpl-2.0
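The stub above records signatures only; as a rough sketch of how the class is typically used (this needs a working KDE4/PyKDE4 environment, and 'konsole' is just an example desktop entry name):

from PyKDE4.kdecore import KService

svc = KService.serviceByDesktopName('konsole')  # falsy result if no such entry
if svc:
    print(svc.desktopEntryName(), svc.exec_(), svc.menuId())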
sahildua2305/eden
static/scripts/tools/run_scheduler_tasks.py
28
4978
#!/usr/bin/env python # -*- coding: utf-8 -*- # Call Web2py scheduler in app models context and run a specified task using # its scheduled arguments. Two options for running this script: # # cd web2py # python applications/<app>/static/scripts/tools/run_scheduler_tasks.py --app=<app> --task=<task> # -- or -- # export WEB2PY_PATH=/path/to/web2py # cd $WEB2PY_PATH/applications/<app>/static/scripts/tools # python run_scheduler_tasks.py --app=<app> --task=<task> # # If task is omitted this will run all scheduled tasks sequentially. If both # args are omitted, the app will default to eden. # # Purpose of this is to allow executing tasks with the Eclipse debugger -- the # usual ways of starting a scheduler worker launchs it in a separate thread or # via exec, which the debugger doesn't step into. # # To set up an Eclipse run config, duplicate your Web2py run config, replace # the file to run with this script, and add at least the --app argument. # The WEB2PY_PATH is not needed in this case since the working directory will # be set to the web2py directory. # # The statement that executes the task function is marked with a comment: # SET BREAKPOINT HERE # Find that, set a breakpoint on that line, then step into the function. import os, sys, argparse if not "WEB2PY_PATH" in os.environ: os.environ["WEB2PY_PATH"] = os.getcwd() else: os.chdir(os.environ["WEB2PY_PATH"]) sys.path.append(os.environ["WEB2PY_PATH"]) if __name__ == "__main__": parser = argparse.ArgumentParser( description = """ Run Web2py scheduler tasks in Web2py application context, without the scheduler. Useful for executing the task in a debugger. Tasks must be scheduled in order to provide their arguments -- they won't be run if not scheduled. If task is specified, the first scheduled instance will be run. If task is not specified, the first scheduled instance for each task will be run sequentially. """, usage = """ export WEB2PY_PATH = /path/to/web2py; python run_scheduler_tasks.py --app=<app> [--task=<task>] [--allargs=<True|False>] """) parser.add_argument( "--app", dest="app", default="eden", help="Application directory name") parser.add_argument( "--task", dest="task", default=None, help="Task name") parser.add_argument( "--allargs", dest="allargs", default=True, help="If True (the default), run task with all scheduled arg sets. If False, run with first set encountered only.") args = vars(parser.parse_args()) app = args["app"] task = args["task"] allargs = args["allargs"] adir = os.path.join("applications", app) if not os.path.exists(adir): print >> sys.stderr, "Application not found: %s" % adir sys.exit(1) from gluon.custom_import import custom_import_install custom_import_install() from gluon.shell import env _env = env(app, c=None, import_models=True) globals().update(**_env) # This is present in case this is a first run of the models. db.commit() from gluon import current # Get tasks from the scheduler_task table. if task: query = (db.scheduler_task.task_name == task) else: query = (db.scheduler_task.id > 0) scheduled_tasks = db(query).select(orderby=db.scheduler_task.task_name) # Pick up the associated function objects from the scheduler's task list. # These are also stored in the S3Task instance -- both lists should be the # same. 
posted_tasks = current._scheduler.tasks #posted_tasks = current.response.s3.tasks task_name = None for task_row in scheduled_tasks: if not allargs and task_row.task_name == task_name: continue task_name = task_row.task_name task_function = posted_tasks.get(task_name, None) if not task_function: print >> sys.stderr, "Skipping task %s as no function in task list" % task_name continue # That args list and vars dict are stored as strings. if task_row.args: task_args = eval(task_row.args) else: task_args = [] if task_row.vars: task_vars = eval(task_row.vars) else: task_vars = {} try: # To examine each task function in the debugger, set a breakpoint # here, then step into the function. task_function(*task_args, **task_vars) # SET BREAKPOINT HERE except Exception: print >> sys.stderr, "Task %s threw:\n" % task_name import traceback exc_type, exc_value, exc_trace = sys.exc_info() traceback.print_exception(exc_type, exc_value, exc_trace, file=sys.stderr) try: # Python docs for sys.exc_info() warn that one must clean up the # extracted trace object. del exc_trace except: pass
mit
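One design note on the script above: web2py's scheduler_task table stores args and vars as Python-literal strings, which the script turns back into objects with eval. When tasks only store plain literals, the standard library's ast.literal_eval is a safer drop-in; a sketch with made-up stored values:

import ast

stored_args = '[1, "high_priority"]'              # as held in scheduler_task.args
stored_vars = "{'user_id': 7, 'dry_run': True}"   # as held in scheduler_task.vars

task_args = ast.literal_eval(stored_args)   # -> [1, 'high_priority']
task_vars = ast.literal_eval(stored_vars)   # -> {'user_id': 7, 'dry_run': True}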
mozilla/feedthefox
feedthefox/users/providers/github/views.py
2
1164
import requests from allauth.socialaccount.providers.oauth2.views import (OAuth2LoginView, OAuth2CallbackView) from allauth.socialaccount.providers.github.views import GitHubOAuth2Adapter class FeedTheFoxGitHubOAuth2Adapter(GitHubOAuth2Adapter): """ A custom GitHub OAuth adapter to be used for fetching the list of private email addresses stored for the given user at GitHub. We store those email addresses in the extra data of each account. """ email_url = 'https://api.github.com/user/emails' def complete_login(self, request, app, token, **kwargs): params = {'access_token': token.token} profile_data = requests.get(self.profile_url, params=params) extra_data = profile_data.json() email_data = requests.get(self.email_url, params=params) extra_data['email_addresses'] = email_data.json() return self.get_provider().sociallogin_from_response(request, extra_data) oauth2_login = OAuth2LoginView.adapter_view(FeedTheFoxGitHubOAuth2Adapter) oauth2_callback = OAuth2CallbackView.adapter_view(FeedTheFoxGitHubOAuth2Adapter)
mpl-2.0
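A sketch of the extra request the adapter above adds: GitHub's /user/emails endpoint returns a JSON list of objects with at least 'email', 'primary', and 'verified' keys. The adapter authenticates with an access_token query parameter (GitHub has since deprecated that in favour of the Authorization header); 'token' below stands in for the value obtained during the OAuth flow.

import requests

resp = requests.get('https://api.github.com/user/emails',
                    params={'access_token': token})  # token: from the OAuth flow
primary = [e['email'] for e in resp.json() if e.get('primary')]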
munnerz/CouchPotatoServer
libs/unrar2/rar_exceptions.py
153
1391
# Copyright (c) 2003-2005 Jimmy Retzlaff, 2008 Konstantin Yegupov # # Permission is hereby granted, free of charge, to any person obtaining # a copy of this software and associated documentation files (the # "Software"), to deal in the Software without restriction, including # without limitation the rights to use, copy, modify, merge, publish, # distribute, sublicense, and/or sell copies of the Software, and to # permit persons to whom the Software is furnished to do so, subject to # the following conditions: # # The above copyright notice and this permission notice shall be # included in all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, # EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF # MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND # NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS # BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN # ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN # CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. # Low level interface - see UnRARDLL\UNRARDLL.TXT class ArchiveHeaderBroken(Exception): pass class InvalidRARArchive(Exception): pass class FileOpenError(Exception): pass class IncorrectRARPassword(Exception): pass class InvalidRARArchiveUsage(Exception): pass
gpl-3.0
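A sketch of how callers typically branch on these exception classes. The RarFile constructor and infolist() call are assumed from this repo's unrar2/__init__.py, so treat the exact signatures as illustrative rather than definitive.

from unrar2 import RarFile
from unrar2.rar_exceptions import (ArchiveHeaderBroken, IncorrectRARPassword,
                                   InvalidRARArchive)

try:
    archive = RarFile('backup.rar', password='secret')
    names = [info.filename for info in archive.infolist()]
except IncorrectRARPassword:
    print('wrong or missing password')
except (InvalidRARArchive, ArchiveHeaderBroken):
    print('not a usable RAR archive')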
huang4fstudio/django
tests/template_tests/filter_tests/test_dictsortreversed.py
342
1066
from django.template.defaultfilters import dictsortreversed from django.test import SimpleTestCase class FunctionTests(SimpleTestCase): def test_sort(self): sorted_dicts = dictsortreversed( [{'age': 23, 'name': 'Barbara-Ann'}, {'age': 63, 'name': 'Ra Ra Rasputin'}, {'name': 'Jonny B Goode', 'age': 18}], 'age', ) self.assertEqual( [sorted(dict.items()) for dict in sorted_dicts], [[('age', 63), ('name', 'Ra Ra Rasputin')], [('age', 23), ('name', 'Barbara-Ann')], [('age', 18), ('name', 'Jonny B Goode')]], ) def test_invalid_values(self): """ If dictsortreversed is passed something other than a list of dictionaries, fail silently. """ self.assertEqual(dictsortreversed([1, 2, 3], 'age'), '') self.assertEqual(dictsortreversed('Hello!', 'age'), '') self.assertEqual(dictsortreversed({'a': 1}, 'age'), '') self.assertEqual(dictsortreversed(1, 'age'), '')
bsd-3-clause
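For readers who want the filter's behaviour at a glance: dictsortreversed sorts a list of dicts by the given key, descending, and returns '' on any input it cannot sort. In a template it appears as {{ people|dictsortreversed:"age" }}; from Python:

from django.template.defaultfilters import dictsortreversed

people = [{'name': 'Barbara-Ann', 'age': 23},
          {'name': 'Ra Ra Rasputin', 'age': 63},
          {'name': 'Jonny B Goode', 'age': 18}]

oldest_first = dictsortreversed(people, 'age')
assert [p['age'] for p in oldest_first] == [63, 23, 18]
assert dictsortreversed('not a list of dicts', 'age') == ''  # fails silently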
jsteemann/arangodb
3rdParty/V8-4.3.61/third_party/python_26/Lib/test/test_bigmem.py
56
38218
from test import test_support from test.test_support import bigmemtest, _1G, _2G, _4G, precisionbigmemtest import unittest import operator import string import sys # Bigmem testing houserules: # # - Try not to allocate too many large objects. It's okay to rely on # refcounting semantics, but don't forget that 's = create_largestring()' # doesn't release the old 's' (if it exists) until well after its new # value has been created. Use 'del s' before the create_largestring call. # # - Do *not* compare large objects using assertEquals or similar. It's a # lengty operation and the errormessage will be utterly useless due to # its size. To make sure whether a result has the right contents, better # to use the strip or count methods, or compare meaningful slices. # # - Don't forget to test for large indices, offsets and results and such, # in addition to large sizes. # # - When repeating an object (say, a substring, or a small list) to create # a large object, make the subobject of a length that is not a power of # 2. That way, int-wrapping problems are more easily detected. # # - While the bigmemtest decorator speaks of 'minsize', all tests will # actually be called with a much smaller number too, in the normal # test run (5Kb currently.) This is so the tests themselves get frequent # testing. Consequently, always make all large allocations based on the # passed-in 'size', and don't rely on the size being very large. Also, # memuse-per-size should remain sane (less than a few thousand); if your # test uses more, adjust 'size' upward, instead. class StrTest(unittest.TestCase): @bigmemtest(minsize=_2G, memuse=2) def test_capitalize(self, size): SUBSTR = ' abc def ghi' s = '-' * size + SUBSTR caps = s.capitalize() self.assertEquals(caps[-len(SUBSTR):], SUBSTR.capitalize()) self.assertEquals(caps.lstrip('-'), SUBSTR) @bigmemtest(minsize=_2G + 10, memuse=1) def test_center(self, size): SUBSTR = ' abc def ghi' s = SUBSTR.center(size) self.assertEquals(len(s), size) lpadsize = rpadsize = (len(s) - len(SUBSTR)) // 2 if len(s) % 2: lpadsize += 1 self.assertEquals(s[lpadsize:-rpadsize], SUBSTR) self.assertEquals(s.strip(), SUBSTR.strip()) @precisionbigmemtest(size=_2G - 1, memuse=1) def test_center_unicode(self, size): SUBSTR = u' abc def ghi' try: s = SUBSTR.center(size) except OverflowError: pass # acceptable on 32-bit else: self.assertEquals(len(s), size) lpadsize = rpadsize = (len(s) - len(SUBSTR)) // 2 if len(s) % 2: lpadsize += 1 self.assertEquals(s[lpadsize:-rpadsize], SUBSTR) self.assertEquals(s.strip(), SUBSTR.strip()) del s @bigmemtest(minsize=_2G, memuse=2) def test_count(self, size): SUBSTR = ' abc def ghi' s = '.' * size + SUBSTR self.assertEquals(s.count('.'), size) s += '.' self.assertEquals(s.count('.'), size + 1) self.assertEquals(s.count(' '), 3) self.assertEquals(s.count('i'), 1) self.assertEquals(s.count('j'), 0) @bigmemtest(minsize=_2G + 2, memuse=3) def test_decode(self, size): s = '.' 
* size self.assertEquals(len(s.decode('utf-8')), size) def basic_encode_test(self, size, enc, c=u'.', expectedsize=None): if expectedsize is None: expectedsize = size s = c * size self.assertEquals(len(s.encode(enc)), expectedsize) @bigmemtest(minsize=_2G + 2, memuse=3) def test_encode(self, size): return self.basic_encode_test(size, 'utf-8') @precisionbigmemtest(size=_4G / 6 + 2, memuse=2) def test_encode_raw_unicode_escape(self, size): try: return self.basic_encode_test(size, 'raw_unicode_escape') except MemoryError: pass # acceptable on 32-bit @precisionbigmemtest(size=_4G / 5 + 70, memuse=3) def test_encode_utf7(self, size): try: return self.basic_encode_test(size, 'utf7') except MemoryError: pass # acceptable on 32-bit @precisionbigmemtest(size=_4G / 4 + 5, memuse=6) def test_encode_utf32(self, size): try: return self.basic_encode_test(size, 'utf32', expectedsize=4*size+4) except MemoryError: pass # acceptable on 32-bit @precisionbigmemtest(size=_2G-1, memuse=2) def test_decodeascii(self, size): return self.basic_encode_test(size, 'ascii', c='A') @precisionbigmemtest(size=_4G / 5, memuse=6+2) def test_unicode_repr_oflw(self, size): try: s = u"\uAAAA"*size r = repr(s) except MemoryError: pass # acceptable on 32-bit else: self.failUnless(s == eval(r)) @bigmemtest(minsize=_2G, memuse=2) def test_endswith(self, size): SUBSTR = ' abc def ghi' s = '-' * size + SUBSTR self.failUnless(s.endswith(SUBSTR)) self.failUnless(s.endswith(s)) s2 = '...' + s self.failUnless(s2.endswith(s)) self.failIf(s.endswith('a' + SUBSTR)) self.failIf(SUBSTR.endswith(s)) @bigmemtest(minsize=_2G + 10, memuse=2) def test_expandtabs(self, size): s = '-' * size tabsize = 8 self.assertEquals(s.expandtabs(), s) del s slen, remainder = divmod(size, tabsize) s = ' \t' * slen s = s.expandtabs(tabsize) self.assertEquals(len(s), size - remainder) self.assertEquals(len(s.strip(' ')), 0) @bigmemtest(minsize=_2G, memuse=2) def test_find(self, size): SUBSTR = ' abc def ghi' sublen = len(SUBSTR) s = ''.join([SUBSTR, '-' * size, SUBSTR]) self.assertEquals(s.find(' '), 0) self.assertEquals(s.find(SUBSTR), 0) self.assertEquals(s.find(' ', sublen), sublen + size) self.assertEquals(s.find(SUBSTR, len(SUBSTR)), sublen + size) self.assertEquals(s.find('i'), SUBSTR.find('i')) self.assertEquals(s.find('i', sublen), sublen + size + SUBSTR.find('i')) self.assertEquals(s.find('i', size), sublen + size + SUBSTR.find('i')) self.assertEquals(s.find('j'), -1) @bigmemtest(minsize=_2G, memuse=2) def test_index(self, size): SUBSTR = ' abc def ghi' sublen = len(SUBSTR) s = ''.join([SUBSTR, '-' * size, SUBSTR]) self.assertEquals(s.index(' '), 0) self.assertEquals(s.index(SUBSTR), 0) self.assertEquals(s.index(' ', sublen), sublen + size) self.assertEquals(s.index(SUBSTR, sublen), sublen + size) self.assertEquals(s.index('i'), SUBSTR.index('i')) self.assertEquals(s.index('i', sublen), sublen + size + SUBSTR.index('i')) self.assertEquals(s.index('i', size), sublen + size + SUBSTR.index('i')) self.assertRaises(ValueError, s.index, 'j') @bigmemtest(minsize=_2G, memuse=2) def test_isalnum(self, size): SUBSTR = '123456' s = 'a' * size + SUBSTR self.failUnless(s.isalnum()) s += '.' self.failIf(s.isalnum()) @bigmemtest(minsize=_2G, memuse=2) def test_isalpha(self, size): SUBSTR = 'zzzzzzz' s = 'a' * size + SUBSTR self.failUnless(s.isalpha()) s += '.' 
self.failIf(s.isalpha()) @bigmemtest(minsize=_2G, memuse=2) def test_isdigit(self, size): SUBSTR = '123456' s = '9' * size + SUBSTR self.failUnless(s.isdigit()) s += 'z' self.failIf(s.isdigit()) @bigmemtest(minsize=_2G, memuse=2) def test_islower(self, size): chars = ''.join([ chr(c) for c in range(255) if not chr(c).isupper() ]) repeats = size // len(chars) + 2 s = chars * repeats self.failUnless(s.islower()) s += 'A' self.failIf(s.islower()) @bigmemtest(minsize=_2G, memuse=2) def test_isspace(self, size): whitespace = ' \f\n\r\t\v' repeats = size // len(whitespace) + 2 s = whitespace * repeats self.failUnless(s.isspace()) s += 'j' self.failIf(s.isspace()) @bigmemtest(minsize=_2G, memuse=2) def test_istitle(self, size): SUBSTR = '123456' s = ''.join(['A', 'a' * size, SUBSTR]) self.failUnless(s.istitle()) s += 'A' self.failUnless(s.istitle()) s += 'aA' self.failIf(s.istitle()) @bigmemtest(minsize=_2G, memuse=2) def test_isupper(self, size): chars = ''.join([ chr(c) for c in range(255) if not chr(c).islower() ]) repeats = size // len(chars) + 2 s = chars * repeats self.failUnless(s.isupper()) s += 'a' self.failIf(s.isupper()) @bigmemtest(minsize=_2G, memuse=2) def test_join(self, size): s = 'A' * size x = s.join(['aaaaa', 'bbbbb']) self.assertEquals(x.count('a'), 5) self.assertEquals(x.count('b'), 5) self.failUnless(x.startswith('aaaaaA')) self.failUnless(x.endswith('Abbbbb')) @bigmemtest(minsize=_2G + 10, memuse=1) def test_ljust(self, size): SUBSTR = ' abc def ghi' s = SUBSTR.ljust(size) self.failUnless(s.startswith(SUBSTR + ' ')) self.assertEquals(len(s), size) self.assertEquals(s.strip(), SUBSTR.strip()) @bigmemtest(minsize=_2G + 10, memuse=2) def test_lower(self, size): s = 'A' * size s = s.lower() self.assertEquals(len(s), size) self.assertEquals(s.count('a'), size) @bigmemtest(minsize=_2G + 10, memuse=1) def test_lstrip(self, size): SUBSTR = 'abc def ghi' s = SUBSTR.rjust(size) self.assertEquals(len(s), size) self.assertEquals(s.lstrip(), SUBSTR.lstrip()) del s s = SUBSTR.ljust(size) self.assertEquals(len(s), size) stripped = s.lstrip() self.failUnless(stripped is s) @bigmemtest(minsize=_2G + 10, memuse=2) def test_replace(self, size): replacement = 'a' s = ' ' * size s = s.replace(' ', replacement) self.assertEquals(len(s), size) self.assertEquals(s.count(replacement), size) s = s.replace(replacement, ' ', size - 4) self.assertEquals(len(s), size) self.assertEquals(s.count(replacement), 4) self.assertEquals(s[-10:], ' aaaa') @bigmemtest(minsize=_2G, memuse=2) def test_rfind(self, size): SUBSTR = ' abc def ghi' sublen = len(SUBSTR) s = ''.join([SUBSTR, '-' * size, SUBSTR]) self.assertEquals(s.rfind(' '), sublen + size + SUBSTR.rfind(' ')) self.assertEquals(s.rfind(SUBSTR), sublen + size) self.assertEquals(s.rfind(' ', 0, size), SUBSTR.rfind(' ')) self.assertEquals(s.rfind(SUBSTR, 0, sublen + size), 0) self.assertEquals(s.rfind('i'), sublen + size + SUBSTR.rfind('i')) self.assertEquals(s.rfind('i', 0, sublen), SUBSTR.rfind('i')) self.assertEquals(s.rfind('i', 0, sublen + size), SUBSTR.rfind('i')) self.assertEquals(s.rfind('j'), -1) @bigmemtest(minsize=_2G, memuse=2) def test_rindex(self, size): SUBSTR = ' abc def ghi' sublen = len(SUBSTR) s = ''.join([SUBSTR, '-' * size, SUBSTR]) self.assertEquals(s.rindex(' '), sublen + size + SUBSTR.rindex(' ')) self.assertEquals(s.rindex(SUBSTR), sublen + size) self.assertEquals(s.rindex(' ', 0, sublen + size - 1), SUBSTR.rindex(' ')) self.assertEquals(s.rindex(SUBSTR, 0, sublen + size), 0) self.assertEquals(s.rindex('i'), sublen + size + 
SUBSTR.rindex('i')) self.assertEquals(s.rindex('i', 0, sublen), SUBSTR.rindex('i')) self.assertEquals(s.rindex('i', 0, sublen + size), SUBSTR.rindex('i')) self.assertRaises(ValueError, s.rindex, 'j') @bigmemtest(minsize=_2G + 10, memuse=1) def test_rjust(self, size): SUBSTR = ' abc def ghi' s = SUBSTR.ljust(size) self.failUnless(s.startswith(SUBSTR + ' ')) self.assertEquals(len(s), size) self.assertEquals(s.strip(), SUBSTR.strip()) @bigmemtest(minsize=_2G + 10, memuse=1) def test_rstrip(self, size): SUBSTR = ' abc def ghi' s = SUBSTR.ljust(size) self.assertEquals(len(s), size) self.assertEquals(s.rstrip(), SUBSTR.rstrip()) del s s = SUBSTR.rjust(size) self.assertEquals(len(s), size) stripped = s.rstrip() self.failUnless(stripped is s) # The test takes about size bytes to build a string, and then about # sqrt(size) substrings of sqrt(size) in size and a list to # hold sqrt(size) items. It's close but just over 2x size. @bigmemtest(minsize=_2G, memuse=2.1) def test_split_small(self, size): # Crudely calculate an estimate so that the result of s.split won't # take up an inordinate amount of memory chunksize = int(size ** 0.5 + 2) SUBSTR = 'a' + ' ' * chunksize s = SUBSTR * chunksize l = s.split() self.assertEquals(len(l), chunksize) self.assertEquals(set(l), set(['a'])) del l l = s.split('a') self.assertEquals(len(l), chunksize + 1) self.assertEquals(set(l), set(['', ' ' * chunksize])) # Allocates a string of twice size (and briefly two) and a list of # size. Because of internal affairs, the s.split() call produces a # list of size times the same one-character string, so we only # suffer for the list size. (Otherwise, it'd cost another 48 times # size in bytes!) Nevertheless, a list of size takes # 8*size bytes. @bigmemtest(minsize=_2G + 5, memuse=10) def test_split_large(self, size): s = ' a' * size + ' ' l = s.split() self.assertEquals(len(l), size) self.assertEquals(set(l), set(['a'])) del l l = s.split('a') self.assertEquals(len(l), size + 1) self.assertEquals(set(l), set([' '])) @bigmemtest(minsize=_2G, memuse=2.1) def test_splitlines(self, size): # Crudely calculate an estimate so that the result of s.split won't # take up an inordinate amount of memory chunksize = int(size ** 0.5 + 2) // 2 SUBSTR = ' ' * chunksize + '\n' + ' ' * chunksize + '\r\n' s = SUBSTR * chunksize l = s.splitlines() self.assertEquals(len(l), chunksize * 2) self.assertEquals(set(l), set([' ' * chunksize])) @bigmemtest(minsize=_2G, memuse=2) def test_startswith(self, size): SUBSTR = ' abc def ghi' s = '-' * size + SUBSTR self.failUnless(s.startswith(s)) self.failUnless(s.startswith('-' * size)) self.failIf(s.startswith(SUBSTR)) @bigmemtest(minsize=_2G, memuse=1) def test_strip(self, size): SUBSTR = ' abc def ghi ' s = SUBSTR.rjust(size) self.assertEquals(len(s), size) self.assertEquals(s.strip(), SUBSTR.strip()) del s s = SUBSTR.ljust(size) self.assertEquals(len(s), size) self.assertEquals(s.strip(), SUBSTR.strip()) @bigmemtest(minsize=_2G, memuse=2) def test_swapcase(self, size): SUBSTR = "aBcDeFG12.'\xa9\x00" sublen = len(SUBSTR) repeats = size // sublen + 2 s = SUBSTR * repeats s = s.swapcase() self.assertEquals(len(s), sublen * repeats) self.assertEquals(s[:sublen * 3], SUBSTR.swapcase() * 3) self.assertEquals(s[-sublen * 3:], SUBSTR.swapcase() * 3) @bigmemtest(minsize=_2G, memuse=2) def test_title(self, size): SUBSTR = 'SpaaHAaaAaham' s = SUBSTR * (size // len(SUBSTR) + 2) s = s.title() self.failUnless(s.startswith((SUBSTR * 3).title())) self.failUnless(s.endswith(SUBSTR.lower() * 3)) @bigmemtest(minsize=_2G, 
memuse=2) def test_translate(self, size): trans = string.maketrans('.aZ', '-!$') SUBSTR = 'aZz.z.Aaz.' sublen = len(SUBSTR) repeats = size // sublen + 2 s = SUBSTR * repeats s = s.translate(trans) self.assertEquals(len(s), repeats * sublen) self.assertEquals(s[:sublen], SUBSTR.translate(trans)) self.assertEquals(s[-sublen:], SUBSTR.translate(trans)) self.assertEquals(s.count('.'), 0) self.assertEquals(s.count('!'), repeats * 2) self.assertEquals(s.count('z'), repeats * 3) @bigmemtest(minsize=_2G + 5, memuse=2) def test_upper(self, size): s = 'a' * size s = s.upper() self.assertEquals(len(s), size) self.assertEquals(s.count('A'), size) @bigmemtest(minsize=_2G + 20, memuse=1) def test_zfill(self, size): SUBSTR = '-568324723598234' s = SUBSTR.zfill(size) self.failUnless(s.endswith('0' + SUBSTR[1:])) self.failUnless(s.startswith('-0')) self.assertEquals(len(s), size) self.assertEquals(s.count('0'), size - len(SUBSTR)) @bigmemtest(minsize=_2G + 10, memuse=2) def test_format(self, size): s = '-' * size sf = '%s' % (s,) self.failUnless(s == sf) del sf sf = '..%s..' % (s,) self.assertEquals(len(sf), len(s) + 4) self.failUnless(sf.startswith('..-')) self.failUnless(sf.endswith('-..')) del s, sf size //= 2 edge = '-' * size s = ''.join([edge, '%s', edge]) del edge s = s % '...' self.assertEquals(len(s), size * 2 + 3) self.assertEquals(s.count('.'), 3) self.assertEquals(s.count('-'), size * 2) @bigmemtest(minsize=_2G + 10, memuse=2) def test_repr_small(self, size): s = '-' * size s = repr(s) self.assertEquals(len(s), size + 2) self.assertEquals(s[0], "'") self.assertEquals(s[-1], "'") self.assertEquals(s.count('-'), size) del s # repr() will create a string four times as large as this 'binary # string', but we don't want to allocate much more than twice # size in total. (We do extra testing in test_repr_large()) size = size // 5 * 2 s = '\x00' * size s = repr(s) self.assertEquals(len(s), size * 4 + 2) self.assertEquals(s[0], "'") self.assertEquals(s[-1], "'") self.assertEquals(s.count('\\'), size) self.assertEquals(s.count('0'), size * 2) @bigmemtest(minsize=_2G + 10, memuse=5) def test_repr_large(self, size): s = '\x00' * size s = repr(s) self.assertEquals(len(s), size * 4 + 2) self.assertEquals(s[0], "'") self.assertEquals(s[-1], "'") self.assertEquals(s.count('\\'), size) self.assertEquals(s.count('0'), size * 2) @bigmemtest(minsize=2**32 / 5, memuse=6+2) def test_unicode_repr(self, size): s = u"\uAAAA" * size self.failUnless(len(repr(s)) > size) # This test is meaningful even with size < 2G, as long as the # doubled string is > 2G (but it tests more if both are > 2G :) @bigmemtest(minsize=_1G + 2, memuse=3) def test_concat(self, size): s = '.' * size self.assertEquals(len(s), size) s = s + s self.assertEquals(len(s), size * 2) self.assertEquals(s.count('.'), size * 2) # This test is meaningful even with size < 2G, as long as the # repeated string is > 2G (but it tests more if both are > 2G :) @bigmemtest(minsize=_1G + 2, memuse=3) def test_repeat(self, size): s = '.' 
* size self.assertEquals(len(s), size) s = s * 2 self.assertEquals(len(s), size * 2) self.assertEquals(s.count('.'), size * 2) @bigmemtest(minsize=_2G + 20, memuse=1) def test_slice_and_getitem(self, size): SUBSTR = '0123456789' sublen = len(SUBSTR) s = SUBSTR * (size // sublen) stepsize = len(s) // 100 stepsize = stepsize - (stepsize % sublen) for i in range(0, len(s) - stepsize, stepsize): self.assertEquals(s[i], SUBSTR[0]) self.assertEquals(s[i:i + sublen], SUBSTR) self.assertEquals(s[i:i + sublen:2], SUBSTR[::2]) if i > 0: self.assertEquals(s[i + sublen - 1:i - 1:-3], SUBSTR[sublen::-3]) # Make sure we do some slicing and indexing near the end of the # string, too. self.assertEquals(s[len(s) - 1], SUBSTR[-1]) self.assertEquals(s[-1], SUBSTR[-1]) self.assertEquals(s[len(s) - 10], SUBSTR[0]) self.assertEquals(s[-sublen], SUBSTR[0]) self.assertEquals(s[len(s):], '') self.assertEquals(s[len(s) - 1:], SUBSTR[-1]) self.assertEquals(s[-1:], SUBSTR[-1]) self.assertEquals(s[len(s) - sublen:], SUBSTR) self.assertEquals(s[-sublen:], SUBSTR) self.assertEquals(len(s[:]), len(s)) self.assertEquals(len(s[:len(s) - 5]), len(s) - 5) self.assertEquals(len(s[5:-5]), len(s) - 10) self.assertRaises(IndexError, operator.getitem, s, len(s)) self.assertRaises(IndexError, operator.getitem, s, len(s) + 1) self.assertRaises(IndexError, operator.getitem, s, len(s) + 1<<31) @bigmemtest(minsize=_2G, memuse=2) def test_contains(self, size): SUBSTR = '0123456789' edge = '-' * (size // 2) s = ''.join([edge, SUBSTR, edge]) del edge self.failUnless(SUBSTR in s) self.failIf(SUBSTR * 2 in s) self.failUnless('-' in s) self.failIf('a' in s) s += 'a' self.failUnless('a' in s) @bigmemtest(minsize=_2G + 10, memuse=2) def test_compare(self, size): s1 = '-' * size s2 = '-' * size self.failUnless(s1 == s2) del s2 s2 = s1 + 'a' self.failIf(s1 == s2) del s2 s2 = '.' * size self.failIf(s1 == s2) @bigmemtest(minsize=_2G + 10, memuse=1) def test_hash(self, size): # Not sure if we can do any meaningful tests here... Even if we # start relying on the exact algorithm used, the result will be # different depending on the size of the C 'long int'. Even this # test is dodgy (there's no *guarantee* that the two things should # have a different hash, even if they, in the current # implementation, almost always do.) s = '\x00' * size h1 = hash(s) del s s = '\x00' * (size + 1) self.failIf(h1 == hash(s)) class TupleTest(unittest.TestCase): # Tuples have a small, fixed-sized head and an array of pointers to # data. Since we're testing 64-bit addressing, we can assume that the # pointers are 8 bytes, and that thus that the tuples take up 8 bytes # per size. # As a side-effect of testing long tuples, these tests happen to test # having more than 2<<31 references to any given object. Hence the # use of different types of objects as contents in different tests. @bigmemtest(minsize=_2G + 2, memuse=16) def test_compare(self, size): t1 = (u'',) * size t2 = (u'',) * size self.failUnless(t1 == t2) del t2 t2 = (u'',) * (size + 1) self.failIf(t1 == t2) del t2 t2 = (1,) * size self.failIf(t1 == t2) # Test concatenating into a single tuple of more than 2G in length, # and concatenating a tuple of more than 2G in length separately, so # the smaller test still gets run even if there isn't memory for the # larger test (but we still let the tester know the larger test is # skipped, in verbose mode.) 
def basic_concat_test(self, size): t = ((),) * size self.assertEquals(len(t), size) t = t + t self.assertEquals(len(t), size * 2) @bigmemtest(minsize=_2G // 2 + 2, memuse=24) def test_concat_small(self, size): return self.basic_concat_test(size) @bigmemtest(minsize=_2G + 2, memuse=24) def test_concat_large(self, size): return self.basic_concat_test(size) @bigmemtest(minsize=_2G // 5 + 10, memuse=8 * 5) def test_contains(self, size): t = (1, 2, 3, 4, 5) * size self.assertEquals(len(t), size * 5) self.failUnless(5 in t) self.failIf((1, 2, 3, 4, 5) in t) self.failIf(0 in t) @bigmemtest(minsize=_2G + 10, memuse=8) def test_hash(self, size): t1 = (0,) * size h1 = hash(t1) del t1 t2 = (0,) * (size + 1) self.failIf(h1 == hash(t2)) @bigmemtest(minsize=_2G + 10, memuse=8) def test_index_and_slice(self, size): t = (None,) * size self.assertEquals(len(t), size) self.assertEquals(t[-1], None) self.assertEquals(t[5], None) self.assertEquals(t[size - 1], None) self.assertRaises(IndexError, operator.getitem, t, size) self.assertEquals(t[:5], (None,) * 5) self.assertEquals(t[-5:], (None,) * 5) self.assertEquals(t[20:25], (None,) * 5) self.assertEquals(t[-25:-20], (None,) * 5) self.assertEquals(t[size - 5:], (None,) * 5) self.assertEquals(t[size - 5:size], (None,) * 5) self.assertEquals(t[size - 6:size - 2], (None,) * 4) self.assertEquals(t[size:size], ()) self.assertEquals(t[size:size+5], ()) # Like test_concat, split in two. def basic_test_repeat(self, size): t = ('',) * size self.assertEquals(len(t), size) t = t * 2 self.assertEquals(len(t), size * 2) @bigmemtest(minsize=_2G // 2 + 2, memuse=24) def test_repeat_small(self, size): return self.basic_test_repeat(size) @bigmemtest(minsize=_2G + 2, memuse=24) def test_repeat_large(self, size): return self.basic_test_repeat(size) @bigmemtest(minsize=_1G - 1, memuse=12) def test_repeat_large_2(self, size): return self.basic_test_repeat(size) @precisionbigmemtest(size=_1G - 1, memuse=9) def test_from_2G_generator(self, size): try: t = tuple(xrange(size)) except MemoryError: pass # acceptable on 32-bit else: count = 0 for item in t: self.assertEquals(item, count) count += 1 self.assertEquals(count, size) @precisionbigmemtest(size=_1G - 25, memuse=9) def test_from_almost_2G_generator(self, size): try: t = tuple(xrange(size)) count = 0 for item in t: self.assertEquals(item, count) count += 1 self.assertEquals(count, size) except MemoryError: pass # acceptable, expected on 32-bit # Like test_concat, split in two. def basic_test_repr(self, size): t = (0,) * size s = repr(t) # The repr of a tuple of 0's is exactly three times the tuple length. self.assertEquals(len(s), size * 3) self.assertEquals(s[:5], '(0, 0') self.assertEquals(s[-5:], '0, 0)') self.assertEquals(s.count('0'), size) @bigmemtest(minsize=_2G // 3 + 2, memuse=8 + 3) def test_repr_small(self, size): return self.basic_test_repr(size) @bigmemtest(minsize=_2G + 2, memuse=8 + 3) def test_repr_large(self, size): return self.basic_test_repr(size) class ListTest(unittest.TestCase): # Like tuples, lists have a small, fixed-sized head and an array of # pointers to data, so 8 bytes per size. Also like tuples, we make the # lists hold references to various objects to test their refcount # limits. 
@bigmemtest(minsize=_2G + 2, memuse=16) def test_compare(self, size): l1 = [u''] * size l2 = [u''] * size self.failUnless(l1 == l2) del l2 l2 = [u''] * (size + 1) self.failIf(l1 == l2) del l2 l2 = [2] * size self.failIf(l1 == l2) # Test concatenating into a single list of more than 2G in length, # and concatenating a list of more than 2G in length separately, so # the smaller test still gets run even if there isn't memory for the # larger test (but we still let the tester know the larger test is # skipped, in verbose mode.) def basic_test_concat(self, size): l = [[]] * size self.assertEquals(len(l), size) l = l + l self.assertEquals(len(l), size * 2) @bigmemtest(minsize=_2G // 2 + 2, memuse=24) def test_concat_small(self, size): return self.basic_test_concat(size) @bigmemtest(minsize=_2G + 2, memuse=24) def test_concat_large(self, size): return self.basic_test_concat(size) def basic_test_inplace_concat(self, size): l = [sys.stdout] * size l += l self.assertEquals(len(l), size * 2) self.failUnless(l[0] is l[-1]) self.failUnless(l[size - 1] is l[size + 1]) @bigmemtest(minsize=_2G // 2 + 2, memuse=24) def test_inplace_concat_small(self, size): return self.basic_test_inplace_concat(size) @bigmemtest(minsize=_2G + 2, memuse=24) def test_inplace_concat_large(self, size): return self.basic_test_inplace_concat(size) @bigmemtest(minsize=_2G // 5 + 10, memuse=8 * 5) def test_contains(self, size): l = [1, 2, 3, 4, 5] * size self.assertEquals(len(l), size * 5) self.failUnless(5 in l) self.failIf([1, 2, 3, 4, 5] in l) self.failIf(0 in l) @bigmemtest(minsize=_2G + 10, memuse=8) def test_hash(self, size): l = [0] * size self.failUnlessRaises(TypeError, hash, l) @bigmemtest(minsize=_2G + 10, memuse=8) def test_index_and_slice(self, size): l = [None] * size self.assertEquals(len(l), size) self.assertEquals(l[-1], None) self.assertEquals(l[5], None) self.assertEquals(l[size - 1], None) self.assertRaises(IndexError, operator.getitem, l, size) self.assertEquals(l[:5], [None] * 5) self.assertEquals(l[-5:], [None] * 5) self.assertEquals(l[20:25], [None] * 5) self.assertEquals(l[-25:-20], [None] * 5) self.assertEquals(l[size - 5:], [None] * 5) self.assertEquals(l[size - 5:size], [None] * 5) self.assertEquals(l[size - 6:size - 2], [None] * 4) self.assertEquals(l[size:size], []) self.assertEquals(l[size:size+5], []) l[size - 2] = 5 self.assertEquals(len(l), size) self.assertEquals(l[-3:], [None, 5, None]) self.assertEquals(l.count(5), 1) self.assertRaises(IndexError, operator.setitem, l, size, 6) self.assertEquals(len(l), size) l[size - 7:] = [1, 2, 3, 4, 5] size -= 2 self.assertEquals(len(l), size) self.assertEquals(l[-7:], [None, None, 1, 2, 3, 4, 5]) l[:7] = [1, 2, 3, 4, 5] size -= 2 self.assertEquals(len(l), size) self.assertEquals(l[:7], [1, 2, 3, 4, 5, None, None]) del l[size - 1] size -= 1 self.assertEquals(len(l), size) self.assertEquals(l[-1], 4) del l[-2:] size -= 2 self.assertEquals(len(l), size) self.assertEquals(l[-1], 2) del l[0] size -= 1 self.assertEquals(len(l), size) self.assertEquals(l[0], 2) del l[:2] size -= 2 self.assertEquals(len(l), size) self.assertEquals(l[0], 4) # Like test_concat, split in two. 
def basic_test_repeat(self, size): l = [] * size self.failIf(l) l = [''] * size self.assertEquals(len(l), size) l = l * 2 self.assertEquals(len(l), size * 2) @bigmemtest(minsize=_2G // 2 + 2, memuse=24) def test_repeat_small(self, size): return self.basic_test_repeat(size) @bigmemtest(minsize=_2G + 2, memuse=24) def test_repeat_large(self, size): return self.basic_test_repeat(size) def basic_test_inplace_repeat(self, size): l = [''] l *= size self.assertEquals(len(l), size) self.failUnless(l[0] is l[-1]) del l l = [''] * size l *= 2 self.assertEquals(len(l), size * 2) self.failUnless(l[size - 1] is l[-1]) @bigmemtest(minsize=_2G // 2 + 2, memuse=16) def test_inplace_repeat_small(self, size): return self.basic_test_inplace_repeat(size) @bigmemtest(minsize=_2G + 2, memuse=16) def test_inplace_repeat_large(self, size): return self.basic_test_inplace_repeat(size) def basic_test_repr(self, size): l = [0] * size s = repr(l) # The repr of a list of 0's is exactly three times the list length. self.assertEquals(len(s), size * 3) self.assertEquals(s[:5], '[0, 0') self.assertEquals(s[-5:], '0, 0]') self.assertEquals(s.count('0'), size) @bigmemtest(minsize=_2G // 3 + 2, memuse=8 + 3) def test_repr_small(self, size): return self.basic_test_repr(size) @bigmemtest(minsize=_2G + 2, memuse=8 + 3) def test_repr_large(self, size): return self.basic_test_repr(size) # list overallocates ~1/8th of the total size (on first expansion) so # the single list.append call puts memuse at 9 bytes per size. @bigmemtest(minsize=_2G, memuse=9) def test_append(self, size): l = [object()] * size l.append(object()) self.assertEquals(len(l), size+1) self.failUnless(l[-3] is l[-2]) self.failIf(l[-2] is l[-1]) @bigmemtest(minsize=_2G // 5 + 2, memuse=8 * 5) def test_count(self, size): l = [1, 2, 3, 4, 5] * size self.assertEquals(l.count(1), size) self.assertEquals(l.count("1"), 0) def basic_test_extend(self, size): l = [file] * size l.extend(l) self.assertEquals(len(l), size * 2) self.failUnless(l[0] is l[-1]) self.failUnless(l[size - 1] is l[size + 1]) @bigmemtest(minsize=_2G // 2 + 2, memuse=16) def test_extend_small(self, size): return self.basic_test_extend(size) @bigmemtest(minsize=_2G + 2, memuse=16) def test_extend_large(self, size): return self.basic_test_extend(size) @bigmemtest(minsize=_2G // 5 + 2, memuse=8 * 5) def test_index(self, size): l = [1L, 2L, 3L, 4L, 5L] * size size *= 5 self.assertEquals(l.index(1), 0) self.assertEquals(l.index(5, size - 5), size - 1) self.assertEquals(l.index(5, size - 5, size), size - 1) self.assertRaises(ValueError, l.index, 1, size - 4, size) self.assertRaises(ValueError, l.index, 6L) # This tests suffers from overallocation, just like test_append. 
@bigmemtest(minsize=_2G + 10, memuse=9) def test_insert(self, size): l = [1.0] * size l.insert(size - 1, "A") size += 1 self.assertEquals(len(l), size) self.assertEquals(l[-3:], [1.0, "A", 1.0]) l.insert(size + 1, "B") size += 1 self.assertEquals(len(l), size) self.assertEquals(l[-3:], ["A", 1.0, "B"]) l.insert(1, "C") size += 1 self.assertEquals(len(l), size) self.assertEquals(l[:3], [1.0, "C", 1.0]) self.assertEquals(l[size - 3:], ["A", 1.0, "B"]) @bigmemtest(minsize=_2G // 5 + 4, memuse=8 * 5) def test_pop(self, size): l = [u"a", u"b", u"c", u"d", u"e"] * size size *= 5 self.assertEquals(len(l), size) item = l.pop() size -= 1 self.assertEquals(len(l), size) self.assertEquals(item, u"e") self.assertEquals(l[-2:], [u"c", u"d"]) item = l.pop(0) size -= 1 self.assertEquals(len(l), size) self.assertEquals(item, u"a") self.assertEquals(l[:2], [u"b", u"c"]) item = l.pop(size - 2) size -= 1 self.assertEquals(len(l), size) self.assertEquals(item, u"c") self.assertEquals(l[-2:], [u"b", u"d"]) @bigmemtest(minsize=_2G + 10, memuse=8) def test_remove(self, size): l = [10] * size self.assertEquals(len(l), size) l.remove(10) size -= 1 self.assertEquals(len(l), size) # Because of the earlier l.remove(), this append doesn't trigger # a resize. l.append(5) size += 1 self.assertEquals(len(l), size) self.assertEquals(l[-2:], [10, 5]) l.remove(5) size -= 1 self.assertEquals(len(l), size) self.assertEquals(l[-2:], [10, 10]) @bigmemtest(minsize=_2G // 5 + 2, memuse=8 * 5) def test_reverse(self, size): l = [1, 2, 3, 4, 5] * size l.reverse() self.assertEquals(len(l), size * 5) self.assertEquals(l[-5:], [5, 4, 3, 2, 1]) self.assertEquals(l[:5], [5, 4, 3, 2, 1]) @bigmemtest(minsize=_2G // 5 + 2, memuse=8 * 5) def test_sort(self, size): l = [1, 2, 3, 4, 5] * size l.sort() self.assertEquals(len(l), size * 5) self.assertEquals(l.count(1), size) self.assertEquals(l[:10], [1] * 10) self.assertEquals(l[-10:], [5] * 10) class BufferTest(unittest.TestCase): @precisionbigmemtest(size=_1G, memuse=4) def test_repeat(self, size): try: b = buffer("AAAA")*size except MemoryError: pass # acceptable on 32-bit else: count = 0 for c in b: self.assertEquals(c, 'A') count += 1 self.assertEquals(count, size*4) def test_main(): test_support.run_unittest(StrTest, TupleTest, ListTest, BufferTest) if __name__ == '__main__': if len(sys.argv) > 1: test_support.set_memlimit(sys.argv[1]) test_main()
apache-2.0
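The `bigmemtest` and `precisionbigmemtest` decorators the tests above lean on come from `test_support` and are not shown in this file. As a rough illustration of the contract the tests assume (skip when the configured memory limit cannot cover the test, otherwise pass a computed `size` into the test method), here is a minimal sketch; the module-level `_memlimit` name and the sizing rule are assumptions, and the real decorator in `test_support` differs in detail:

```python
# A minimal sketch of a bigmemtest-style decorator; the real one lives in
# test_support and differs in detail.  `_memlimit` stands in for the byte
# limit that test_support.set_memlimit() parses from the command line.
import functools
import unittest

_memlimit = 0  # e.g. set_memlimit('4G') would store 4 * 1024**3 here


def bigmemtest(minsize, memuse):
    """Run the test with the largest `size` the memory limit allows,
    skipping it when even `minsize` items (at `memuse` bytes each)
    would not fit."""
    def decorator(test_method):
        @functools.wraps(test_method)
        def wrapper(self):
            if _memlimit < minsize * memuse:
                raise unittest.SkipTest('%d bytes needed, limit is %d'
                                        % (minsize * memuse, _memlimit))
            size = _memlimit // memuse  # largest size the limit permits
            return test_method(self, size)
        return wrapper
    return decorator
```

This matches how the tests invoke it (`@bigmemtest(minsize=_2G + 10, memuse=2)` followed by `def test_x(self, size):`), but the skip threshold and size computation here are simplified placeholders.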
infobloxopen/infoblox-netmri
infoblox_netmri/api/broker/v3_0_0/ipam_sync_broker.py
12
2713
from ..broker import Broker


class IpamSyncBroker(Broker):
    controller = "ipam_sync"

    def status(self, **kwargs):
        """Gets the highest sequence number (SeqNo) currently available for a given IPAM object.

            **Inputs**

            |  ``api version min:`` None
            |  ``api version max:`` None
            |  ``required:`` True
            |  ``default:`` None

            :param type: The IPAM object type: either "network" or "ip".
            :type type: String

            **Outputs**

            |  ``api version min:`` None
            |  ``api version max:`` None
            |  ``required:`` False
            |  ``default:`` None

            :return seq_no: The highest sequence number of a given IPAM object.
            :rtype seq_no: Integer

            """

        return self.api_request(self._get_method_fullname("status"), kwargs)

    def send_refresh(self, **kwargs):
        """Sends a refresh message to the IPAM sync queue consumed by ipam_syncd.

            **Inputs**

            |  ``api version min:`` None
            |  ``api version max:`` None
            |  ``required:`` True
            |  ``default:`` None

            :param type: The IPAM object type: either "network" or "ip".
            :type type: String

            |  ``api version min:`` None
            |  ``api version max:`` None
            |  ``required:`` True
            |  ``default:`` None

            :param since_seq_no: The sequence number from which to start sending IPAM objects.
            :type since_seq_no: Integer

            **Outputs**

            """

        return self.api_request(self._get_method_fullname("send_refresh"), kwargs)

    def send_ip_objects_by_range(self, **kwargs):
        """Sends a refresh message to the IPAM IP sync queue consumed by ipam_ip_syncd.

            **Inputs**

            |  ``api version min:`` None
            |  ``api version max:`` None
            |  ``required:`` True
            |  ``default:`` None

            :param network_view_id: The ID of the network view.
            :type network_view_id: Integer

            |  ``api version min:`` None
            |  ``api version max:`` None
            |  ``required:`` True
            |  ``default:`` None

            :param ip_start: The start of the IP address range.
            :type ip_start: String

            |  ``api version min:`` None
            |  ``api version max:`` None
            |  ``required:`` True
            |  ``default:`` None

            :param ip_end: The end of the IP address range.
            :type ip_end: String

            **Outputs**

            """

        return self.api_request(self._get_method_fullname("send_ip_objects_by_range"), kwargs)
apache-2.0
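A short usage sketch for the broker above. It assumes the `InfobloxNetMRI` client class and its `get_broker()` helper from this library's client module, placeholder host and credentials, and that `status()` returns a dict keyed by `seq_no` as the docstring's `:return seq_no:` line suggests; none of these details are confirmed by the file itself:

```python
# A minimal sketch of driving IpamSyncBroker through the NetMRI client.
# Host and credentials are placeholders; the response shape is an
# assumption based on the ":return seq_no:" docstring.
from infoblox_netmri.client import InfobloxNetMRI

client = InfobloxNetMRI(host="netmri.example.com",
                        username="admin",
                        password="secret")
broker = client.get_broker("IpamSync")

# Highest sequence number currently known for "network" objects.
status = broker.status(type="network")

# Ask ipam_syncd to replay network objects from that point onward.
broker.send_refresh(type="network", since_seq_no=status["seq_no"])
```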
bixbydev/Bixby
google/gdata-2.0.18/build/lib.linux-x86_64-2.7/gdata/tlslite/__init__.py
409
1129
""" TLS Lite is a free python library that implements SSL v3, TLS v1, and TLS v1.1. TLS Lite supports non-traditional authentication methods such as SRP, shared keys, and cryptoIDs, in addition to X.509 certificates. TLS Lite is pure python, however it can access OpenSSL, cryptlib, pycrypto, and GMPY for faster crypto operations. TLS Lite integrates with httplib, xmlrpclib, poplib, imaplib, smtplib, SocketServer, asyncore, and Twisted. To use, do:: from tlslite.api import * Then use the L{tlslite.TLSConnection.TLSConnection} class with a socket, or use one of the integration classes in L{tlslite.integration}. @version: 0.3.8 """ __version__ = "0.3.8" __all__ = ["api", "BaseDB", "Checker", "constants", "errors", "FileObject", "HandshakeSettings", "mathtls", "messages", "Session", "SessionCache", "SharedKeyDB", "TLSConnection", "TLSRecordLayer", "VerifierDB", "X509", "X509CertChain", "integration", "utils"]
gpl-3.0
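The docstring above says to wrap a socket in `TLSConnection`. As a minimal sketch of that usage (Python 2 era, matching the library; the host, the certificate-less `handshakeClientCert()` call, and the raw HTTP request are illustrative assumptions, not part of this file):

```python
# A minimal sketch of the usage the tlslite docstring describes: wrap a
# plain TCP socket in TLSConnection and handshake as a client.
# "example.com" and the HTTP/1.0 request are placeholders.
import socket
from tlslite.api import TLSConnection

sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.connect(("example.com", 443))

connection = TLSConnection(sock)
connection.handshakeClientCert()   # client handshake, no client certificate
connection.write("GET / HTTP/1.0\r\n\r\n")
response = connection.read(max=4096)
connection.close()
```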
int19h/PTVS
Python/Product/Miniconda/Miniconda3-x64/Lib/zipfile.py
6
80379
""" Read and write ZIP files. XXX references to utf-8 need further investigation. """ import io import os import importlib.util import sys import time import stat import shutil import struct import binascii import threading try: import zlib # We may need its compression method crc32 = zlib.crc32 except ImportError: zlib = None crc32 = binascii.crc32 try: import bz2 # We may need its compression method except ImportError: bz2 = None try: import lzma # We may need its compression method except ImportError: lzma = None __all__ = ["BadZipFile", "BadZipfile", "error", "ZIP_STORED", "ZIP_DEFLATED", "ZIP_BZIP2", "ZIP_LZMA", "is_zipfile", "ZipInfo", "ZipFile", "PyZipFile", "LargeZipFile"] class BadZipFile(Exception): pass class LargeZipFile(Exception): """ Raised when writing a zipfile, the zipfile requires ZIP64 extensions and those extensions are disabled. """ error = BadZipfile = BadZipFile # Pre-3.2 compatibility names ZIP64_LIMIT = (1 << 31) - 1 ZIP_FILECOUNT_LIMIT = (1 << 16) - 1 ZIP_MAX_COMMENT = (1 << 16) - 1 # constants for Zip file compression methods ZIP_STORED = 0 ZIP_DEFLATED = 8 ZIP_BZIP2 = 12 ZIP_LZMA = 14 # Other ZIP compression methods not supported DEFAULT_VERSION = 20 ZIP64_VERSION = 45 BZIP2_VERSION = 46 LZMA_VERSION = 63 # we recognize (but not necessarily support) all features up to that version MAX_EXTRACT_VERSION = 63 # Below are some formats and associated data for reading/writing headers using # the struct module. The names and structures of headers/records are those used # in the PKWARE description of the ZIP file format: # http://www.pkware.com/documents/casestudies/APPNOTE.TXT # (URL valid as of January 2008) # The "end of central directory" structure, magic number, size, and indices # (section V.I in the format document) structEndArchive = b"<4s4H2LH" stringEndArchive = b"PK\005\006" sizeEndCentDir = struct.calcsize(structEndArchive) _ECD_SIGNATURE = 0 _ECD_DISK_NUMBER = 1 _ECD_DISK_START = 2 _ECD_ENTRIES_THIS_DISK = 3 _ECD_ENTRIES_TOTAL = 4 _ECD_SIZE = 5 _ECD_OFFSET = 6 _ECD_COMMENT_SIZE = 7 # These last two indices are not part of the structure as defined in the # spec, but they are used internally by this module as a convenience _ECD_COMMENT = 8 _ECD_LOCATION = 9 # The "central directory" structure, magic number, size, and indices # of entries in the structure (section V.F in the format document) structCentralDir = "<4s4B4HL2L5H2L" stringCentralDir = b"PK\001\002" sizeCentralDir = struct.calcsize(structCentralDir) # indexes of entries in the central directory structure _CD_SIGNATURE = 0 _CD_CREATE_VERSION = 1 _CD_CREATE_SYSTEM = 2 _CD_EXTRACT_VERSION = 3 _CD_EXTRACT_SYSTEM = 4 _CD_FLAG_BITS = 5 _CD_COMPRESS_TYPE = 6 _CD_TIME = 7 _CD_DATE = 8 _CD_CRC = 9 _CD_COMPRESSED_SIZE = 10 _CD_UNCOMPRESSED_SIZE = 11 _CD_FILENAME_LENGTH = 12 _CD_EXTRA_FIELD_LENGTH = 13 _CD_COMMENT_LENGTH = 14 _CD_DISK_NUMBER_START = 15 _CD_INTERNAL_FILE_ATTRIBUTES = 16 _CD_EXTERNAL_FILE_ATTRIBUTES = 17 _CD_LOCAL_HEADER_OFFSET = 18 # The "local file header" structure, magic number, size, and indices # (section V.A in the format document) structFileHeader = "<4s2B4HL2L2H" stringFileHeader = b"PK\003\004" sizeFileHeader = struct.calcsize(structFileHeader) _FH_SIGNATURE = 0 _FH_EXTRACT_VERSION = 1 _FH_EXTRACT_SYSTEM = 2 _FH_GENERAL_PURPOSE_FLAG_BITS = 3 _FH_COMPRESSION_METHOD = 4 _FH_LAST_MOD_TIME = 5 _FH_LAST_MOD_DATE = 6 _FH_CRC = 7 _FH_COMPRESSED_SIZE = 8 _FH_UNCOMPRESSED_SIZE = 9 _FH_FILENAME_LENGTH = 10 _FH_EXTRA_FIELD_LENGTH = 11 # The "Zip64 end of central directory locator" structure, magic 
number, and size structEndArchive64Locator = "<4sLQL" stringEndArchive64Locator = b"PK\x06\x07" sizeEndCentDir64Locator = struct.calcsize(structEndArchive64Locator) # The "Zip64 end of central directory" record, magic number, size, and indices # (section V.G in the format document) structEndArchive64 = "<4sQ2H2L4Q" stringEndArchive64 = b"PK\x06\x06" sizeEndCentDir64 = struct.calcsize(structEndArchive64) _CD64_SIGNATURE = 0 _CD64_DIRECTORY_RECSIZE = 1 _CD64_CREATE_VERSION = 2 _CD64_EXTRACT_VERSION = 3 _CD64_DISK_NUMBER = 4 _CD64_DISK_NUMBER_START = 5 _CD64_NUMBER_ENTRIES_THIS_DISK = 6 _CD64_NUMBER_ENTRIES_TOTAL = 7 _CD64_DIRECTORY_SIZE = 8 _CD64_OFFSET_START_CENTDIR = 9 _DD_SIGNATURE = 0x08074b50 _EXTRA_FIELD_STRUCT = struct.Struct('<HH') def _strip_extra(extra, xids): # Remove Extra Fields with specified IDs. unpack = _EXTRA_FIELD_STRUCT.unpack modified = False buffer = [] start = i = 0 while i + 4 <= len(extra): xid, xlen = unpack(extra[i : i + 4]) j = i + 4 + xlen if xid in xids: if i != start: buffer.append(extra[start : i]) start = j modified = True i = j if not modified: return extra return b''.join(buffer) def _check_zipfile(fp): try: if _EndRecData(fp): return True # file has correct magic number except OSError: pass return False def is_zipfile(filename): """Quickly see if a file is a ZIP file by checking the magic number. The filename argument may be a file or file-like object too. """ result = False try: if hasattr(filename, "read"): result = _check_zipfile(fp=filename) else: with open(filename, "rb") as fp: result = _check_zipfile(fp) except OSError: pass return result def _EndRecData64(fpin, offset, endrec): """ Read the ZIP64 end-of-archive records and use that to update endrec """ try: fpin.seek(offset - sizeEndCentDir64Locator, 2) except OSError: # If the seek fails, the file is not large enough to contain a ZIP64 # end-of-archive record, so just return the end record we were given. return endrec data = fpin.read(sizeEndCentDir64Locator) if len(data) != sizeEndCentDir64Locator: return endrec sig, diskno, reloff, disks = struct.unpack(structEndArchive64Locator, data) if sig != stringEndArchive64Locator: return endrec if diskno != 0 or disks != 1: raise BadZipFile("zipfiles that span multiple disks are not supported") # Assume no 'zip64 extensible data' fpin.seek(offset - sizeEndCentDir64Locator - sizeEndCentDir64, 2) data = fpin.read(sizeEndCentDir64) if len(data) != sizeEndCentDir64: return endrec sig, sz, create_version, read_version, disk_num, disk_dir, \ dircount, dircount2, dirsize, diroffset = \ struct.unpack(structEndArchive64, data) if sig != stringEndArchive64: return endrec # Update the original endrec using data from the ZIP64 record endrec[_ECD_SIGNATURE] = sig endrec[_ECD_DISK_NUMBER] = disk_num endrec[_ECD_DISK_START] = disk_dir endrec[_ECD_ENTRIES_THIS_DISK] = dircount endrec[_ECD_ENTRIES_TOTAL] = dircount2 endrec[_ECD_SIZE] = dirsize endrec[_ECD_OFFSET] = diroffset return endrec def _EndRecData(fpin): """Return data from the "End of Central Directory" record, or None. The data is a list of the nine items in the ZIP "End of central dir" record followed by a tenth item, the file seek offset of this record.""" # Determine file size fpin.seek(0, 2) filesize = fpin.tell() # Check to see if this is ZIP file with no archive comment (the # "end of central directory" structure should be the last item in the # file if this is the case). 
try: fpin.seek(-sizeEndCentDir, 2) except OSError: return None data = fpin.read() if (len(data) == sizeEndCentDir and data[0:4] == stringEndArchive and data[-2:] == b"\000\000"): # the signature is correct and there's no comment, unpack structure endrec = struct.unpack(structEndArchive, data) endrec=list(endrec) # Append a blank comment and record start offset endrec.append(b"") endrec.append(filesize - sizeEndCentDir) # Try to read the "Zip64 end of central directory" structure return _EndRecData64(fpin, -sizeEndCentDir, endrec) # Either this is not a ZIP file, or it is a ZIP file with an archive # comment. Search the end of the file for the "end of central directory" # record signature. The comment is the last item in the ZIP file and may be # up to 64K long. It is assumed that the "end of central directory" magic # number does not appear in the comment. maxCommentStart = max(filesize - (1 << 16) - sizeEndCentDir, 0) fpin.seek(maxCommentStart, 0) data = fpin.read() start = data.rfind(stringEndArchive) if start >= 0: # found the magic number; attempt to unpack and interpret recData = data[start:start+sizeEndCentDir] if len(recData) != sizeEndCentDir: # Zip file is corrupted. return None endrec = list(struct.unpack(structEndArchive, recData)) commentSize = endrec[_ECD_COMMENT_SIZE] #as claimed by the zip file comment = data[start+sizeEndCentDir:start+sizeEndCentDir+commentSize] endrec.append(comment) endrec.append(maxCommentStart + start) # Try to read the "Zip64 end of central directory" structure return _EndRecData64(fpin, maxCommentStart + start - filesize, endrec) # Unable to find a valid end of central directory structure return None class ZipInfo (object): """Class with attributes describing each file in the ZIP archive.""" __slots__ = ( 'orig_filename', 'filename', 'date_time', 'compress_type', '_compresslevel', 'comment', 'extra', 'create_system', 'create_version', 'extract_version', 'reserved', 'flag_bits', 'volume', 'internal_attr', 'external_attr', 'header_offset', 'CRC', 'compress_size', 'file_size', '_raw_time', ) def __init__(self, filename="NoName", date_time=(1980,1,1,0,0,0)): self.orig_filename = filename # Original file name in archive # Terminate the file name at the first null byte. Null bytes in file # names are used as tricks by viruses in archives. null_byte = filename.find(chr(0)) if null_byte >= 0: filename = filename[0:null_byte] # This is used to ensure paths in generated ZIP files always use # forward slashes as the directory separator, as required by the # ZIP format specification. 
if os.sep != "/" and os.sep in filename: filename = filename.replace(os.sep, "/") self.filename = filename # Normalized file name self.date_time = date_time # year, month, day, hour, min, sec if date_time[0] < 1980: raise ValueError('ZIP does not support timestamps before 1980') # Standard values: self.compress_type = ZIP_STORED # Type of compression for the file self._compresslevel = None # Level for the compressor self.comment = b"" # Comment for each file self.extra = b"" # ZIP extra data if sys.platform == 'win32': self.create_system = 0 # System which created ZIP archive else: # Assume everything else is unix-y self.create_system = 3 # System which created ZIP archive self.create_version = DEFAULT_VERSION # Version which created ZIP archive self.extract_version = DEFAULT_VERSION # Version needed to extract archive self.reserved = 0 # Must be zero self.flag_bits = 0 # ZIP flag bits self.volume = 0 # Volume number of file header self.internal_attr = 0 # Internal attributes self.external_attr = 0 # External file attributes # Other attributes are set by class ZipFile: # header_offset Byte offset to the file header # CRC CRC-32 of the uncompressed file # compress_size Size of the compressed file # file_size Size of the uncompressed file def __repr__(self): result = ['<%s filename=%r' % (self.__class__.__name__, self.filename)] if self.compress_type != ZIP_STORED: result.append(' compress_type=%s' % compressor_names.get(self.compress_type, self.compress_type)) hi = self.external_attr >> 16 lo = self.external_attr & 0xFFFF if hi: result.append(' filemode=%r' % stat.filemode(hi)) if lo: result.append(' external_attr=%#x' % lo) isdir = self.is_dir() if not isdir or self.file_size: result.append(' file_size=%r' % self.file_size) if ((not isdir or self.compress_size) and (self.compress_type != ZIP_STORED or self.file_size != self.compress_size)): result.append(' compress_size=%r' % self.compress_size) result.append('>') return ''.join(result) def FileHeader(self, zip64=None): """Return the per-file header as a bytes object.""" dt = self.date_time dosdate = (dt[0] - 1980) << 9 | dt[1] << 5 | dt[2] dostime = dt[3] << 11 | dt[4] << 5 | (dt[5] // 2) if self.flag_bits & 0x08: # Set these to zero because we write them after the file data CRC = compress_size = file_size = 0 else: CRC = self.CRC compress_size = self.compress_size file_size = self.file_size extra = self.extra min_version = 0 if zip64 is None: zip64 = file_size > ZIP64_LIMIT or compress_size > ZIP64_LIMIT if zip64: fmt = '<HHQQ' extra = extra + struct.pack(fmt, 1, struct.calcsize(fmt)-4, file_size, compress_size) if file_size > ZIP64_LIMIT or compress_size > ZIP64_LIMIT: if not zip64: raise LargeZipFile("Filesize would require ZIP64 extensions") # File is larger than what fits into a 4 byte integer, # fall back to the ZIP64 extension file_size = 0xffffffff compress_size = 0xffffffff min_version = ZIP64_VERSION if self.compress_type == ZIP_BZIP2: min_version = max(BZIP2_VERSION, min_version) elif self.compress_type == ZIP_LZMA: min_version = max(LZMA_VERSION, min_version) self.extract_version = max(min_version, self.extract_version) self.create_version = max(min_version, self.create_version) filename, flag_bits = self._encodeFilenameFlags() header = struct.pack(structFileHeader, stringFileHeader, self.extract_version, self.reserved, flag_bits, self.compress_type, dostime, dosdate, CRC, compress_size, file_size, len(filename), len(extra)) return header + filename + extra def _encodeFilenameFlags(self): try: return 
self.filename.encode('ascii'), self.flag_bits except UnicodeEncodeError: return self.filename.encode('utf-8'), self.flag_bits | 0x800 def _decodeExtra(self): # Try to decode the extra field. extra = self.extra unpack = struct.unpack while len(extra) >= 4: tp, ln = unpack('<HH', extra[:4]) if ln+4 > len(extra): raise BadZipFile("Corrupt extra field %04x (size=%d)" % (tp, ln)) if tp == 0x0001: if ln >= 24: counts = unpack('<QQQ', extra[4:28]) elif ln == 16: counts = unpack('<QQ', extra[4:20]) elif ln == 8: counts = unpack('<Q', extra[4:12]) elif ln == 0: counts = () else: raise BadZipFile("Corrupt extra field %04x (size=%d)" % (tp, ln)) idx = 0 # ZIP64 extension (large files and/or large archives) if self.file_size in (0xffffffffffffffff, 0xffffffff): self.file_size = counts[idx] idx += 1 if self.compress_size == 0xFFFFFFFF: self.compress_size = counts[idx] idx += 1 if self.header_offset == 0xffffffff: old = self.header_offset self.header_offset = counts[idx] idx+=1 extra = extra[ln+4:] @classmethod def from_file(cls, filename, arcname=None): """Construct an appropriate ZipInfo for a file on the filesystem. filename should be the path to a file or directory on the filesystem. arcname is the name which it will have within the archive (by default, this will be the same as filename, but without a drive letter and with leading path separators removed). """ if isinstance(filename, os.PathLike): filename = os.fspath(filename) st = os.stat(filename) isdir = stat.S_ISDIR(st.st_mode) mtime = time.localtime(st.st_mtime) date_time = mtime[0:6] # Create ZipInfo instance to store file information if arcname is None: arcname = filename arcname = os.path.normpath(os.path.splitdrive(arcname)[1]) while arcname[0] in (os.sep, os.altsep): arcname = arcname[1:] if isdir: arcname += '/' zinfo = cls(arcname, date_time) zinfo.external_attr = (st.st_mode & 0xFFFF) << 16 # Unix attributes if isdir: zinfo.file_size = 0 zinfo.external_attr |= 0x10 # MS-DOS directory flag else: zinfo.file_size = st.st_size return zinfo def is_dir(self): """Return True if this archive member is a directory.""" return self.filename[-1] == '/' # ZIP encryption uses the CRC32 one-byte primitive for scrambling some # internal keys. We noticed that a direct implementation is faster than # relying on binascii.crc32(). _crctable = None def _gen_crc(crc): for j in range(8): if crc & 1: crc = (crc >> 1) ^ 0xEDB88320 else: crc >>= 1 return crc # ZIP supports a password-based form of encryption. Even though known # plaintext attacks have been found against it, it is still useful # to be able to get data out of such a file. 
# # Usage: # zd = _ZipDecrypter(mypwd) # plain_bytes = zd(cypher_bytes) def _ZipDecrypter(pwd): key0 = 305419896 key1 = 591751049 key2 = 878082192 global _crctable if _crctable is None: _crctable = list(map(_gen_crc, range(256))) crctable = _crctable def crc32(ch, crc): """Compute the CRC32 primitive on one byte.""" return (crc >> 8) ^ crctable[(crc ^ ch) & 0xFF] def update_keys(c): nonlocal key0, key1, key2 key0 = crc32(c, key0) key1 = (key1 + (key0 & 0xFF)) & 0xFFFFFFFF key1 = (key1 * 134775813 + 1) & 0xFFFFFFFF key2 = crc32(key1 >> 24, key2) for p in pwd: update_keys(p) def decrypter(data): """Decrypt a bytes object.""" result = bytearray() append = result.append for c in data: k = key2 | 2 c ^= ((k * (k^1)) >> 8) & 0xFF update_keys(c) append(c) return bytes(result) return decrypter class LZMACompressor: def __init__(self): self._comp = None def _init(self): props = lzma._encode_filter_properties({'id': lzma.FILTER_LZMA1}) self._comp = lzma.LZMACompressor(lzma.FORMAT_RAW, filters=[ lzma._decode_filter_properties(lzma.FILTER_LZMA1, props) ]) return struct.pack('<BBH', 9, 4, len(props)) + props def compress(self, data): if self._comp is None: return self._init() + self._comp.compress(data) return self._comp.compress(data) def flush(self): if self._comp is None: return self._init() + self._comp.flush() return self._comp.flush() class LZMADecompressor: def __init__(self): self._decomp = None self._unconsumed = b'' self.eof = False def decompress(self, data): if self._decomp is None: self._unconsumed += data if len(self._unconsumed) <= 4: return b'' psize, = struct.unpack('<H', self._unconsumed[2:4]) if len(self._unconsumed) <= 4 + psize: return b'' self._decomp = lzma.LZMADecompressor(lzma.FORMAT_RAW, filters=[ lzma._decode_filter_properties(lzma.FILTER_LZMA1, self._unconsumed[4:4 + psize]) ]) data = self._unconsumed[4 + psize:] del self._unconsumed result = self._decomp.decompress(data) self.eof = self._decomp.eof return result compressor_names = { 0: 'store', 1: 'shrink', 2: 'reduce', 3: 'reduce', 4: 'reduce', 5: 'reduce', 6: 'implode', 7: 'tokenize', 8: 'deflate', 9: 'deflate64', 10: 'implode', 12: 'bzip2', 14: 'lzma', 18: 'terse', 19: 'lz77', 97: 'wavpack', 98: 'ppmd', } def _check_compression(compression): if compression == ZIP_STORED: pass elif compression == ZIP_DEFLATED: if not zlib: raise RuntimeError( "Compression requires the (missing) zlib module") elif compression == ZIP_BZIP2: if not bz2: raise RuntimeError( "Compression requires the (missing) bz2 module") elif compression == ZIP_LZMA: if not lzma: raise RuntimeError( "Compression requires the (missing) lzma module") else: raise NotImplementedError("That compression method is not supported") def _get_compressor(compress_type, compresslevel=None): if compress_type == ZIP_DEFLATED: if compresslevel is not None: return zlib.compressobj(compresslevel, zlib.DEFLATED, -15) return zlib.compressobj(zlib.Z_DEFAULT_COMPRESSION, zlib.DEFLATED, -15) elif compress_type == ZIP_BZIP2: if compresslevel is not None: return bz2.BZ2Compressor(compresslevel) return bz2.BZ2Compressor() # compresslevel is ignored for ZIP_LZMA elif compress_type == ZIP_LZMA: return LZMACompressor() else: return None def _get_decompressor(compress_type): if compress_type == ZIP_STORED: return None elif compress_type == ZIP_DEFLATED: return zlib.decompressobj(-15) elif compress_type == ZIP_BZIP2: return bz2.BZ2Decompressor() elif compress_type == ZIP_LZMA: return LZMADecompressor() else: descr = compressor_names.get(compress_type) if descr: raise 
NotImplementedError("compression type %d (%s)" % (compress_type, descr)) else: raise NotImplementedError("compression type %d" % (compress_type,)) class _SharedFile: def __init__(self, file, pos, close, lock, writing): self._file = file self._pos = pos self._close = close self._lock = lock self._writing = writing self.seekable = file.seekable self.tell = file.tell def seek(self, offset, whence=0): with self._lock: if self._writing(): raise ValueError("Can't reposition in the ZIP file while " "there is an open writing handle on it. " "Close the writing handle before trying to read.") self._file.seek(offset, whence) self._pos = self._file.tell() return self._pos def read(self, n=-1): with self._lock: if self._writing(): raise ValueError("Can't read from the ZIP file while there " "is an open writing handle on it. " "Close the writing handle before trying to read.") self._file.seek(self._pos) data = self._file.read(n) self._pos = self._file.tell() return data def close(self): if self._file is not None: fileobj = self._file self._file = None self._close(fileobj) # Provide the tell method for unseekable stream class _Tellable: def __init__(self, fp): self.fp = fp self.offset = 0 def write(self, data): n = self.fp.write(data) self.offset += n return n def tell(self): return self.offset def flush(self): self.fp.flush() def close(self): self.fp.close() class ZipExtFile(io.BufferedIOBase): """File-like object for reading an archive member. Is returned by ZipFile.open(). """ # Max size supported by decompressor. MAX_N = 1 << 31 - 1 # Read from compressed files in 4k blocks. MIN_READ_SIZE = 4096 # Chunk size to read during seek MAX_SEEK_READ = 1 << 24 def __init__(self, fileobj, mode, zipinfo, decrypter=None, close_fileobj=False): self._fileobj = fileobj self._decrypter = decrypter self._close_fileobj = close_fileobj self._compress_type = zipinfo.compress_type self._compress_left = zipinfo.compress_size self._left = zipinfo.file_size self._decompressor = _get_decompressor(self._compress_type) self._eof = False self._readbuffer = b'' self._offset = 0 self.newlines = None # Adjust read size for encrypted files since the first 12 bytes # are for the encryption/password information. if self._decrypter is not None: self._compress_left -= 12 self.mode = mode self.name = zipinfo.filename if hasattr(zipinfo, 'CRC'): self._expected_crc = zipinfo.CRC self._running_crc = crc32(b'') else: self._expected_crc = None self._seekable = False try: if fileobj.seekable(): self._orig_compress_start = fileobj.tell() self._orig_compress_size = zipinfo.compress_size self._orig_file_size = zipinfo.file_size self._orig_start_crc = self._running_crc self._seekable = True except AttributeError: pass def __repr__(self): result = ['<%s.%s' % (self.__class__.__module__, self.__class__.__qualname__)] if not self.closed: result.append(' name=%r mode=%r' % (self.name, self.mode)) if self._compress_type != ZIP_STORED: result.append(' compress_type=%s' % compressor_names.get(self._compress_type, self._compress_type)) else: result.append(' [closed]') result.append('>') return ''.join(result) def readline(self, limit=-1): """Read and return a line from the stream. If limit is specified, at most limit bytes will be read. """ if limit < 0: # Shortcut common case - newline found in buffer. 
i = self._readbuffer.find(b'\n', self._offset) + 1 if i > 0: line = self._readbuffer[self._offset: i] self._offset = i return line return io.BufferedIOBase.readline(self, limit) def peek(self, n=1): """Returns buffered bytes without advancing the position.""" if n > len(self._readbuffer) - self._offset: chunk = self.read(n) if len(chunk) > self._offset: self._readbuffer = chunk + self._readbuffer[self._offset:] self._offset = 0 else: self._offset -= len(chunk) # Return up to 512 bytes to reduce allocation overhead for tight loops. return self._readbuffer[self._offset: self._offset + 512] def readable(self): return True def read(self, n=-1): """Read and return up to n bytes. If the argument is omitted, None, or negative, data is read and returned until EOF is reached.. """ if n is None or n < 0: buf = self._readbuffer[self._offset:] self._readbuffer = b'' self._offset = 0 while not self._eof: buf += self._read1(self.MAX_N) return buf end = n + self._offset if end < len(self._readbuffer): buf = self._readbuffer[self._offset:end] self._offset = end return buf n = end - len(self._readbuffer) buf = self._readbuffer[self._offset:] self._readbuffer = b'' self._offset = 0 while n > 0 and not self._eof: data = self._read1(n) if n < len(data): self._readbuffer = data self._offset = n buf += data[:n] break buf += data n -= len(data) return buf def _update_crc(self, newdata): # Update the CRC using the given data. if self._expected_crc is None: # No need to compute the CRC if we don't have a reference value return self._running_crc = crc32(newdata, self._running_crc) # Check the CRC if we're at the end of the file if self._eof and self._running_crc != self._expected_crc: raise BadZipFile("Bad CRC-32 for file %r" % self.name) def read1(self, n): """Read up to n bytes with at most one read() system call.""" if n is None or n < 0: buf = self._readbuffer[self._offset:] self._readbuffer = b'' self._offset = 0 while not self._eof: data = self._read1(self.MAX_N) if data: buf += data break return buf end = n + self._offset if end < len(self._readbuffer): buf = self._readbuffer[self._offset:end] self._offset = end return buf n = end - len(self._readbuffer) buf = self._readbuffer[self._offset:] self._readbuffer = b'' self._offset = 0 if n > 0: while not self._eof: data = self._read1(n) if n < len(data): self._readbuffer = data self._offset = n buf += data[:n] break if data: buf += data break return buf def _read1(self, n): # Read up to n compressed bytes with at most one read() system call, # decrypt and decompress them. if self._eof or n <= 0: return b'' # Read from file. if self._compress_type == ZIP_DEFLATED: ## Handle unconsumed data. 
data = self._decompressor.unconsumed_tail if n > len(data): data += self._read2(n - len(data)) else: data = self._read2(n) if self._compress_type == ZIP_STORED: self._eof = self._compress_left <= 0 elif self._compress_type == ZIP_DEFLATED: n = max(n, self.MIN_READ_SIZE) data = self._decompressor.decompress(data, n) self._eof = (self._decompressor.eof or self._compress_left <= 0 and not self._decompressor.unconsumed_tail) if self._eof: data += self._decompressor.flush() else: data = self._decompressor.decompress(data) self._eof = self._decompressor.eof or self._compress_left <= 0 data = data[:self._left] self._left -= len(data) if self._left <= 0: self._eof = True self._update_crc(data) return data def _read2(self, n): if self._compress_left <= 0: return b'' n = max(n, self.MIN_READ_SIZE) n = min(n, self._compress_left) data = self._fileobj.read(n) self._compress_left -= len(data) if not data: raise EOFError if self._decrypter is not None: data = self._decrypter(data) return data def close(self): try: if self._close_fileobj: self._fileobj.close() finally: super().close() def seekable(self): return self._seekable def seek(self, offset, whence=0): if not self._seekable: raise io.UnsupportedOperation("underlying stream is not seekable") curr_pos = self.tell() if whence == 0: # Seek from start of file new_pos = offset elif whence == 1: # Seek from current position new_pos = curr_pos + offset elif whence == 2: # Seek from EOF new_pos = self._orig_file_size + offset else: raise ValueError("whence must be os.SEEK_SET (0), " "os.SEEK_CUR (1), or os.SEEK_END (2)") if new_pos > self._orig_file_size: new_pos = self._orig_file_size if new_pos < 0: new_pos = 0 read_offset = new_pos - curr_pos buff_offset = read_offset + self._offset if buff_offset >= 0 and buff_offset < len(self._readbuffer): # Just move the _offset index if the new position is in the _readbuffer self._offset = buff_offset read_offset = 0 elif read_offset < 0: # Position is before the current position. 
Reset the ZipExtFile self._fileobj.seek(self._orig_compress_start) self._running_crc = self._orig_start_crc self._compress_left = self._orig_compress_size self._left = self._orig_file_size self._readbuffer = b'' self._offset = 0 self._decompressor = _get_decompressor(self._compress_type) self._eof = False read_offset = new_pos while read_offset > 0: read_len = min(self.MAX_SEEK_READ, read_offset) self.read(read_len) read_offset -= read_len return self.tell() def tell(self): if not self._seekable: raise io.UnsupportedOperation("underlying stream is not seekable") filepos = self._orig_file_size - self._left - len(self._readbuffer) + self._offset return filepos class _ZipWriteFile(io.BufferedIOBase): def __init__(self, zf, zinfo, zip64): self._zinfo = zinfo self._zip64 = zip64 self._zipfile = zf self._compressor = _get_compressor(zinfo.compress_type, zinfo._compresslevel) self._file_size = 0 self._compress_size = 0 self._crc = 0 @property def _fileobj(self): return self._zipfile.fp def writable(self): return True def write(self, data): if self.closed: raise ValueError('I/O operation on closed file.') nbytes = len(data) self._file_size += nbytes self._crc = crc32(data, self._crc) if self._compressor: data = self._compressor.compress(data) self._compress_size += len(data) self._fileobj.write(data) return nbytes def close(self): if self.closed: return super().close() # Flush any data from the compressor, and update header info if self._compressor: buf = self._compressor.flush() self._compress_size += len(buf) self._fileobj.write(buf) self._zinfo.compress_size = self._compress_size else: self._zinfo.compress_size = self._file_size self._zinfo.CRC = self._crc self._zinfo.file_size = self._file_size # Write updated header info if self._zinfo.flag_bits & 0x08: # Write CRC and file sizes after the file data fmt = '<LLQQ' if self._zip64 else '<LLLL' self._fileobj.write(struct.pack(fmt, _DD_SIGNATURE, self._zinfo.CRC, self._zinfo.compress_size, self._zinfo.file_size)) self._zipfile.start_dir = self._fileobj.tell() else: if not self._zip64: if self._file_size > ZIP64_LIMIT: raise RuntimeError('File size unexpectedly exceeded ZIP64 ' 'limit') if self._compress_size > ZIP64_LIMIT: raise RuntimeError('Compressed size unexpectedly exceeded ' 'ZIP64 limit') # Seek backwards and write file header (which will now include # correct CRC and file sizes) # Preserve current position in file self._zipfile.start_dir = self._fileobj.tell() self._fileobj.seek(self._zinfo.header_offset) self._fileobj.write(self._zinfo.FileHeader(self._zip64)) self._fileobj.seek(self._zipfile.start_dir) self._zipfile._writing = False # Successfully written: Add file to our caches self._zipfile.filelist.append(self._zinfo) self._zipfile.NameToInfo[self._zinfo.filename] = self._zinfo class ZipFile: """ Class with methods to open, read, write, close, list zip files. z = ZipFile(file, mode="r", compression=ZIP_STORED, allowZip64=True, compresslevel=None) file: Either the path to the file, or a file-like object. If it is a path, the file will be opened and closed by ZipFile. mode: The mode can be either read 'r', write 'w', exclusive create 'x', or append 'a'. compression: ZIP_STORED (no compression), ZIP_DEFLATED (requires zlib), ZIP_BZIP2 (requires bz2) or ZIP_LZMA (requires lzma). allowZip64: if True ZipFile will create files with ZIP64 extensions when needed, otherwise it will raise an exception when this would be necessary. 
compresslevel: None (default for the given compression type) or an integer specifying the level to pass to the compressor. When using ZIP_STORED or ZIP_LZMA this keyword has no effect. When using ZIP_DEFLATED integers 0 through 9 are accepted. When using ZIP_BZIP2 integers 1 through 9 are accepted. """ fp = None # Set here since __del__ checks it _windows_illegal_name_trans_table = None def __init__(self, file, mode="r", compression=ZIP_STORED, allowZip64=True, compresslevel=None): """Open the ZIP file with mode read 'r', write 'w', exclusive create 'x', or append 'a'.""" if mode not in ('r', 'w', 'x', 'a'): raise ValueError("ZipFile requires mode 'r', 'w', 'x', or 'a'") _check_compression(compression) self._allowZip64 = allowZip64 self._didModify = False self.debug = 0 # Level of printing: 0 through 3 self.NameToInfo = {} # Find file info given name self.filelist = [] # List of ZipInfo instances for archive self.compression = compression # Method of compression self.compresslevel = compresslevel self.mode = mode self.pwd = None self._comment = b'' # Check if we were passed a file-like object if isinstance(file, os.PathLike): file = os.fspath(file) if isinstance(file, str): # No, it's a filename self._filePassed = 0 self.filename = file modeDict = {'r' : 'rb', 'w': 'w+b', 'x': 'x+b', 'a' : 'r+b', 'r+b': 'w+b', 'w+b': 'wb', 'x+b': 'xb'} filemode = modeDict[mode] while True: try: self.fp = io.open(file, filemode) except OSError: if filemode in modeDict: filemode = modeDict[filemode] continue raise break else: self._filePassed = 1 self.fp = file self.filename = getattr(file, 'name', None) self._fileRefCnt = 1 self._lock = threading.RLock() self._seekable = True self._writing = False try: if mode == 'r': self._RealGetContents() elif mode in ('w', 'x'): # set the modified flag so central directory gets written # even if no files are added to the archive self._didModify = True try: self.start_dir = self.fp.tell() except (AttributeError, OSError): self.fp = _Tellable(self.fp) self.start_dir = 0 self._seekable = False else: # Some file-like objects can provide tell() but not seek() try: self.fp.seek(self.start_dir) except (AttributeError, OSError): self._seekable = False elif mode == 'a': try: # See if file is a zip file self._RealGetContents() # seek to start of directory and overwrite self.fp.seek(self.start_dir) except BadZipFile: # file is not a zip file, just append self.fp.seek(0, 2) # set the modified flag so central directory gets written # even if no files are added to the archive self._didModify = True self.start_dir = self.fp.tell() else: raise ValueError("Mode must be 'r', 'w', 'x', or 'a'") except: fp = self.fp self.fp = None self._fpclose(fp) raise def __enter__(self): return self def __exit__(self, type, value, traceback): self.close() def __repr__(self): result = ['<%s.%s' % (self.__class__.__module__, self.__class__.__qualname__)] if self.fp is not None: if self._filePassed: result.append(' file=%r' % self.fp) elif self.filename is not None: result.append(' filename=%r' % self.filename) result.append(' mode=%r' % self.mode) else: result.append(' [closed]') result.append('>') return ''.join(result) def _RealGetContents(self): """Read in the table of contents for the ZIP file.""" fp = self.fp try: endrec = _EndRecData(fp) except OSError: raise BadZipFile("File is not a zip file") if not endrec: raise BadZipFile("File is not a zip file") if self.debug > 1: print(endrec) size_cd = endrec[_ECD_SIZE] # bytes in central directory offset_cd = endrec[_ECD_OFFSET] # offset of central 
directory self._comment = endrec[_ECD_COMMENT] # archive comment # "concat" is zero, unless zip was concatenated to another file concat = endrec[_ECD_LOCATION] - size_cd - offset_cd if endrec[_ECD_SIGNATURE] == stringEndArchive64: # If Zip64 extension structures are present, account for them concat -= (sizeEndCentDir64 + sizeEndCentDir64Locator) if self.debug > 2: inferred = concat + offset_cd print("given, inferred, offset", offset_cd, inferred, concat) # self.start_dir: Position of start of central directory self.start_dir = offset_cd + concat fp.seek(self.start_dir, 0) data = fp.read(size_cd) fp = io.BytesIO(data) total = 0 while total < size_cd: centdir = fp.read(sizeCentralDir) if len(centdir) != sizeCentralDir: raise BadZipFile("Truncated central directory") centdir = struct.unpack(structCentralDir, centdir) if centdir[_CD_SIGNATURE] != stringCentralDir: raise BadZipFile("Bad magic number for central directory") if self.debug > 2: print(centdir) filename = fp.read(centdir[_CD_FILENAME_LENGTH]) flags = centdir[5] if flags & 0x800: # UTF-8 file names extension filename = filename.decode('utf-8') else: # Historical ZIP filename encoding filename = filename.decode('cp437') # Create ZipInfo instance to store file information x = ZipInfo(filename) x.extra = fp.read(centdir[_CD_EXTRA_FIELD_LENGTH]) x.comment = fp.read(centdir[_CD_COMMENT_LENGTH]) x.header_offset = centdir[_CD_LOCAL_HEADER_OFFSET] (x.create_version, x.create_system, x.extract_version, x.reserved, x.flag_bits, x.compress_type, t, d, x.CRC, x.compress_size, x.file_size) = centdir[1:12] if x.extract_version > MAX_EXTRACT_VERSION: raise NotImplementedError("zip file version %.1f" % (x.extract_version / 10)) x.volume, x.internal_attr, x.external_attr = centdir[15:18] # Convert date/time code to (year, month, day, hour, min, sec) x._raw_time = t x.date_time = ( (d>>9)+1980, (d>>5)&0xF, d&0x1F, t>>11, (t>>5)&0x3F, (t&0x1F) * 2 ) x._decodeExtra() x.header_offset = x.header_offset + concat self.filelist.append(x) self.NameToInfo[x.filename] = x # update total bytes read from central directory total = (total + sizeCentralDir + centdir[_CD_FILENAME_LENGTH] + centdir[_CD_EXTRA_FIELD_LENGTH] + centdir[_CD_COMMENT_LENGTH]) if self.debug > 2: print("total", total) def namelist(self): """Return a list of file names in the archive.""" return [data.filename for data in self.filelist] def infolist(self): """Return a list of class ZipInfo instances for files in the archive.""" return self.filelist def printdir(self, file=None): """Print a table of contents for the zip file.""" print("%-46s %19s %12s" % ("File Name", "Modified ", "Size"), file=file) for zinfo in self.filelist: date = "%d-%02d-%02d %02d:%02d:%02d" % zinfo.date_time[:6] print("%-46s %s %12d" % (zinfo.filename, date, zinfo.file_size), file=file) def testzip(self): """Read all the files and check the CRC.""" chunk_size = 2 ** 20 for zinfo in self.filelist: try: # Read by chunks, to avoid an OverflowError or a # MemoryError with very large embedded files. 
    def getinfo(self, name):
        """Return the instance of ZipInfo given 'name'."""
        info = self.NameToInfo.get(name)
        if info is None:
            raise KeyError(
                'There is no item named %r in the archive' % name)

        return info

    def setpassword(self, pwd):
        """Set default password for encrypted files."""
        if pwd and not isinstance(pwd, bytes):
            raise TypeError("pwd: expected bytes, got %s" % type(pwd).__name__)
        if pwd:
            self.pwd = pwd
        else:
            self.pwd = None

    @property
    def comment(self):
        """The comment text associated with the ZIP file."""
        return self._comment

    @comment.setter
    def comment(self, comment):
        if not isinstance(comment, bytes):
            raise TypeError("comment: expected bytes, got %s" % type(comment).__name__)
        # check for valid comment length
        if len(comment) > ZIP_MAX_COMMENT:
            import warnings
            warnings.warn('Archive comment is too long; truncating to %d bytes'
                          % ZIP_MAX_COMMENT, stacklevel=2)
            comment = comment[:ZIP_MAX_COMMENT]
        self._comment = comment
        self._didModify = True

    def read(self, name, pwd=None):
        """Return file bytes for name."""
        with self.open(name, "r", pwd) as fp:
            return fp.read()

    def open(self, name, mode="r", pwd=None, *, force_zip64=False):
        """Return file-like object for 'name'.

        name is a string for the file name within the ZIP file, or a ZipInfo
        object.

        mode should be 'r' to read a file already in the ZIP file, or 'w' to
        write to a file newly added to the archive.

        pwd is the password to decrypt files (only used for reading).

        When writing, if the file size is not known in advance but may exceed
        2 GiB, pass force_zip64 to use the ZIP64 format, which can handle large
        files.  If the size is known in advance, it is best to pass a ZipInfo
        instance for name, with zinfo.file_size set.
        """
        if mode not in {"r", "w"}:
            raise ValueError('open() requires mode "r" or "w"')
        if pwd and not isinstance(pwd, bytes):
            raise TypeError("pwd: expected bytes, got %s" % type(pwd).__name__)
        if pwd and (mode == "w"):
            raise ValueError("pwd is only supported for reading files")
        if not self.fp:
            raise ValueError(
                "Attempt to use ZIP archive that was already closed")

        # Make sure we have an info object
        if isinstance(name, ZipInfo):
            # 'name' is already an info object
            zinfo = name
        elif mode == 'w':
            zinfo = ZipInfo(name)
            zinfo.compress_type = self.compression
            zinfo._compresslevel = self.compresslevel
        else:
            # Get info object for name
            zinfo = self.getinfo(name)

        if mode == 'w':
            return self._open_to_write(zinfo, force_zip64=force_zip64)

        if self._writing:
            raise ValueError("Can't read from the ZIP file while there "
                             "is an open writing handle on it. "
                             "Close the writing handle before trying to read.")

        # Open for reading:
        self._fileRefCnt += 1
        zef_file = _SharedFile(self.fp, zinfo.header_offset,
                               self._fpclose, self._lock, lambda: self._writing)
        try:
            # Skip the file header:
            fheader = zef_file.read(sizeFileHeader)
            if len(fheader) != sizeFileHeader:
                raise BadZipFile("Truncated file header")
            fheader = struct.unpack(structFileHeader, fheader)
            if fheader[_FH_SIGNATURE] != stringFileHeader:
                raise BadZipFile("Bad magic number for file header")

            fname = zef_file.read(fheader[_FH_FILENAME_LENGTH])
            if fheader[_FH_EXTRA_FIELD_LENGTH]:
                zef_file.read(fheader[_FH_EXTRA_FIELD_LENGTH])

            if zinfo.flag_bits & 0x20:
                # Zip 2.7: compressed patched data
                raise NotImplementedError("compressed patched data (flag bit 5)")

            if zinfo.flag_bits & 0x40:
                # strong encryption
                raise NotImplementedError("strong encryption (flag bit 6)")

            if zinfo.flag_bits & 0x800:
                # UTF-8 filename
                fname_str = fname.decode("utf-8")
            else:
                fname_str = fname.decode("cp437")

            if fname_str != zinfo.orig_filename:
                raise BadZipFile(
                    'File name in directory %r and header %r differ.'
                    % (zinfo.orig_filename, fname))

            # check for encrypted flag & handle password
            is_encrypted = zinfo.flag_bits & 0x1
            zd = None
            if is_encrypted:
                if not pwd:
                    pwd = self.pwd
                if not pwd:
                    raise RuntimeError("File %r is encrypted, password "
                                       "required for extraction" % name)

                zd = _ZipDecrypter(pwd)
                # The first 12 bytes in the cypher stream is an encryption header
                #  used to strengthen the algorithm. The first 11 bytes are
                #  completely random, while the 12th contains the MSB of the CRC,
                #  or the MSB of the file time depending on the header type
                #  and is used to check the correctness of the password.
                header = zef_file.read(12)
                h = zd(header[0:12])
                if zinfo.flag_bits & 0x8:
                    # compare against the file type from extended local headers
                    check_byte = (zinfo._raw_time >> 8) & 0xff
                else:
                    # compare against the CRC otherwise
                    check_byte = (zinfo.CRC >> 24) & 0xff
                if h[11] != check_byte:
                    raise RuntimeError("Bad password for file %r" % name)

            return ZipExtFile(zef_file, mode, zinfo, zd, True)
        except:
            zef_file.close()
            raise
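    # Usage sketch (illustrative only; member names are hypothetical):
    #
    #   with ZipFile('example.zip') as zf:
    #       data = zf.read('member.txt')            # whole member at once
    #       with zf.open('member.txt') as f:        # or stream it
    #           first_kb = f.read(1024)
    #   with ZipFile('example.zip', 'a') as zf:
    #       with zf.open('added.txt', 'w') as f:    # streaming write
    #           f.write(b'hello')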
" "Close the writing handle before trying to read.") # Open for reading: self._fileRefCnt += 1 zef_file = _SharedFile(self.fp, zinfo.header_offset, self._fpclose, self._lock, lambda: self._writing) try: # Skip the file header: fheader = zef_file.read(sizeFileHeader) if len(fheader) != sizeFileHeader: raise BadZipFile("Truncated file header") fheader = struct.unpack(structFileHeader, fheader) if fheader[_FH_SIGNATURE] != stringFileHeader: raise BadZipFile("Bad magic number for file header") fname = zef_file.read(fheader[_FH_FILENAME_LENGTH]) if fheader[_FH_EXTRA_FIELD_LENGTH]: zef_file.read(fheader[_FH_EXTRA_FIELD_LENGTH]) if zinfo.flag_bits & 0x20: # Zip 2.7: compressed patched data raise NotImplementedError("compressed patched data (flag bit 5)") if zinfo.flag_bits & 0x40: # strong encryption raise NotImplementedError("strong encryption (flag bit 6)") if zinfo.flag_bits & 0x800: # UTF-8 filename fname_str = fname.decode("utf-8") else: fname_str = fname.decode("cp437") if fname_str != zinfo.orig_filename: raise BadZipFile( 'File name in directory %r and header %r differ.' % (zinfo.orig_filename, fname)) # check for encrypted flag & handle password is_encrypted = zinfo.flag_bits & 0x1 zd = None if is_encrypted: if not pwd: pwd = self.pwd if not pwd: raise RuntimeError("File %r is encrypted, password " "required for extraction" % name) zd = _ZipDecrypter(pwd) # The first 12 bytes in the cypher stream is an encryption header # used to strengthen the algorithm. The first 11 bytes are # completely random, while the 12th contains the MSB of the CRC, # or the MSB of the file time depending on the header type # and is used to check the correctness of the password. header = zef_file.read(12) h = zd(header[0:12]) if zinfo.flag_bits & 0x8: # compare against the file type from extended local headers check_byte = (zinfo._raw_time >> 8) & 0xff else: # compare against the CRC otherwise check_byte = (zinfo.CRC >> 24) & 0xff if h[11] != check_byte: raise RuntimeError("Bad password for file %r" % name) return ZipExtFile(zef_file, mode, zinfo, zd, True) except: zef_file.close() raise def _open_to_write(self, zinfo, force_zip64=False): if force_zip64 and not self._allowZip64: raise ValueError( "force_zip64 is True, but allowZip64 was False when opening " "the ZIP file." ) if self._writing: raise ValueError("Can't write to the ZIP file while there is " "another write handle open on it. " "Close the first handle before opening another.") # Sizes and CRC are overwritten with correct data after processing the file if not hasattr(zinfo, 'file_size'): zinfo.file_size = 0 zinfo.compress_size = 0 zinfo.CRC = 0 zinfo.flag_bits = 0x00 if zinfo.compress_type == ZIP_LZMA: # Compressed data includes an end-of-stream (EOS) marker zinfo.flag_bits |= 0x02 if not self._seekable: zinfo.flag_bits |= 0x08 if not zinfo.external_attr: zinfo.external_attr = 0o600 << 16 # permissions: ?rw------- # Compressed size can be larger than uncompressed size zip64 = self._allowZip64 and \ (force_zip64 or zinfo.file_size * 1.05 > ZIP64_LIMIT) if self._seekable: self.fp.seek(self.start_dir) zinfo.header_offset = self.fp.tell() self._writecheck(zinfo) self._didModify = True self.fp.write(zinfo.FileHeader(zip64)) self._writing = True return _ZipWriteFile(self, zinfo, zip64) def extract(self, member, path=None, pwd=None): """Extract a member from the archive to the current working directory, using its full name. Its file information is extracted as accurately as possible. `member' may be a filename or a ZipInfo object. 
    @classmethod
    def _sanitize_windows_name(cls, arcname, pathsep):
        """Replace bad characters and remove trailing dots from parts."""
        table = cls._windows_illegal_name_trans_table
        if not table:
            illegal = ':<>|"?*'
            table = str.maketrans(illegal, '_' * len(illegal))
            cls._windows_illegal_name_trans_table = table
        arcname = arcname.translate(table)
        # remove trailing dots
        arcname = (x.rstrip('.') for x in arcname.split(pathsep))
        # rejoin, removing empty parts.
        arcname = pathsep.join(x for x in arcname if x)
        return arcname

    def _extract_member(self, member, targetpath, pwd):
        """Extract the ZipInfo object 'member' to a physical
           file on the path targetpath.
        """
        if not isinstance(member, ZipInfo):
            member = self.getinfo(member)

        # build the destination pathname, replacing
        # forward slashes to platform specific separators.
        arcname = member.filename.replace('/', os.path.sep)

        if os.path.altsep:
            arcname = arcname.replace(os.path.altsep, os.path.sep)
        # interpret absolute pathname as relative, remove drive letter or
        # UNC path, redundant separators, "." and ".." components.
        arcname = os.path.splitdrive(arcname)[1]
        invalid_path_parts = ('', os.path.curdir, os.path.pardir)
        arcname = os.path.sep.join(x for x in arcname.split(os.path.sep)
                                   if x not in invalid_path_parts)
        if os.path.sep == '\\':
            # filter illegal characters on Windows
            arcname = self._sanitize_windows_name(arcname, os.path.sep)

        targetpath = os.path.join(targetpath, arcname)
        targetpath = os.path.normpath(targetpath)

        # Create all upper directories if necessary.
        upperdirs = os.path.dirname(targetpath)
        if upperdirs and not os.path.exists(upperdirs):
            os.makedirs(upperdirs)

        if member.is_dir():
            if not os.path.isdir(targetpath):
                os.mkdir(targetpath)
            return targetpath

        with self.open(member, pwd=pwd) as source, \
             open(targetpath, "wb") as target:
            shutil.copyfileobj(source, target)

        return targetpath

    def _writecheck(self, zinfo):
        """Check for errors before writing a file to the archive."""
        if zinfo.filename in self.NameToInfo:
            import warnings
            warnings.warn('Duplicate name: %r' % zinfo.filename, stacklevel=3)
        if self.mode not in ('w', 'x', 'a'):
            raise ValueError("write() requires mode 'w', 'x', or 'a'")
        if not self.fp:
            raise ValueError(
                "Attempt to write ZIP archive that was already closed")
        _check_compression(zinfo.compress_type)
        if not self._allowZip64:
            requires_zip64 = None
            if len(self.filelist) >= ZIP_FILECOUNT_LIMIT:
                requires_zip64 = "Files count"
            elif zinfo.file_size > ZIP64_LIMIT:
                requires_zip64 = "Filesize"
            elif zinfo.header_offset > ZIP64_LIMIT:
                requires_zip64 = "Zipfile size"
            if requires_zip64:
                raise LargeZipFile(requires_zip64 +
                                   " would require ZIP64 extensions")

    def write(self, filename, arcname=None,
              compress_type=None, compresslevel=None):
        """Put the bytes from filename into the archive under the name
        arcname."""
        if not self.fp:
            raise ValueError(
                "Attempt to write to ZIP archive that was already closed")
        if self._writing:
            raise ValueError(
                "Can't write to ZIP archive while an open writing handle exists"
            )

        zinfo = ZipInfo.from_file(filename, arcname)

        if zinfo.is_dir():
            zinfo.compress_size = 0
            zinfo.CRC = 0
        else:
            if compress_type is not None:
                zinfo.compress_type = compress_type
            else:
                zinfo.compress_type = self.compression

            if compresslevel is not None:
                zinfo._compresslevel = compresslevel
            else:
                zinfo._compresslevel = self.compresslevel

        if zinfo.is_dir():
            with self._lock:
                if self._seekable:
                    self.fp.seek(self.start_dir)
                zinfo.header_offset = self.fp.tell()  # Start of header bytes
                if zinfo.compress_type == ZIP_LZMA:
                    # Compressed data includes an end-of-stream (EOS) marker
                    zinfo.flag_bits |= 0x02

                self._writecheck(zinfo)
                self._didModify = True

                self.filelist.append(zinfo)
                self.NameToInfo[zinfo.filename] = zinfo
                self.fp.write(zinfo.FileHeader(False))
                self.start_dir = self.fp.tell()
        else:
            with open(filename, "rb") as src, self.open(zinfo, 'w') as dest:
                shutil.copyfileobj(src, dest, 1024*8)

    def writestr(self, zinfo_or_arcname, data,
                 compress_type=None, compresslevel=None):
        """Write a file into the archive.  The contents is 'data', which
        may be either a 'str' or a 'bytes' instance; if it is a 'str',
        it is encoded as UTF-8 first.
        'zinfo_or_arcname' is either a ZipInfo instance or
        the name of the file in the archive."""
        if isinstance(data, str):
            data = data.encode("utf-8")
        if not isinstance(zinfo_or_arcname, ZipInfo):
            zinfo = ZipInfo(filename=zinfo_or_arcname,
                            date_time=time.localtime(time.time())[:6])
            zinfo.compress_type = self.compression
            zinfo._compresslevel = self.compresslevel
            if zinfo.filename[-1] == '/':
                zinfo.external_attr = 0o40775 << 16   # drwxrwxr-x
                zinfo.external_attr |= 0x10           # MS-DOS directory flag
            else:
                zinfo.external_attr = 0o600 << 16     # ?rw-------
        else:
            zinfo = zinfo_or_arcname

        if not self.fp:
            raise ValueError(
                "Attempt to write to ZIP archive that was already closed")
        if self._writing:
            raise ValueError(
                "Can't write to ZIP archive while an open writing handle exists."
            )

        if compress_type is not None:
            zinfo.compress_type = compress_type

        if compresslevel is not None:
            zinfo._compresslevel = compresslevel

        zinfo.file_size = len(data)            # Uncompressed size
        with self._lock:
            with self.open(zinfo, mode='w') as dest:
                dest.write(data)
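    # Usage sketch (illustrative only; paths are hypothetical):
    #
    #   with ZipFile('out.zip', 'w', compression=ZIP_DEFLATED) as zf:
    #       zf.write('report.txt')                    # file from disk
    #       zf.writestr('notes/readme.txt', 'hello')  # in-memory data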
    def __del__(self):
        """Call the "close()" method in case the user forgot."""
        self.close()

    def close(self):
        """Close the file, and for mode 'w', 'x' and 'a' write the ending
        records."""
        if self.fp is None:
            return

        if self._writing:
            raise ValueError("Can't close the ZIP file while there is "
                             "an open writing handle on it. "
                             "Close the writing handle before closing the zip.")

        try:
            if self.mode in ('w', 'x', 'a') and self._didModify: # write ending records
                with self._lock:
                    if self._seekable:
                        self.fp.seek(self.start_dir)
                    self._write_end_record()
        finally:
            fp = self.fp
            self.fp = None
            self._fpclose(fp)

    def _write_end_record(self):
        for zinfo in self.filelist:         # write central directory
            dt = zinfo.date_time
            dosdate = (dt[0] - 1980) << 9 | dt[1] << 5 | dt[2]
            dostime = dt[3] << 11 | dt[4] << 5 | (dt[5] // 2)
            extra = []
            if zinfo.file_size > ZIP64_LIMIT \
               or zinfo.compress_size > ZIP64_LIMIT:
                extra.append(zinfo.file_size)
                extra.append(zinfo.compress_size)
                file_size = 0xffffffff
                compress_size = 0xffffffff
            else:
                file_size = zinfo.file_size
                compress_size = zinfo.compress_size

            if zinfo.header_offset > ZIP64_LIMIT:
                extra.append(zinfo.header_offset)
                header_offset = 0xffffffff
            else:
                header_offset = zinfo.header_offset

            extra_data = zinfo.extra
            min_version = 0
            if extra:
                # Append a ZIP64 field to the extra's
                extra_data = _strip_extra(extra_data, (1,))
                extra_data = struct.pack(
                    '<HH' + 'Q'*len(extra),
                    1, 8*len(extra), *extra) + extra_data

                min_version = ZIP64_VERSION

            if zinfo.compress_type == ZIP_BZIP2:
                min_version = max(BZIP2_VERSION, min_version)
            elif zinfo.compress_type == ZIP_LZMA:
                min_version = max(LZMA_VERSION, min_version)

            extract_version = max(min_version, zinfo.extract_version)
            create_version = max(min_version, zinfo.create_version)
            try:
                filename, flag_bits = zinfo._encodeFilenameFlags()
                centdir = struct.pack(structCentralDir,
                                      stringCentralDir, create_version,
                                      zinfo.create_system, extract_version, zinfo.reserved,
                                      flag_bits, zinfo.compress_type, dostime, dosdate,
                                      zinfo.CRC, compress_size, file_size,
                                      len(filename), len(extra_data), len(zinfo.comment),
                                      0, zinfo.internal_attr, zinfo.external_attr,
                                      header_offset)
            except DeprecationWarning:
                print((structCentralDir, stringCentralDir, create_version,
                       zinfo.create_system, extract_version, zinfo.reserved,
                       zinfo.flag_bits, zinfo.compress_type, dostime, dosdate,
                       zinfo.CRC, compress_size, file_size,
                       len(zinfo.filename), len(extra_data), len(zinfo.comment),
                       0, zinfo.internal_attr, zinfo.external_attr,
                       header_offset), file=sys.stderr)
                raise
            self.fp.write(centdir)
            self.fp.write(filename)
            self.fp.write(extra_data)
            self.fp.write(zinfo.comment)

        pos2 = self.fp.tell()
        # Write end-of-zip-archive record
        centDirCount = len(self.filelist)
        centDirSize = pos2 - self.start_dir
        centDirOffset = self.start_dir
        requires_zip64 = None
        if centDirCount > ZIP_FILECOUNT_LIMIT:
            requires_zip64 = "Files count"
        elif centDirOffset > ZIP64_LIMIT:
            requires_zip64 = "Central directory offset"
        elif centDirSize > ZIP64_LIMIT:
            requires_zip64 = "Central directory size"
        if requires_zip64:
            # Need to write the ZIP64 end-of-archive records
            if not self._allowZip64:
                raise LargeZipFile(requires_zip64 +
                                   " would require ZIP64 extensions")
            zip64endrec = struct.pack(
                structEndArchive64, stringEndArchive64,
                44, 45, 45, 0, 0, centDirCount, centDirCount,
                centDirSize, centDirOffset)
            self.fp.write(zip64endrec)

            zip64locrec = struct.pack(
                structEndArchive64Locator,
                stringEndArchive64Locator, 0, pos2, 1)
            self.fp.write(zip64locrec)
            centDirCount = min(centDirCount, 0xFFFF)
            centDirSize = min(centDirSize, 0xFFFFFFFF)
            centDirOffset = min(centDirOffset, 0xFFFFFFFF)

        endrec = struct.pack(structEndArchive, stringEndArchive,
                             0, 0, centDirCount, centDirCount,
                             centDirSize, centDirOffset, len(self._comment))
        self.fp.write(endrec)
        self.fp.write(self._comment)
        self.fp.flush()

    def _fpclose(self, fp):
        assert self._fileRefCnt > 0
        self._fileRefCnt -= 1
        if not self._fileRefCnt and not self._filePassed:
            fp.close()


class PyZipFile(ZipFile):
    """Class to create ZIP archives with Python library files and packages."""

    def __init__(self, file, mode="r", compression=ZIP_STORED,
                 allowZip64=True, optimize=-1):
        ZipFile.__init__(self, file, mode=mode, compression=compression,
                         allowZip64=allowZip64)
        self._optimize = optimize

    def writepy(self, pathname, basename="", filterfunc=None):
        """Add all files from "pathname" to the ZIP archive.

        If pathname is a package directory, search the directory and
        all package subdirectories recursively for all *.py and enter
        the modules into the archive.  If pathname is a plain
        directory, listdir *.py and enter all modules.  Else, pathname
        must be a Python *.py file and the module will be put into the
        archive.  Added modules are always module.pyc.
        This method will compile the module.py into module.pyc if
        necessary.
        If filterfunc(pathname) is given, it is called with every argument.
        When it is False, the file or directory is skipped.
        """
        pathname = os.fspath(pathname)
        if filterfunc and not filterfunc(pathname):
            if self.debug:
                label = 'path' if os.path.isdir(pathname) else 'file'
                print('%s %r skipped by filterfunc' % (label, pathname))
            return
        dir, name = os.path.split(pathname)
        if os.path.isdir(pathname):
            initname = os.path.join(pathname, "__init__.py")
            if os.path.isfile(initname):
                # This is a package directory, add it
                if basename:
                    basename = "%s/%s" % (basename, name)
                else:
                    basename = name
                if self.debug:
                    print("Adding package in", pathname, "as", basename)
                fname, arcname = self._get_codename(initname[0:-3], basename)
                if self.debug:
                    print("Adding", arcname)
                self.write(fname, arcname)
                dirlist = sorted(os.listdir(pathname))
                dirlist.remove("__init__.py")
                # Add all *.py files and package subdirectories
                for filename in dirlist:
                    path = os.path.join(pathname, filename)
                    root, ext = os.path.splitext(filename)
                    if os.path.isdir(path):
                        if os.path.isfile(os.path.join(path, "__init__.py")):
                            # This is a package directory, add it
                            self.writepy(path, basename,
                                         filterfunc=filterfunc)  # Recursive call
                    elif ext == ".py":
                        if filterfunc and not filterfunc(path):
                            if self.debug:
                                print('file %r skipped by filterfunc' % path)
                            continue
                        fname, arcname = self._get_codename(path[0:-3],
                                                            basename)
                        if self.debug:
                            print("Adding", arcname)
                        self.write(fname, arcname)
            else:
                # This is NOT a package directory, add its files at top level
                if self.debug:
                    print("Adding files from directory", pathname)
                for filename in sorted(os.listdir(pathname)):
                    path = os.path.join(pathname, filename)
                    root, ext = os.path.splitext(filename)
                    if ext == ".py":
                        if filterfunc and not filterfunc(path):
                            if self.debug:
                                print('file %r skipped by filterfunc' % path)
                            continue
                        fname, arcname = self._get_codename(path[0:-3],
                                                            basename)
                        if self.debug:
                            print("Adding", arcname)
                        self.write(fname, arcname)
        else:
            if pathname[-3:] != ".py":
                raise RuntimeError(
                    'Files added with writepy() must end with ".py"')
            fname, arcname = self._get_codename(pathname[0:-3], basename)
            if self.debug:
                print("Adding file", arcname)
            self.write(fname, arcname)

    def _get_codename(self, pathname, basename):
        """Return (filename, archivename) for the path.

        Given a module name path, return the correct file path and
        archive name, compiling if necessary.  For example, given
        /python/lib/string, return (/python/lib/string.pyc, string).
        """
        def _compile(file, optimize=-1):
            import py_compile
            if self.debug:
                print("Compiling", file)
            try:
                py_compile.compile(file, doraise=True, optimize=optimize)
            except py_compile.PyCompileError as err:
                print(err.msg)
                return False
            return True

        file_py  = pathname + ".py"
        file_pyc = pathname + ".pyc"
        pycache_opt0 = importlib.util.cache_from_source(file_py,
                                                        optimization='')
        pycache_opt1 = importlib.util.cache_from_source(file_py,
                                                        optimization=1)
        pycache_opt2 = importlib.util.cache_from_source(file_py,
                                                        optimization=2)
        if self._optimize == -1:
            # legacy mode: use whatever file is present
            if (os.path.isfile(file_pyc) and
                os.stat(file_pyc).st_mtime >= os.stat(file_py).st_mtime):
                # Use .pyc file.
                arcname = fname = file_pyc
            elif (os.path.isfile(pycache_opt0) and
                  os.stat(pycache_opt0).st_mtime >= os.stat(file_py).st_mtime):
                # Use the __pycache__/*.pyc file, but write it to the legacy pyc
                # file name in the archive.
                fname = pycache_opt0
                arcname = file_pyc
            elif (os.path.isfile(pycache_opt1) and
                  os.stat(pycache_opt1).st_mtime >= os.stat(file_py).st_mtime):
                # Use the __pycache__/*.pyc file, but write it to the legacy pyc
                # file name in the archive.
                fname = pycache_opt1
                arcname = file_pyc
            elif (os.path.isfile(pycache_opt2) and
                  os.stat(pycache_opt2).st_mtime >= os.stat(file_py).st_mtime):
                # Use the __pycache__/*.pyc file, but write it to the legacy pyc
                # file name in the archive.
                fname = pycache_opt2
                arcname = file_pyc
            else:
                # Compile py into PEP 3147 pyc file.
                if _compile(file_py):
                    if sys.flags.optimize == 0:
                        fname = pycache_opt0
                    elif sys.flags.optimize == 1:
                        fname = pycache_opt1
                    else:
                        fname = pycache_opt2
                    arcname = file_pyc
                else:
                    fname = arcname = file_py
        else:
            # new mode: use given optimization level
            if self._optimize == 0:
                fname = pycache_opt0
                arcname = file_pyc
            else:
                arcname = file_pyc
                if self._optimize == 1:
                    fname = pycache_opt1
                elif self._optimize == 2:
                    fname = pycache_opt2
                else:
                    msg = "invalid value for 'optimize': {!r}".format(self._optimize)
                    raise ValueError(msg)
            if not (os.path.isfile(fname) and
                    os.stat(fname).st_mtime >= os.stat(file_py).st_mtime):
                if not _compile(file_py, optimize=self._optimize):
                    fname = arcname = file_py
        archivename = os.path.split(arcname)[1]
        if basename:
            archivename = "%s/%s" % (basename, archivename)
        return (fname, archivename)
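# Usage sketch (illustrative only; 'mypkg' is a hypothetical package dir):
#
#   with PyZipFile('mypkg.zip', 'w', optimize=2) as pzf:
#       pzf.writepy('mypkg')   # adds compiled mypkg/*.pyc under 'mypkg/'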
def main(args=None):
    import argparse
    description = 'A simple command-line interface for zipfile module.'
    parser = argparse.ArgumentParser(description=description)
    group = parser.add_mutually_exclusive_group(required=True)
    group.add_argument('-l', '--list', metavar='<zipfile>',
                       help='Show listing of a zipfile')
    group.add_argument('-e', '--extract', nargs=2,
                       metavar=('<zipfile>', '<output_dir>'),
                       help='Extract zipfile into target dir')
    group.add_argument('-c', '--create', nargs='+',
                       metavar=('<name>', '<file>'),
                       help='Create zipfile from sources')
    group.add_argument('-t', '--test', metavar='<zipfile>',
                       help='Test if a zipfile is valid')
    args = parser.parse_args(args)

    if args.test is not None:
        src = args.test
        with ZipFile(src, 'r') as zf:
            badfile = zf.testzip()
        if badfile:
            print("The following enclosed file is corrupted: {!r}".format(badfile))
        print("Done testing")

    elif args.list is not None:
        src = args.list
        with ZipFile(src, 'r') as zf:
            zf.printdir()

    elif args.extract is not None:
        src, curdir = args.extract
        with ZipFile(src, 'r') as zf:
            zf.extractall(curdir)

    elif args.create is not None:
        zip_name = args.create.pop(0)
        files = args.create

        def addToZip(zf, path, zippath):
            if os.path.isfile(path):
                zf.write(path, zippath, ZIP_DEFLATED)
            elif os.path.isdir(path):
                if zippath:
                    zf.write(path, zippath)
                for nm in sorted(os.listdir(path)):
                    addToZip(zf,
                             os.path.join(path, nm), os.path.join(zippath, nm))
            # else: ignore

        with ZipFile(zip_name, 'w') as zf:
            for path in files:
                zippath = os.path.basename(path)
                if not zippath:
                    zippath = os.path.basename(os.path.dirname(path))
                if zippath in ('', os.curdir, os.pardir):
                    zippath = ''
                addToZip(zf, path, zippath)


if __name__ == "__main__":
    main()
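# CLI sketch matching the argparse options above (archive names are
# hypothetical):
#
#   python -m zipfile -l example.zip
#   python -m zipfile -c out.zip file1.txt somedir/
#   python -m zipfile -e example.zip /tmp/out
#   python -m zipfile -t example.zip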
apache-2.0
dblia/nosql-ganeti
test/py/ganeti.rapi.rlib2_unittest.py
2
58976
#!/usr/bin/python
#

# Copyright (C) 2010, 2012 Google Inc.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301, USA.


"""Script for unittesting the RAPI rlib2 module

"""

import unittest
import itertools
import random

from ganeti import constants
from ganeti import opcodes
from ganeti import compat
from ganeti import http
from ganeti import query
from ganeti import luxi
from ganeti import errors
from ganeti import rapi

from ganeti.rapi import rlib2
from ganeti.rapi import baserlib
from ganeti.rapi import connector

import testutils


class _FakeRequestPrivateData:
  def __init__(self, body_data):
    self.body_data = body_data


class _FakeRequest:
  def __init__(self, body_data):
    self.private = _FakeRequestPrivateData(body_data)


def _CreateHandler(cls, items, queryargs, body_data, client_cls):
  return cls(items, queryargs, _FakeRequest(body_data),
             _client_cls=client_cls)


class _FakeClient:
  def __init__(self, address=None):
    self._jobs = []

  def GetNextSubmittedJob(self):
    return self._jobs.pop(0)

  def SubmitJob(self, ops):
    job_id = str(1 + int(random.random() * 1000000))
    self._jobs.append((job_id, ops))
    return job_id


class _FakeClientFactory:
  def __init__(self, cls):
    self._client_cls = cls
    self._clients = []

  def GetNextClient(self):
    return self._clients.pop(0)

  def __call__(self, address=None):
    cl = self._client_cls(address=address)
    self._clients.append(cl)
    return cl
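# Harness sketch (illustrative only): every test below wires a RAPI resource
# to these fakes, invokes an HTTP method, then inspects the opcode that the
# fake LUXI client captured instead of submitting it to a real master:
#
#   clfactory = _FakeClientFactory(_FakeClient)
#   handler = _CreateHandler(rlib2.R_2_redist_config, [], [], None, clfactory)
#   job_id = handler.PUT()
#   (exp_job_id, (op, )) = clfactory.GetNextClient().GetNextSubmittedJob()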
class TestConstants(unittest.TestCase):
  def testConsole(self):
    # Exporting the console field without authentication might expose
    # information
    assert "console" in query.INSTANCE_FIELDS
    self.assertTrue("console" not in rlib2.I_FIELDS)

  def testFields(self):
    checks = {
      constants.QR_INSTANCE: rlib2.I_FIELDS,
      constants.QR_NODE: rlib2.N_FIELDS,
      constants.QR_GROUP: rlib2.G_FIELDS,
      }

    for (qr, fields) in checks.items():
      self.assertFalse(set(fields) - set(query.ALL_FIELDS[qr].keys()))


class TestClientConnectError(unittest.TestCase):
  @staticmethod
  def _FailingClient(address=None):
    raise luxi.NoMasterError("test")

  def test(self):
    resources = [
      rlib2.R_2_groups,
      rlib2.R_2_instances,
      rlib2.R_2_nodes,
      ]
    for cls in resources:
      handler = _CreateHandler(cls, ["name"], [], None, self._FailingClient)
      self.assertRaises(http.HttpBadGateway, handler.GET)


class TestJobSubmitError(unittest.TestCase):
  class _SubmitErrorClient:
    def __init__(self, address=None):
      pass

    @staticmethod
    def SubmitJob(ops):
      raise errors.JobQueueFull("test")

  def test(self):
    handler = _CreateHandler(rlib2.R_2_redist_config, [], [], None,
                             self._SubmitErrorClient)
    self.assertRaises(http.HttpServiceUnavailable, handler.PUT)


class TestClusterModify(unittest.TestCase):
  def test(self):
    clfactory = _FakeClientFactory(_FakeClient)
    handler = _CreateHandler(rlib2.R_2_cluster_modify, [], [], {
      "vg_name": "testvg",
      "candidate_pool_size": 100,
      }, clfactory)
    job_id = handler.PUT()

    cl = clfactory.GetNextClient()
    self.assertRaises(IndexError, clfactory.GetNextClient)

    (exp_job_id, (op, )) = cl.GetNextSubmittedJob()
    self.assertEqual(job_id, exp_job_id)
    self.assertTrue(isinstance(op, opcodes.OpClusterSetParams))
    self.assertEqual(op.vg_name, "testvg")
    self.assertEqual(op.candidate_pool_size, 100)
    self.assertRaises(IndexError, cl.GetNextSubmittedJob)

  def testInvalidValue(self):
    for attr in ["vg_name", "candidate_pool_size", "beparams", "_-Unknown#"]:
      clfactory = _FakeClientFactory(_FakeClient)
      handler = _CreateHandler(rlib2.R_2_cluster_modify, [], [], {
        attr: True,
        }, clfactory)
      self.assertRaises(http.HttpBadRequest, handler.PUT)
      self.assertRaises(IndexError, clfactory.GetNextClient)


class TestRedistConfig(unittest.TestCase):
  def test(self):
    clfactory = _FakeClientFactory(_FakeClient)
    handler = _CreateHandler(rlib2.R_2_redist_config, [], [], None, clfactory)
    job_id = handler.PUT()

    cl = clfactory.GetNextClient()
    self.assertRaises(IndexError, clfactory.GetNextClient)

    (exp_job_id, (op, )) = cl.GetNextSubmittedJob()
    self.assertEqual(job_id, exp_job_id)
    self.assertTrue(isinstance(op, opcodes.OpClusterRedistConf))
    self.assertRaises(IndexError, cl.GetNextSubmittedJob)


class TestNodeMigrate(unittest.TestCase):
  def test(self):
    clfactory = _FakeClientFactory(_FakeClient)
    handler = _CreateHandler(rlib2.R_2_nodes_name_migrate, ["node1"], {}, {
      "iallocator": "fooalloc",
      }, clfactory)
    job_id = handler.POST()

    cl = clfactory.GetNextClient()
    self.assertRaises(IndexError, clfactory.GetNextClient)

    (exp_job_id, (op, )) = cl.GetNextSubmittedJob()
    self.assertEqual(job_id, exp_job_id)
    self.assertTrue(isinstance(op, opcodes.OpNodeMigrate))
    self.assertEqual(op.node_name, "node1")
    self.assertEqual(op.iallocator, "fooalloc")
    self.assertRaises(IndexError, cl.GetNextSubmittedJob)

  def testQueryArgsConflict(self):
    clfactory = _FakeClientFactory(_FakeClient)
    handler = _CreateHandler(rlib2.R_2_nodes_name_migrate, ["node2"], {
      "live": True,
      "mode": constants.HT_MIGRATION_NONLIVE,
      }, None, clfactory)
    self.assertRaises(http.HttpBadRequest, handler.POST)
    self.assertRaises(IndexError, clfactory.GetNextClient)

  def testQueryArgsMode(self):
    clfactory = _FakeClientFactory(_FakeClient)
    queryargs = {
      "mode": [constants.HT_MIGRATION_LIVE],
      }
    handler = _CreateHandler(rlib2.R_2_nodes_name_migrate, ["node17292"],
                             queryargs, None, clfactory)
    job_id = handler.POST()

    cl = clfactory.GetNextClient()
    self.assertRaises(IndexError, clfactory.GetNextClient)

    (exp_job_id, (op, )) = cl.GetNextSubmittedJob()
    self.assertEqual(job_id, exp_job_id)
    self.assertTrue(isinstance(op, opcodes.OpNodeMigrate))
    self.assertEqual(op.node_name, "node17292")
    self.assertEqual(op.mode, constants.HT_MIGRATION_LIVE)
    self.assertRaises(IndexError, cl.GetNextSubmittedJob)

  def testQueryArgsLive(self):
    clfactory = _FakeClientFactory(_FakeClient)

    for live in [False, True]:
      queryargs = {
        "live": [str(int(live))],
        }
      handler = _CreateHandler(rlib2.R_2_nodes_name_migrate, ["node6940"],
                               queryargs, None, clfactory)
      job_id = handler.POST()

      cl = clfactory.GetNextClient()
      self.assertRaises(IndexError, clfactory.GetNextClient)

      (exp_job_id, (op, )) = cl.GetNextSubmittedJob()
      self.assertEqual(job_id, exp_job_id)
      self.assertTrue(isinstance(op, opcodes.OpNodeMigrate))
      self.assertEqual(op.node_name, "node6940")
      if live:
        self.assertEqual(op.mode, constants.HT_MIGRATION_LIVE)
      else:
        self.assertEqual(op.mode, constants.HT_MIGRATION_NONLIVE)
      self.assertRaises(IndexError, cl.GetNextSubmittedJob)


class TestNodeEvacuate(unittest.TestCase):
  def test(self):
    clfactory = _FakeClientFactory(_FakeClient)
    handler = _CreateHandler(rlib2.R_2_nodes_name_evacuate, ["node92"], {
      "dry-run": ["1"],
      }, {
      "mode": constants.IALLOCATOR_NEVAC_SEC,
      }, clfactory)
    job_id = handler.POST()

    cl = clfactory.GetNextClient()
    self.assertRaises(IndexError, clfactory.GetNextClient)

    (exp_job_id, (op, )) = cl.GetNextSubmittedJob()
    self.assertEqual(job_id, exp_job_id)
    self.assertTrue(isinstance(op, opcodes.OpNodeEvacuate))
    self.assertEqual(op.node_name, "node92")
    self.assertEqual(op.mode, constants.IALLOCATOR_NEVAC_SEC)
    self.assertTrue(op.dry_run)
    self.assertRaises(IndexError, cl.GetNextSubmittedJob)


class TestNodePowercycle(unittest.TestCase):
  def test(self):
    clfactory = _FakeClientFactory(_FakeClient)
    handler = _CreateHandler(rlib2.R_2_nodes_name_powercycle, ["node20744"], {
      "force": ["1"],
      }, None, clfactory)
    job_id = handler.POST()

    cl = clfactory.GetNextClient()
    self.assertRaises(IndexError, clfactory.GetNextClient)

    (exp_job_id, (op, )) = cl.GetNextSubmittedJob()
    self.assertEqual(job_id, exp_job_id)
    self.assertTrue(isinstance(op, opcodes.OpNodePowercycle))
    self.assertEqual(op.node_name, "node20744")
    self.assertTrue(op.force)
    self.assertRaises(IndexError, cl.GetNextSubmittedJob)


class TestGroupAssignNodes(unittest.TestCase):
  def test(self):
    clfactory = _FakeClientFactory(_FakeClient)
    handler = _CreateHandler(rlib2.R_2_groups_name_assign_nodes, ["grp-a"], {
      "dry-run": ["1"],
      "force": ["1"],
      }, {
      "nodes": ["n2", "n3"],
      }, clfactory)
    job_id = handler.PUT()

    cl = clfactory.GetNextClient()
    self.assertRaises(IndexError, clfactory.GetNextClient)

    (exp_job_id, (op, )) = cl.GetNextSubmittedJob()
    self.assertEqual(job_id, exp_job_id)
    self.assertTrue(isinstance(op, opcodes.OpGroupAssignNodes))
    self.assertEqual(op.group_name, "grp-a")
    self.assertEqual(op.nodes, ["n2", "n3"])
    self.assertTrue(op.dry_run)
    self.assertTrue(op.force)
    self.assertRaises(IndexError, cl.GetNextSubmittedJob)


class TestInstanceDelete(unittest.TestCase):
  def test(self):
    clfactory = _FakeClientFactory(_FakeClient)
    handler = _CreateHandler(rlib2.R_2_instances_name, ["inst30965"], {
      "dry-run": ["1"],
      }, {}, clfactory)
    job_id = handler.DELETE()

    cl = clfactory.GetNextClient()
    self.assertRaises(IndexError, clfactory.GetNextClient)

    (exp_job_id, (op, )) = cl.GetNextSubmittedJob()
    self.assertEqual(job_id, exp_job_id)
    self.assertTrue(isinstance(op, opcodes.OpInstanceRemove))
    self.assertEqual(op.instance_name, "inst30965")
    self.assertTrue(op.dry_run)
    self.assertFalse(op.ignore_failures)
    self.assertRaises(IndexError, cl.GetNextSubmittedJob)


class TestInstanceInfo(unittest.TestCase):
  def test(self):
    clfactory = _FakeClientFactory(_FakeClient)
    handler = _CreateHandler(rlib2.R_2_instances_name_info, ["inst31217"], {
      "static": ["1"],
      }, {}, clfactory)
    job_id = handler.GET()

    cl = clfactory.GetNextClient()
    self.assertRaises(IndexError, clfactory.GetNextClient)

    (exp_job_id, (op, )) = cl.GetNextSubmittedJob()
    self.assertEqual(job_id, exp_job_id)
    self.assertTrue(isinstance(op, opcodes.OpInstanceQueryData))
    self.assertEqual(op.instances, ["inst31217"])
    self.assertTrue(op.static)
    self.assertRaises(IndexError, cl.GetNextSubmittedJob)


class TestInstanceReboot(unittest.TestCase):
  def test(self):
    clfactory = _FakeClientFactory(_FakeClient)
    handler = _CreateHandler(rlib2.R_2_instances_name_reboot, ["inst847"], {
      "dry-run": ["1"],
      "ignore_secondaries": ["1"],
      }, {}, clfactory)
    job_id = handler.POST()

    cl = clfactory.GetNextClient()
    self.assertRaises(IndexError, clfactory.GetNextClient)

    (exp_job_id, (op, )) = cl.GetNextSubmittedJob()
    self.assertEqual(job_id, exp_job_id)
    self.assertTrue(isinstance(op, opcodes.OpInstanceReboot))
    self.assertEqual(op.instance_name, "inst847")
    self.assertEqual(op.reboot_type, constants.INSTANCE_REBOOT_HARD)
    self.assertTrue(op.ignore_secondaries)
    self.assertTrue(op.dry_run)
    self.assertRaises(IndexError, cl.GetNextSubmittedJob)


class TestInstanceStartup(unittest.TestCase):
  def test(self):
    clfactory = _FakeClientFactory(_FakeClient)
    handler = _CreateHandler(rlib2.R_2_instances_name_startup, ["inst31083"], {
      "force": ["1"],
      "no_remember": ["1"],
      }, {}, clfactory)
    job_id = handler.PUT()

    cl = clfactory.GetNextClient()
    self.assertRaises(IndexError, clfactory.GetNextClient)

    (exp_job_id, (op, )) = cl.GetNextSubmittedJob()
    self.assertEqual(job_id, exp_job_id)
    self.assertTrue(isinstance(op, opcodes.OpInstanceStartup))
    self.assertEqual(op.instance_name, "inst31083")
    self.assertTrue(op.no_remember)
    self.assertTrue(op.force)
    self.assertFalse(op.dry_run)
    self.assertRaises(IndexError, cl.GetNextSubmittedJob)


class TestInstanceShutdown(unittest.TestCase):
  def test(self):
    clfactory = _FakeClientFactory(_FakeClient)
    handler = _CreateHandler(rlib2.R_2_instances_name_shutdown, ["inst26791"], {
      "no_remember": ["0"],
      }, {}, clfactory)
    job_id = handler.PUT()

    cl = clfactory.GetNextClient()
    self.assertRaises(IndexError, clfactory.GetNextClient)

    (exp_job_id, (op, )) = cl.GetNextSubmittedJob()
    self.assertEqual(job_id, exp_job_id)
    self.assertTrue(isinstance(op, opcodes.OpInstanceShutdown))
    self.assertEqual(op.instance_name, "inst26791")
    self.assertFalse(op.no_remember)
    self.assertFalse(op.dry_run)
    self.assertRaises(IndexError, cl.GetNextSubmittedJob)


class TestInstanceActivateDisks(unittest.TestCase):
  def test(self):
    clfactory = _FakeClientFactory(_FakeClient)
    handler = _CreateHandler(rlib2.R_2_instances_name_activate_disks, ["xyz"], {
      "ignore_size": ["1"],
      }, {}, clfactory)
    job_id = handler.PUT()

    cl = clfactory.GetNextClient()
    self.assertRaises(IndexError, clfactory.GetNextClient)

    (exp_job_id, (op, )) = cl.GetNextSubmittedJob()
    self.assertEqual(job_id, exp_job_id)
    self.assertTrue(isinstance(op, opcodes.OpInstanceActivateDisks))
    self.assertEqual(op.instance_name, "xyz")
    self.assertTrue(op.ignore_size)
    self.assertFalse(hasattr(op, "dry_run"))
    self.assertRaises(IndexError, cl.GetNextSubmittedJob)


class TestInstanceDeactivateDisks(unittest.TestCase):
  def test(self):
    clfactory = _FakeClientFactory(_FakeClient)
    handler = _CreateHandler(rlib2.R_2_instances_name_deactivate_disks,
                             ["inst22357"], {}, {}, clfactory)
    job_id = handler.PUT()

    cl = clfactory.GetNextClient()
    self.assertRaises(IndexError, clfactory.GetNextClient)

    (exp_job_id, (op, )) = cl.GetNextSubmittedJob()
    self.assertEqual(job_id, exp_job_id)
    self.assertTrue(isinstance(op, opcodes.OpInstanceDeactivateDisks))
    self.assertEqual(op.instance_name, "inst22357")
    self.assertFalse(hasattr(op, "dry_run"))
    self.assertFalse(hasattr(op, "force"))
    self.assertRaises(IndexError, cl.GetNextSubmittedJob)


class TestInstanceRecreateDisks(unittest.TestCase):
  def test(self):
    clfactory = _FakeClientFactory(_FakeClient)
    handler = _CreateHandler(rlib2.R_2_instances_name_recreate_disks,
                             ["inst22357"], {}, {}, clfactory)
    job_id = handler.POST()

    cl = clfactory.GetNextClient()
    self.assertRaises(IndexError, clfactory.GetNextClient)

    (exp_job_id, (op, )) = cl.GetNextSubmittedJob()
    self.assertEqual(job_id, exp_job_id)
    self.assertTrue(isinstance(op, opcodes.OpInstanceRecreateDisks))
    self.assertEqual(op.instance_name, "inst22357")
    self.assertFalse(hasattr(op, "dry_run"))
    self.assertFalse(hasattr(op, "force"))
    self.assertRaises(IndexError, cl.GetNextSubmittedJob)


class TestInstanceFailover(unittest.TestCase):
  def test(self):
    clfactory = _FakeClientFactory(_FakeClient)
    handler = _CreateHandler(rlib2.R_2_instances_name_failover,
                             ["inst12794"], {}, {}, clfactory)
    job_id = handler.PUT()

    cl = clfactory.GetNextClient()
    self.assertRaises(IndexError, clfactory.GetNextClient)

    (exp_job_id, (op, )) = cl.GetNextSubmittedJob()
    self.assertEqual(job_id, exp_job_id)
    self.assertTrue(isinstance(op, opcodes.OpInstanceFailover))
    self.assertEqual(op.instance_name, "inst12794")
    self.assertFalse(hasattr(op, "dry_run"))
    self.assertFalse(hasattr(op, "force"))
    self.assertRaises(IndexError, cl.GetNextSubmittedJob)


class TestInstanceDiskGrow(unittest.TestCase):
  def test(self):
    clfactory = _FakeClientFactory(_FakeClient)
    data = {
      "amount": 1024,
      }
    handler = _CreateHandler(rlib2.R_2_instances_name_disk_grow,
                             ["inst10742", "3"], {}, data, clfactory)
    job_id = handler.POST()

    cl = clfactory.GetNextClient()
    self.assertRaises(IndexError, clfactory.GetNextClient)

    (exp_job_id, (op, )) = cl.GetNextSubmittedJob()
    self.assertEqual(job_id, exp_job_id)
    self.assertTrue(isinstance(op, opcodes.OpInstanceGrowDisk))
    self.assertEqual(op.instance_name, "inst10742")
    self.assertEqual(op.disk, 3)
    self.assertEqual(op.amount, 1024)
    self.assertFalse(hasattr(op, "dry_run"))
    self.assertFalse(hasattr(op, "force"))
    self.assertRaises(IndexError, cl.GetNextSubmittedJob)


class TestBackupPrepare(unittest.TestCase):
  def test(self):
    clfactory = _FakeClientFactory(_FakeClient)
    queryargs = {
      "mode": constants.EXPORT_MODE_REMOTE,
      }
    handler = _CreateHandler(rlib2.R_2_instances_name_prepare_export,
                             ["inst17925"], queryargs, {}, clfactory)
    job_id = handler.PUT()

    cl = clfactory.GetNextClient()
    self.assertRaises(IndexError, clfactory.GetNextClient)

    (exp_job_id, (op, )) = cl.GetNextSubmittedJob()
    self.assertEqual(job_id, exp_job_id)
    self.assertTrue(isinstance(op, opcodes.OpBackupPrepare))
    self.assertEqual(op.instance_name, "inst17925")
    self.assertEqual(op.mode, constants.EXPORT_MODE_REMOTE)
    self.assertFalse(hasattr(op, "dry_run"))
    self.assertFalse(hasattr(op, "force"))
    self.assertRaises(IndexError, cl.GetNextSubmittedJob)


class TestGroupRemove(unittest.TestCase):
  def test(self):
    clfactory = _FakeClientFactory(_FakeClient)
    handler = _CreateHandler(rlib2.R_2_groups_name, ["grp28575"], {}, {},
                             clfactory)
    job_id = handler.DELETE()

    cl = clfactory.GetNextClient()
    self.assertRaises(IndexError, clfactory.GetNextClient)

    (exp_job_id, (op, )) = cl.GetNextSubmittedJob()
    self.assertEqual(job_id, exp_job_id)
    self.assertTrue(isinstance(op, opcodes.OpGroupRemove))
    self.assertEqual(op.group_name, "grp28575")
    self.assertFalse(op.dry_run)
    self.assertFalse(hasattr(op, "force"))
    self.assertRaises(IndexError, cl.GetNextSubmittedJob)


class TestStorageQuery(unittest.TestCase):
  def test(self):
    clfactory = _FakeClientFactory(_FakeClient)
    queryargs = {
      "storage_type": constants.ST_LVM_PV,
      "output_fields": "name,other",
      }
    handler = _CreateHandler(rlib2.R_2_nodes_name_storage, ["node21075"],
                             queryargs, {}, clfactory)
    job_id = handler.GET()

    cl = clfactory.GetNextClient()
    self.assertRaises(IndexError, clfactory.GetNextClient)

    (exp_job_id, (op, )) = cl.GetNextSubmittedJob()
    self.assertEqual(job_id, exp_job_id)
    self.assertTrue(isinstance(op, opcodes.OpNodeQueryStorage))
    self.assertEqual(op.nodes, ["node21075"])
    self.assertEqual(op.storage_type, constants.ST_LVM_PV)
    self.assertEqual(op.output_fields, ["name", "other"])
    self.assertFalse(hasattr(op, "dry_run"))
    self.assertFalse(hasattr(op, "force"))
    self.assertRaises(IndexError, cl.GetNextSubmittedJob)

  def testErrors(self):
    clfactory = _FakeClientFactory(_FakeClient)

    queryargs = {
      "output_fields": "name,other",
      }
    handler = _CreateHandler(rlib2.R_2_nodes_name_storage, ["node10538"],
                             queryargs, {}, clfactory)
    self.assertRaises(http.HttpBadRequest, handler.GET)

    queryargs = {
      "storage_type": constants.ST_LVM_VG,
      }
    handler = _CreateHandler(rlib2.R_2_nodes_name_storage, ["node21273"],
                             queryargs, {}, clfactory)
    self.assertRaises(http.HttpBadRequest, handler.GET)

    queryargs = {
      "storage_type": "##unknown_storage##",
      "output_fields": "name,other",
      }
    handler = _CreateHandler(rlib2.R_2_nodes_name_storage, ["node10315"],
                             queryargs, {}, clfactory)
    self.assertRaises(http.HttpBadRequest, handler.GET)


class TestStorageModify(unittest.TestCase):
  def test(self):
    clfactory = _FakeClientFactory(_FakeClient)

    for allocatable in [None, "1", "0"]:
      queryargs = {
        "storage_type": constants.ST_LVM_VG,
        "name": "pv-a",
        }

      if allocatable is not None:
        queryargs["allocatable"] = allocatable

      handler = _CreateHandler(rlib2.R_2_nodes_name_storage_modify,
                               ["node9292"], queryargs, {}, clfactory)
      job_id = handler.PUT()

      cl = clfactory.GetNextClient()
      self.assertRaises(IndexError, clfactory.GetNextClient)

      (exp_job_id, (op, )) = cl.GetNextSubmittedJob()
      self.assertEqual(job_id, exp_job_id)
      self.assertTrue(isinstance(op, opcodes.OpNodeModifyStorage))
      self.assertEqual(op.node_name, "node9292")
      self.assertEqual(op.storage_type, constants.ST_LVM_VG)
      self.assertEqual(op.name, "pv-a")
      if allocatable is None:
        self.assertFalse(op.changes)
      else:
        assert allocatable in ("0", "1")
        self.assertEqual(op.changes, {
          constants.SF_ALLOCATABLE: (allocatable == "1"),
          })
      self.assertFalse(hasattr(op, "dry_run"))
      self.assertFalse(hasattr(op, "force"))
      self.assertRaises(IndexError, cl.GetNextSubmittedJob)

  def testErrors(self):
    clfactory = _FakeClientFactory(_FakeClient)

    # No storage type
    queryargs = {
      "name": "xyz",
      }
    handler = _CreateHandler(rlib2.R_2_nodes_name_storage_modify,
                             ["node26016"], queryargs, {}, clfactory)
    self.assertRaises(http.HttpBadRequest, handler.PUT)

    # No name
    queryargs = {
      "storage_type": constants.ST_LVM_VG,
      }
    handler = _CreateHandler(rlib2.R_2_nodes_name_storage_modify,
                             ["node21218"], queryargs, {}, clfactory)
    self.assertRaises(http.HttpBadRequest, handler.PUT)

    # Invalid value
    queryargs = {
      "storage_type": constants.ST_LVM_VG,
      "name": "pv-b",
      "allocatable": "noint",
      }
    handler = _CreateHandler(rlib2.R_2_nodes_name_storage_modify,
                             ["node30685"], queryargs, {}, clfactory)
    self.assertRaises(http.HttpBadRequest, handler.PUT)


class TestStorageRepair(unittest.TestCase):
  def test(self):
    clfactory = _FakeClientFactory(_FakeClient)
    queryargs = {
      "storage_type": constants.ST_LVM_PV,
      "name": "pv16611",
      }
    handler = _CreateHandler(rlib2.R_2_nodes_name_storage_repair,
                             ["node19265"], queryargs, {}, clfactory)
    job_id = handler.PUT()

    cl = clfactory.GetNextClient()
    self.assertRaises(IndexError, clfactory.GetNextClient)

    (exp_job_id, (op, )) = cl.GetNextSubmittedJob()
    self.assertEqual(job_id, exp_job_id)
    self.assertTrue(isinstance(op, opcodes.OpRepairNodeStorage))
    self.assertEqual(op.node_name, "node19265")
    self.assertEqual(op.storage_type, constants.ST_LVM_PV)
    self.assertEqual(op.name, "pv16611")
    self.assertFalse(hasattr(op, "dry_run"))
    self.assertFalse(hasattr(op, "force"))
    self.assertRaises(IndexError, cl.GetNextSubmittedJob)

  def testErrors(self):
    clfactory = _FakeClientFactory(_FakeClient)

    # No storage type
    queryargs = {
      "name": "xyz",
      }
    handler = _CreateHandler(rlib2.R_2_nodes_name_storage_repair,
                             ["node11275"], queryargs, {}, clfactory)
    self.assertRaises(http.HttpBadRequest, handler.PUT)

    # No name
    queryargs = {
      "storage_type": constants.ST_LVM_VG,
      }
    handler = _CreateHandler(rlib2.R_2_nodes_name_storage_repair,
                             ["node21218"], queryargs, {}, clfactory)
    self.assertRaises(http.HttpBadRequest, handler.PUT)


class TestTags(unittest.TestCase):
  TAG_HANDLERS = [
    rlib2.R_2_instances_name_tags,
    rlib2.R_2_nodes_name_tags,
    rlib2.R_2_groups_name_tags,
    rlib2.R_2_tags,
    ]

  def testSetAndDelete(self):
    clfactory = _FakeClientFactory(_FakeClient)

    for method, opcls in [("PUT", opcodes.OpTagsSet),
                          ("DELETE", opcodes.OpTagsDel)]:
      for idx, handler in enumerate(self.TAG_HANDLERS):
        dry_run = bool(idx % 2)
        name = "test%s" % idx
        queryargs = {
          "tag": ["foo", "bar", "baz"],
          "dry-run": str(int(dry_run)),
          }

        handler = _CreateHandler(handler, [name], queryargs, {}, clfactory)
        job_id = getattr(handler, method)()

        cl = clfactory.GetNextClient()
        self.assertRaises(IndexError, clfactory.GetNextClient)

        (exp_job_id, (op, )) = cl.GetNextSubmittedJob()
        self.assertEqual(job_id, exp_job_id)
        self.assertTrue(isinstance(op, opcls))
        self.assertEqual(op.kind, handler.TAG_LEVEL)
        if handler.TAG_LEVEL == constants.TAG_CLUSTER:
          self.assertTrue(op.name is None)
        else:
          self.assertEqual(op.name, name)
        self.assertEqual(op.tags, ["foo", "bar", "baz"])
        self.assertEqual(op.dry_run, dry_run)
        self.assertFalse(hasattr(op, "force"))
        self.assertRaises(IndexError, cl.GetNextSubmittedJob)


class TestInstanceCreation(testutils.GanetiTestCase):
  def test(self):
    clfactory = _FakeClientFactory(_FakeClient)

    name = "inst863.example.com"

    disk_variants = [
      # No disks
      [],

      # Two disks
      [{"size": 5, }, {"size": 100, }],

      # Disk with mode
      [{"size": 123, "mode": constants.DISK_RDWR, }],
      ]

    nic_variants = [
      # No NIC
      [],

      # Three NICs
      [{}, {}, {}],

      # Two NICs
      [
        { "ip": "192.0.2.6", "mode": constants.NIC_MODE_ROUTED,
          "mac": "01:23:45:67:68:9A",
        },
        { "mode": constants.NIC_MODE_BRIDGED, "link": "br1" },
      ],
      ]

    beparam_variants = [
      None,
      {},
      { constants.BE_VCPUS: 2, },
      { constants.BE_MAXMEM: 200, },
      { constants.BE_MEMORY: 256, },
      { constants.BE_VCPUS: 2,
        constants.BE_MAXMEM: 1024,
        constants.BE_MINMEM: 1024,
        constants.BE_AUTO_BALANCE: True,
        constants.BE_ALWAYS_FAILOVER: True, }
      ]

    hvparam_variants = [
      None,
      { constants.HV_BOOT_ORDER: "anc", },
      { constants.HV_KERNEL_PATH: "/boot/fookernel",
        constants.HV_ROOT_PATH: "/dev/hda1", },
      ]

    for mode in [constants.INSTANCE_CREATE, constants.INSTANCE_IMPORT]:
      for nics in nic_variants:
        for disk_template in constants.DISK_TEMPLATES:
          for disks in disk_variants:
            for beparams in beparam_variants:
              for hvparams in hvparam_variants:
                for dry_run in [False, True]:
                  queryargs = {
                    "dry-run": str(int(dry_run)),
                    }

                  data = {
                    rlib2._REQ_DATA_VERSION: 1,
                    "name": name,
                    "hypervisor": constants.HT_FAKE,
                    "disks": disks,
                    "nics": nics,
                    "mode": mode,
                    "disk_template": disk_template,
                    "os": "debootstrap",
                    }

                  if beparams is not None:
                    data["beparams"] = beparams

                  if hvparams is not None:
                    data["hvparams"] = hvparams

                  handler = _CreateHandler(rlib2.R_2_instances, [],
                                           queryargs, data, clfactory)
                  job_id = handler.POST()

                  cl = clfactory.GetNextClient()
                  self.assertRaises(IndexError, clfactory.GetNextClient)

                  (exp_job_id, (op, )) = cl.GetNextSubmittedJob()
                  self.assertEqual(job_id, exp_job_id)
                  self.assertRaises(IndexError, cl.GetNextSubmittedJob)

                  self.assertTrue(isinstance(op, opcodes.OpInstanceCreate))
                  self.assertEqual(op.instance_name, name)
                  self.assertEqual(op.mode, mode)
                  self.assertEqual(op.disk_template, disk_template)
                  self.assertEqual(op.dry_run, dry_run)
                  self.assertEqual(len(op.disks), len(disks))
                  self.assertEqual(len(op.nics), len(nics))

                  for opdisk, disk in zip(op.disks, disks):
                    for key in constants.IDISK_PARAMS:
                      self.assertEqual(opdisk.get(key), disk.get(key))
                    self.assertFalse("unknown" in opdisk)

                  for opnic, nic in zip(op.nics, nics):
                    for key in constants.INIC_PARAMS:
                      self.assertEqual(opnic.get(key), nic.get(key))
                    self.assertFalse("unknown" in opnic)
                    self.assertFalse("foobar" in opnic)

                  if beparams is None:
                    self.assertFalse(hasattr(op, "beparams"))
                  else:
                    self.assertEqualValues(op.beparams, beparams)

                  if hvparams is None:
                    self.assertFalse(hasattr(op, "hvparams"))
                  else:
                    self.assertEqualValues(op.hvparams, hvparams)

  def testLegacyName(self):
    clfactory = _FakeClientFactory(_FakeClient)

    name = "inst29128.example.com"
    data = {
      rlib2._REQ_DATA_VERSION: 1,
      "name": name,
      "disks": [],
      "nics": [],
      "mode": constants.INSTANCE_CREATE,
      "disk_template": constants.DT_PLAIN,
      }

    handler = _CreateHandler(rlib2.R_2_instances, [], {}, data, clfactory)
    job_id = handler.POST()

    cl = clfactory.GetNextClient()
    self.assertRaises(IndexError, clfactory.GetNextClient)

    (exp_job_id, (op, )) = cl.GetNextSubmittedJob()
    self.assertEqual(job_id, exp_job_id)
    self.assertTrue(isinstance(op, opcodes.OpInstanceCreate))
    self.assertEqual(op.instance_name, name)
    self.assertFalse(hasattr(op, "name"))
    self.assertFalse(op.dry_run)
    self.assertRaises(IndexError, cl.GetNextSubmittedJob)

    # Define both
    data["instance_name"] = "other.example.com"
    assert "name" in data and "instance_name" in data

    handler = _CreateHandler(rlib2.R_2_instances, [], {}, data, clfactory)
    self.assertRaises(http.HttpBadRequest, handler.POST)
    self.assertRaises(IndexError, clfactory.GetNextClient)

  def testLegacyOs(self):
    clfactory = _FakeClientFactory(_FakeClient)

    name = "inst4673.example.com"
    os = "linux29206"
    data = {
      rlib2._REQ_DATA_VERSION: 1,
      "name": name,
      "os_type": os,
      "disks": [],
      "nics": [],
      "mode": constants.INSTANCE_CREATE,
      "disk_template": constants.DT_PLAIN,
      }

    handler = _CreateHandler(rlib2.R_2_instances, [], {}, data, clfactory)
    job_id = handler.POST()

    cl = clfactory.GetNextClient()
    self.assertRaises(IndexError, clfactory.GetNextClient)

    (exp_job_id, (op, )) = cl.GetNextSubmittedJob()
    self.assertEqual(job_id, exp_job_id)
    self.assertTrue(isinstance(op, opcodes.OpInstanceCreate))
    self.assertEqual(op.instance_name, name)
    self.assertEqual(op.os_type, os)
    self.assertFalse(hasattr(op, "os"))
    self.assertFalse(op.dry_run)
    self.assertRaises(IndexError, cl.GetNextSubmittedJob)

    # Define both
    data["os"] = "linux9584"
    assert "os" in data and "os_type" in data

    handler = _CreateHandler(rlib2.R_2_instances, [], {}, data, clfactory)
    self.assertRaises(http.HttpBadRequest, handler.POST)

  def testErrors(self):
    clfactory = _FakeClientFactory(_FakeClient)

    # Test all required fields
    reqfields = {
      rlib2._REQ_DATA_VERSION: 1,
      "name": "inst1.example.com",
      "disks": [],
      "nics": [],
      "mode": constants.INSTANCE_CREATE,
      "disk_template": constants.DT_PLAIN,
      }

    for name in reqfields.keys():
      data = dict(i for i in reqfields.iteritems() if i[0] != name)

      handler = _CreateHandler(rlib2.R_2_instances, [], {}, data, clfactory)
      self.assertRaises(http.HttpBadRequest, handler.POST)
      self.assertRaises(IndexError, clfactory.GetNextClient)

    # Invalid disks and nics
    for field in ["disks", "nics"]:
      invalid_values = [None, 1, "", {}, [1, 2, 3], ["hda1", "hda2"],
                        [{"_unknown_": False, }]]

      for invvalue in invalid_values:
        data = reqfields.copy()
        data[field] = invvalue
        handler = _CreateHandler(rlib2.R_2_instances, [], {}, data, clfactory)
        self.assertRaises(http.HttpBadRequest, handler.POST)
        self.assertRaises(IndexError, clfactory.GetNextClient)

  def testVersion(self):
    clfactory = _FakeClientFactory(_FakeClient)

    # No version field
    data = {
      "name": "inst1.example.com",
      "disks": [],
      "nics": [],
      "mode": constants.INSTANCE_CREATE,
      "disk_template": constants.DT_PLAIN,
      }

    handler = _CreateHandler(rlib2.R_2_instances, [], {}, data, clfactory)
    self.assertRaises(http.HttpBadRequest, handler.POST)

    # Old and incorrect versions
    for version in [0, -1, 10483, "Hello World"]:
      data[rlib2._REQ_DATA_VERSION] = version

      handler = _CreateHandler(rlib2.R_2_instances, [], {}, data, clfactory)
      self.assertRaises(http.HttpBadRequest, handler.POST)
      self.assertRaises(IndexError, clfactory.GetNextClient)

    # Correct version
    data[rlib2._REQ_DATA_VERSION] = 1

    handler = _CreateHandler(rlib2.R_2_instances, [], {}, data, clfactory)
    job_id = handler.POST()

    cl = clfactory.GetNextClient()
    self.assertRaises(IndexError, clfactory.GetNextClient)

    (exp_job_id, (op, )) = cl.GetNextSubmittedJob()
    self.assertEqual(job_id, exp_job_id)
    self.assertTrue(isinstance(op, opcodes.OpInstanceCreate))
    self.assertRaises(IndexError, cl.GetNextSubmittedJob)


class TestBackupExport(unittest.TestCase):
  def test(self):
    clfactory = _FakeClientFactory(_FakeClient)

    name = "instmoo"
    data = {
      "mode": constants.EXPORT_MODE_REMOTE,
      "destination": [(1, 2, 3), (99, 99, 99)],
      "shutdown": True,
      "remove_instance": True,
      "x509_key_name": ["name", "hash"],
      "destination_x509_ca": "---cert---"
      }

    handler = _CreateHandler(rlib2.R_2_instances_name_export, [name], {},
                             data, clfactory)
    job_id = handler.PUT()

    cl = clfactory.GetNextClient()
    self.assertRaises(IndexError, clfactory.GetNextClient)

    (exp_job_id, (op, )) = cl.GetNextSubmittedJob()
    self.assertEqual(job_id, exp_job_id)
    self.assertTrue(isinstance(op, opcodes.OpBackupExport))
    self.assertEqual(op.instance_name, name)
    self.assertEqual(op.mode, constants.EXPORT_MODE_REMOTE)
    self.assertEqual(op.target_node, [(1, 2, 3), (99, 99, 99)])
    self.assertEqual(op.shutdown, True)
    self.assertEqual(op.remove_instance, True)
    self.assertEqual(op.x509_key_name, ["name", "hash"])
    self.assertEqual(op.destination_x509_ca, "---cert---")
    self.assertFalse(hasattr(op, "dry_run"))
    self.assertFalse(hasattr(op, "force"))
    self.assertRaises(IndexError, cl.GetNextSubmittedJob)

  def testDefaults(self):
    clfactory = _FakeClientFactory(_FakeClient)

    name = "inst1"
    data = {
      "destination": "node2",
      "shutdown": False,
      }

    handler = _CreateHandler(rlib2.R_2_instances_name_export, [name], {},
                             data, clfactory)
    job_id = handler.PUT()

    cl = clfactory.GetNextClient()
    self.assertRaises(IndexError, clfactory.GetNextClient)

    (exp_job_id, (op, )) = cl.GetNextSubmittedJob()
    self.assertEqual(job_id, exp_job_id)
    self.assertTrue(isinstance(op, opcodes.OpBackupExport))
    self.assertEqual(op.instance_name, name)
    self.assertEqual(op.target_node, "node2")
    self.assertFalse(hasattr(op, "mode"))
    self.assertFalse(hasattr(op, "remove_instance"))
    self.assertFalse(hasattr(op, "destination"))
    self.assertFalse(hasattr(op, "dry_run"))
    self.assertFalse(hasattr(op, "force"))
    self.assertRaises(IndexError, cl.GetNextSubmittedJob)

  def testErrors(self):
    clfactory = _FakeClientFactory(_FakeClient)

    for value in ["True", "False"]:
      handler = _CreateHandler(rlib2.R_2_instances_name_export, ["err1"], {}, {
        "remove_instance": value,
        }, clfactory)
      self.assertRaises(http.HttpBadRequest, handler.PUT)


class TestInstanceMigrate(testutils.GanetiTestCase):
  def test(self):
    clfactory = _FakeClientFactory(_FakeClient)

    name = "instYooho6ek"

    for cleanup in [False, True]:
      for mode in constants.HT_MIGRATION_MODES:
        data = {
          "cleanup": cleanup,
          "mode": mode,
          }

        handler = _CreateHandler(rlib2.R_2_instances_name_migrate, [name], {},
                                 data, clfactory)
        job_id = handler.PUT()

        cl = clfactory.GetNextClient()
        self.assertRaises(IndexError, clfactory.GetNextClient)

        (exp_job_id, (op, )) = cl.GetNextSubmittedJob()
        self.assertEqual(job_id, exp_job_id)
        self.assertTrue(isinstance(op, opcodes.OpInstanceMigrate))
        self.assertEqual(op.instance_name, name)
        self.assertEqual(op.mode, mode)
        self.assertEqual(op.cleanup, cleanup)
        self.assertFalse(hasattr(op, "dry_run"))
        self.assertFalse(hasattr(op, "force"))
        self.assertRaises(IndexError, cl.GetNextSubmittedJob)

  def testDefaults(self):
    clfactory = _FakeClientFactory(_FakeClient)

    name = "instnohZeex0"

    handler = _CreateHandler(rlib2.R_2_instances_name_migrate, [name], {}, {},
                             clfactory)
    job_id = handler.PUT()

    cl = clfactory.GetNextClient()
    self.assertRaises(IndexError, clfactory.GetNextClient)

    (exp_job_id, (op, )) = cl.GetNextSubmittedJob()
    self.assertEqual(job_id, exp_job_id)
    self.assertTrue(isinstance(op, opcodes.OpInstanceMigrate))
    self.assertEqual(op.instance_name, name)
    self.assertFalse(hasattr(op, "mode"))
    self.assertFalse(hasattr(op, "cleanup"))
    self.assertFalse(hasattr(op, "dry_run"))
    self.assertFalse(hasattr(op, "force"))
    self.assertRaises(IndexError, cl.GetNextSubmittedJob)


class TestParseRenameInstanceRequest(testutils.GanetiTestCase):
  def test(self):
    clfactory = _FakeClientFactory(_FakeClient)

    name = "instij0eeph7"

    for new_name in ["ua0aiyoo", "fai3ongi"]:
      for ip_check in [False, True]:
        for name_check in [False, True]:
          data = {
            "new_name": new_name,
            "ip_check": ip_check,
            "name_check": name_check,
            }

          handler = _CreateHandler(rlib2.R_2_instances_name_rename, [name],
                                   {}, data, clfactory)
          job_id = handler.PUT()

          cl = clfactory.GetNextClient()
          self.assertRaises(IndexError, clfactory.GetNextClient)

          (exp_job_id, (op, )) = cl.GetNextSubmittedJob()
          self.assertEqual(job_id, exp_job_id)
          self.assertTrue(isinstance(op, opcodes.OpInstanceRename))
          self.assertEqual(op.instance_name, name)
          self.assertEqual(op.new_name, new_name)
          self.assertEqual(op.ip_check, ip_check)
          self.assertEqual(op.name_check, name_check)
          self.assertFalse(hasattr(op, "dry_run"))
          self.assertFalse(hasattr(op, "force"))
          self.assertRaises(IndexError, cl.GetNextSubmittedJob)

  def testDefaults(self):
    clfactory = _FakeClientFactory(_FakeClient)

    name = "instahchie3t"

    for new_name in ["thag9mek", "quees7oh"]:
      data = {
        "new_name": new_name,
        }

      handler = _CreateHandler(rlib2.R_2_instances_name_rename, [name],
                               {}, data, clfactory)
      job_id = handler.PUT()

      cl = clfactory.GetNextClient()
      self.assertRaises(IndexError, clfactory.GetNextClient)

      (exp_job_id, (op, )) = cl.GetNextSubmittedJob()
      self.assertEqual(job_id, exp_job_id)
      self.assertTrue(isinstance(op, opcodes.OpInstanceRename))
      self.assertEqual(op.instance_name, name)
      self.assertEqual(op.new_name, new_name)
      self.assertFalse(hasattr(op, "ip_check"))
      self.assertFalse(hasattr(op, "name_check"))
      self.assertFalse(hasattr(op, "dry_run"))
      self.assertFalse(hasattr(op, "force"))
      self.assertRaises(IndexError, cl.GetNextSubmittedJob)


class TestParseModifyInstanceRequest(unittest.TestCase):
  def test(self):
    clfactory = _FakeClientFactory(_FakeClient)

    name = "instush8gah"

    test_disks = [
      [],
      [(1, { constants.IDISK_MODE: constants.DISK_RDWR, })],
      ]

    for osparams in [{}, { "some": "value", "other": "Hello World", }]:
      for hvparams in [{}, { constants.HV_KERNEL_PATH: "/some/kernel", }]:
        for beparams in [{}, { constants.BE_MAXMEM: 128, }]:
          for force in [False, True]:
            for nics in [[], [(0, { constants.INIC_IP: "192.0.2.1", })]]:
              for disks in test_disks:
                for disk_template in constants.DISK_TEMPLATES:
                  data = {
                    "osparams": osparams,
                    "hvparams": hvparams,
                    "beparams": beparams,
                    "nics": nics,
                    "disks": disks,
                    "force": force,
                    "disk_template": disk_template,
                    }

                  handler = _CreateHandler(rlib2.R_2_instances_name_modify,
                                           [name], {}, data, clfactory)
                  job_id = handler.PUT()

                  cl = clfactory.GetNextClient()
                  self.assertRaises(IndexError, clfactory.GetNextClient)

                  (exp_job_id, (op, )) = cl.GetNextSubmittedJob()
                  self.assertEqual(job_id, exp_job_id)
                  self.assertTrue(isinstance(op, opcodes.OpInstanceSetParams))
                  self.assertEqual(op.instance_name, name)
                  self.assertEqual(op.hvparams, hvparams)
                  self.assertEqual(op.beparams, beparams)
                  self.assertEqual(op.osparams, osparams)
                  self.assertEqual(op.force, force)
                  self.assertEqual(op.nics, nics)
                  self.assertEqual(op.disks, disks)
                  self.assertEqual(op.disk_template, disk_template)
                  self.assertFalse(hasattr(op, "remote_node"))
                  self.assertFalse(hasattr(op, "os_name"))
                  self.assertFalse(hasattr(op, "force_variant"))
                  self.assertFalse(hasattr(op, "dry_run"))
                  self.assertRaises(IndexError, cl.GetNextSubmittedJob)

  def testDefaults(self):
    clfactory = _FakeClientFactory(_FakeClient)

    name = "instir8aish31"

    handler = _CreateHandler(rlib2.R_2_instances_name_modify,
                             [name], {}, {}, clfactory)
    job_id = handler.PUT()

    cl = clfactory.GetNextClient()
    self.assertRaises(IndexError, clfactory.GetNextClient)

    (exp_job_id, (op, )) = cl.GetNextSubmittedJob()
    self.assertEqual(job_id, exp_job_id)
    self.assertTrue(isinstance(op, opcodes.OpInstanceSetParams))
    self.assertEqual(op.instance_name, name)

    for i in ["hvparams", "beparams", "osparams", "force", "nics", "disks",
              "disk_template", "remote_node", "os_name", "force_variant"]:
      self.assertFalse(hasattr(op, i))


class TestParseInstanceReinstallRequest(testutils.GanetiTestCase):
  def setUp(self):
    testutils.GanetiTestCase.setUp(self)

    self.Parse = rlib2._ParseInstanceReinstallRequest

  def _Check(self, ops, name):
    expcls = [
      opcodes.OpInstanceShutdown,
      opcodes.OpInstanceReinstall,
      opcodes.OpInstanceStartup,
      ]

    self.assert_(compat.all(isinstance(op, exp)
                            for op, exp in zip(ops, expcls)))
    self.assert_(compat.all(op.instance_name == name for op in ops))

  def test(self):
    name = "shoo0tihohma"

    ops = self.Parse(name, {"os": "sys1", "start": True,})
    self.assertEqual(len(ops), 3)
    self._Check(ops, name)
    self.assertEqual(ops[1].os_type, "sys1")
    self.assertFalse(ops[1].osparams)

    ops = self.Parse(name, {"os": "sys2", "start": False,})
    self.assertEqual(len(ops), 2)
    self._Check(ops, name)
    self.assertEqual(ops[1].os_type, "sys2")

    osparams = {
      "reformat": "1",
      }
    ops = self.Parse(name, {"os": "sys4035", "start": True,
                            "osparams": osparams,})
    self.assertEqual(len(ops), 3)
    self._Check(ops, name)
    self.assertEqual(ops[1].os_type, "sys4035")
    self.assertEqual(ops[1].osparams, osparams)

  def testDefaults(self):
    name = "noolee0g"

    ops = self.Parse(name, {"os": "linux1"})
    self.assertEqual(len(ops), 3)
    self._Check(ops, name)
    self.assertEqual(ops[1].os_type, "linux1")
    self.assertFalse(ops[1].osparams)

  def testErrors(self):
    self.assertRaises(http.HttpBadRequest, self.Parse,
                      "foo", "not a dictionary")
(exp_job_id, (op, )) = cl.GetNextSubmittedJob() self.assertEqual(job_id, exp_job_id) self.assertTrue(isinstance(op, opcodes.OpGroupRename)) self.assertEqual(op.group_name, name) self.assertEqual(op.new_name, "ua0aiyoo15112") self.assertFalse(op.dry_run) self.assertRaises(IndexError, cl.GetNextSubmittedJob) def testDryRun(self): clfactory = _FakeClientFactory(_FakeClient) name = "group28548" data = { "new_name": "ua0aiyoo", } handler = _CreateHandler(rlib2.R_2_groups_name_rename, [name], { "dry-run": ["1"], }, data, clfactory) job_id = handler.PUT() cl = clfactory.GetNextClient() self.assertRaises(IndexError, clfactory.GetNextClient) (exp_job_id, (op, )) = cl.GetNextSubmittedJob() self.assertEqual(job_id, exp_job_id) self.assertTrue(isinstance(op, opcodes.OpGroupRename)) self.assertEqual(op.group_name, name) self.assertEqual(op.new_name, "ua0aiyoo") self.assertTrue(op.dry_run) self.assertRaises(IndexError, cl.GetNextSubmittedJob) class TestInstanceReplaceDisks(unittest.TestCase): def test(self): clfactory = _FakeClientFactory(_FakeClient) name = "inst22568" for disks in [range(1, 4), "1,2,3", "1, 2, 3"]: data = { "mode": constants.REPLACE_DISK_SEC, "disks": disks, "iallocator": "myalloc", } handler = _CreateHandler(rlib2.R_2_instances_name_replace_disks, [name], {}, data, clfactory) job_id = handler.POST() cl = clfactory.GetNextClient() self.assertRaises(IndexError, clfactory.GetNextClient) (exp_job_id, (op, )) = cl.GetNextSubmittedJob() self.assertEqual(job_id, exp_job_id) self.assertTrue(isinstance(op, opcodes.OpInstanceReplaceDisks)) self.assertEqual(op.instance_name, name) self.assertEqual(op.mode, constants.REPLACE_DISK_SEC) self.assertEqual(op.disks, [1, 2, 3]) self.assertEqual(op.iallocator, "myalloc") self.assertRaises(IndexError, cl.GetNextSubmittedJob) def testDefaults(self): clfactory = _FakeClientFactory(_FakeClient) name = "inst11413" data = { "mode": constants.REPLACE_DISK_AUTO, } handler = _CreateHandler(rlib2.R_2_instances_name_replace_disks, [name], {}, data, clfactory) job_id = handler.POST() cl = clfactory.GetNextClient() self.assertRaises(IndexError, clfactory.GetNextClient) (exp_job_id, (op, )) = cl.GetNextSubmittedJob() self.assertEqual(job_id, exp_job_id) self.assertTrue(isinstance(op, opcodes.OpInstanceReplaceDisks)) self.assertEqual(op.instance_name, name) self.assertEqual(op.mode, constants.REPLACE_DISK_AUTO) self.assertFalse(hasattr(op, "iallocator")) self.assertFalse(hasattr(op, "disks")) self.assertRaises(IndexError, cl.GetNextSubmittedJob) def testNoDisks(self): clfactory = _FakeClientFactory(_FakeClient) handler = _CreateHandler(rlib2.R_2_instances_name_replace_disks, ["inst20661"], {}, {}, clfactory) self.assertRaises(http.HttpBadRequest, handler.POST) for disks in [None, "", {}]: handler = _CreateHandler(rlib2.R_2_instances_name_replace_disks, ["inst20661"], {}, { "disks": disks, }, clfactory) self.assertRaises(http.HttpBadRequest, handler.POST) def testWrong(self): clfactory = _FakeClientFactory(_FakeClient) data = { "mode": constants.REPLACE_DISK_AUTO, "disks": "hello world", } handler = _CreateHandler(rlib2.R_2_instances_name_replace_disks, ["foo"], {}, data, clfactory) self.assertRaises(http.HttpBadRequest, handler.POST) class TestGroupModify(unittest.TestCase): def test(self): clfactory = _FakeClientFactory(_FakeClient) name = "group6002" for policy in constants.VALID_ALLOC_POLICIES: data = { "alloc_policy": policy, } handler = _CreateHandler(rlib2.R_2_groups_name_modify, [name], {}, data, clfactory) job_id = handler.PUT() cl = clfactory.GetNextClient() 
self.assertRaises(IndexError, clfactory.GetNextClient) (exp_job_id, (op, )) = cl.GetNextSubmittedJob() self.assertEqual(job_id, exp_job_id) self.assertTrue(isinstance(op, opcodes.OpGroupSetParams)) self.assertEqual(op.group_name, name) self.assertEqual(op.alloc_policy, policy) self.assertFalse(hasattr(op, "dry_run")) self.assertRaises(IndexError, cl.GetNextSubmittedJob) def testUnknownPolicy(self): clfactory = _FakeClientFactory(_FakeClient) data = { "alloc_policy": "_unknown_policy_", } handler = _CreateHandler(rlib2.R_2_groups_name_modify, ["xyz"], {}, data, clfactory) self.assertRaises(http.HttpBadRequest, handler.PUT) self.assertRaises(IndexError, clfactory.GetNextClient) def testDefaults(self): clfactory = _FakeClientFactory(_FakeClient) name = "group6679" handler = _CreateHandler(rlib2.R_2_groups_name_modify, [name], {}, {}, clfactory) job_id = handler.PUT() cl = clfactory.GetNextClient() self.assertRaises(IndexError, clfactory.GetNextClient) (exp_job_id, (op, )) = cl.GetNextSubmittedJob() self.assertEqual(job_id, exp_job_id) self.assertTrue(isinstance(op, opcodes.OpGroupSetParams)) self.assertEqual(op.group_name, name) self.assertFalse(hasattr(op, "alloc_policy")) self.assertFalse(hasattr(op, "dry_run")) self.assertRaises(IndexError, cl.GetNextSubmittedJob) class TestGroupAdd(unittest.TestCase): def test(self): name = "group3618" clfactory = _FakeClientFactory(_FakeClient) for policy in constants.VALID_ALLOC_POLICIES: data = { "group_name": name, "alloc_policy": policy, } handler = _CreateHandler(rlib2.R_2_groups, [], {}, data, clfactory) job_id = handler.POST() cl = clfactory.GetNextClient() self.assertRaises(IndexError, clfactory.GetNextClient) (exp_job_id, (op, )) = cl.GetNextSubmittedJob() self.assertEqual(job_id, exp_job_id) self.assertTrue(isinstance(op, opcodes.OpGroupAdd)) self.assertEqual(op.group_name, name) self.assertEqual(op.alloc_policy, policy) self.assertFalse(op.dry_run) self.assertRaises(IndexError, cl.GetNextSubmittedJob) def testUnknownPolicy(self): clfactory = _FakeClientFactory(_FakeClient) data = { "alloc_policy": "_unknown_policy_", } handler = _CreateHandler(rlib2.R_2_groups, [], {}, data, clfactory) self.assertRaises(http.HttpBadRequest, handler.POST) self.assertRaises(IndexError, clfactory.GetNextClient) def testDefaults(self): clfactory = _FakeClientFactory(_FakeClient) name = "group15395" data = { "group_name": name, } handler = _CreateHandler(rlib2.R_2_groups, [], {}, data, clfactory) job_id = handler.POST() cl = clfactory.GetNextClient() self.assertRaises(IndexError, clfactory.GetNextClient) (exp_job_id, (op, )) = cl.GetNextSubmittedJob() self.assertEqual(job_id, exp_job_id) self.assertTrue(isinstance(op, opcodes.OpGroupAdd)) self.assertEqual(op.group_name, name) self.assertFalse(hasattr(op, "alloc_policy")) self.assertFalse(op.dry_run) def testLegacyName(self): clfactory = _FakeClientFactory(_FakeClient) name = "group29852" data = { "name": name, } handler = _CreateHandler(rlib2.R_2_groups, [], { "dry-run": ["1"], }, data, clfactory) job_id = handler.POST() cl = clfactory.GetNextClient() self.assertRaises(IndexError, clfactory.GetNextClient) (exp_job_id, (op, )) = cl.GetNextSubmittedJob() self.assertEqual(job_id, exp_job_id) self.assertTrue(isinstance(op, opcodes.OpGroupAdd)) self.assertEqual(op.group_name, name) self.assertFalse(hasattr(op, "alloc_policy")) self.assertTrue(op.dry_run) class TestNodeRole(unittest.TestCase): def test(self): clfactory = _FakeClientFactory(_FakeClient) for role in rlib2._NR_MAP.values(): handler = 
_CreateHandler(rlib2.R_2_nodes_name_role, ["node-z"], {}, role, clfactory) if role == rlib2._NR_MASTER: self.assertRaises(http.HttpBadRequest, handler.PUT) else: job_id = handler.PUT() cl = clfactory.GetNextClient() self.assertRaises(IndexError, clfactory.GetNextClient) (exp_job_id, (op, )) = cl.GetNextSubmittedJob() self.assertEqual(job_id, exp_job_id) self.assertTrue(isinstance(op, opcodes.OpNodeSetParams)) self.assertEqual(op.node_name, "node-z") self.assertFalse(op.force) self.assertFalse(hasattr(op, "dry_run")) if role == rlib2._NR_REGULAR: self.assertFalse(op.drained) self.assertFalse(op.offline) self.assertFalse(op.master_candidate) elif role == rlib2._NR_MASTER_CANDIDATE: self.assertFalse(op.drained) self.assertFalse(op.offline) self.assertTrue(op.master_candidate) elif role == rlib2._NR_DRAINED: self.assertTrue(op.drained) self.assertFalse(op.offline) self.assertFalse(op.master_candidate) elif role == rlib2._NR_OFFLINE: self.assertFalse(op.drained) self.assertTrue(op.offline) self.assertFalse(op.master_candidate) else: self.fail("Unknown role '%s'" % role) self.assertRaises(IndexError, cl.GetNextSubmittedJob) class TestSimpleResources(unittest.TestCase): def setUp(self): self.clfactory = _FakeClientFactory(_FakeClient) def tearDown(self): self.assertRaises(IndexError, self.clfactory.GetNextClient) def testFeatures(self): handler = _CreateHandler(rlib2.R_2_features, [], {}, None, self.clfactory) self.assertEqual(set(handler.GET()), rlib2.ALL_FEATURES) def testEmpty(self): for cls in [rlib2.R_root, rlib2.R_2]: handler = _CreateHandler(cls, [], {}, None, self.clfactory) self.assertTrue(handler.GET() is None) def testVersion(self): handler = _CreateHandler(rlib2.R_version, [], {}, None, self.clfactory) self.assertEqual(handler.GET(), constants.RAPI_VERSION) class TestClusterInfo(unittest.TestCase): class _ClusterInfoClient: def __init__(self, address=None): self.cluster_info = None def QueryClusterInfo(self): assert self.cluster_info is None self.cluster_info = object() return self.cluster_info def test(self): clfactory = _FakeClientFactory(self._ClusterInfoClient) handler = _CreateHandler(rlib2.R_2_info, [], {}, None, clfactory) result = handler.GET() cl = clfactory.GetNextClient() self.assertRaises(IndexError, clfactory.GetNextClient) self.assertEqual(result, cl.cluster_info) class TestInstancesMultiAlloc(unittest.TestCase): def testInstanceUpdate(self): clfactory = _FakeClientFactory(_FakeClient) data = { "instances": [{ "instance_name": "bar", "mode": "create", }, { "instance_name": "foo", "mode": "create", }], } handler = _CreateHandler(rlib2.R_2_instances_multi_alloc, [], {}, data, clfactory) (body, _) = handler.GetPostOpInput() self.assertTrue(compat.all([inst["OP_ID"] == handler.POST_OPCODE.OP_ID for inst in body["instances"]])) class TestPermissions(unittest.TestCase): def testEquality(self): self.assertEqual(rlib2.R_2_query.GET_ACCESS, rlib2.R_2_query.PUT_ACCESS) self.assertEqual(rlib2.R_2_query.GET_ACCESS, rlib2.R_2_instances_name_console.GET_ACCESS) def testMethodAccess(self): for handler in connector.CONNECTOR.values(): for method in baserlib._SUPPORTED_METHODS: access = getattr(handler, "%s_ACCESS" % method) self.assertFalse(set(access) - rapi.RAPI_ACCESS_ALL, msg=("Handler '%s' uses unknown access options for" " method %s" % (handler, method))) self.assertTrue(rapi.RAPI_ACCESS_READ not in access or rapi.RAPI_ACCESS_WRITE in access, msg=("Handler '%s' gives query, but not write access" " for method %s (the latter includes query and" " should therefore be given as well)" 
                           % (handler, method)))


if __name__ == "__main__":
  testutils.GanetiTestProgram()
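Every handler test in this file follows the same fake-client contract: one PUT/POST submits exactly one job through exactly one client, and both the client queue and the submitted-job queue raise IndexError once drained. The real _FakeClient and _FakeClientFactory are defined earlier in the file and are not shown in this excerpt; the following is a hypothetical minimal sketch of that contract, not Ganeti's actual helper:

class _SketchFakeClient(object):
  """Hypothetical stand-in illustrating the fake-client contract."""
  def __init__(self, address=None):
    self._jobs = []

  def SubmitJob(self, ops):
    # Record the opcodes and hand back a job id, as the tests expect.
    job_id = len(self._jobs) + 1
    self._jobs.append((job_id, ops))
    return job_id

  def GetNextSubmittedJob(self):
    return self._jobs.pop(0)  # raises IndexError once drained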
gpl-2.0
axtra/ansible
lib/ansible/runner/lookup_plugins/together.py
174
2135
# (c) 2013, Bradley Young <young.bradley@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible.  If not, see <http://www.gnu.org/licenses/>.

import ansible.utils as utils
from ansible.utils import safe_eval
import ansible.errors as errors
from itertools import izip_longest


def flatten(terms):
    ret = []
    for term in terms:
        if isinstance(term, list):
            ret.extend(term)
        elif isinstance(term, tuple):
            ret.extend(term)
        else:
            ret.append(term)
    return ret


class LookupModule(object):
    """
    Transpose a list of arrays:
    [1, 2, 3], [4, 5, 6] -> [1, 4], [2, 5], [3, 6]
    Replace any empty spots in 2nd array with None:
    [1, 2], [3] -> [1, 3], [2, None]
    """

    def __init__(self, basedir=None, **kwargs):
        self.basedir = basedir

    def __lookup_injects(self, terms, inject):
        results = []
        for x in terms:
            intermediate = utils.listify_lookup_plugin_terms(x, self.basedir, inject)
            results.append(intermediate)
        return results

    def run(self, terms, inject=None, **kwargs):
        # this code is common with 'items.py' consider moving to utils if we need it again
        terms = utils.listify_lookup_plugin_terms(terms, self.basedir, inject)
        terms = self.__lookup_injects(terms, inject)

        my_list = terms[:]
        if len(my_list) == 0:
            raise errors.AnsibleError("with_together requires at least one element in each list")
        return [flatten(x) for x in izip_longest(*my_list, fillvalue=None)]
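A minimal standalone sketch of the transposition run() performs, using plain Python 2 lists and no Ansible runtime; izip_longest pads the shorter list with None, matching the fillvalue above:

from itertools import izip_longest  # Python 2, as in the plugin

a = [1, 2, 3]
b = [4, 5]
print [list(pair) for pair in izip_longest(a, b, fillvalue=None)]
# -> [[1, 4], [2, 5], [3, None]]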
gpl-3.0
rohitw1991/smarttailorfrappe
frappe/tests/test_db_query.py
35
1520
# Copyright (c) 2013, Web Notes Technologies Pvt. Ltd. and Contributors
# MIT License. See license.txt

import frappe, unittest

from frappe.model.db_query import DatabaseQuery

class TestReportview(unittest.TestCase):
	def test_basic(self):
		self.assertTrue({"name":"DocType"} in DatabaseQuery("DocType").execute(limit_page_length=None))

	def test_fields(self):
		self.assertTrue({"name":"DocType", "issingle":0} \
			in DatabaseQuery("DocType").execute(fields=["name", "issingle"], limit_page_length=None))

	def test_filters_1(self):
		self.assertFalse({"name":"DocType"} \
			in DatabaseQuery("DocType").execute(filters=[["DocType", "name", "like", "J%"]]))

	def test_filters_2(self):
		self.assertFalse({"name":"DocType"} \
			in DatabaseQuery("DocType").execute(filters=[{"name": ["like", "J%"]}]))

	def test_filters_3(self):
		self.assertFalse({"name":"DocType"} \
			in DatabaseQuery("DocType").execute(filters={"name": ["like", "J%"]}))

	def test_filters_4(self):
		self.assertTrue({"name":"DocField"} \
			in DatabaseQuery("DocType").execute(filters={"name": "DocField"}))

	def test_or_filters(self):
		data = DatabaseQuery("DocField").execute(
			filters={"parent": "DocType"}, fields=["fieldname", "fieldtype"],
			or_filters=[{"fieldtype":"Table"}, {"fieldtype":"Select"}])

		self.assertTrue({"fieldtype":"Table", "fieldname":"fields"} in data)
		self.assertTrue({"fieldtype":"Select", "fieldname":"document_type"} in data)
		self.assertFalse({"fieldtype":"Check", "fieldname":"issingle"} in data)
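test_filters_1 through test_filters_3 pass what appear to be three equivalent spellings of the same filter. A hedged sketch of that equivalence (it assumes a bootstrapped Frappe site, which these tests require anyway):

from frappe.model.db_query import DatabaseQuery

r1 = DatabaseQuery("DocType").execute(filters=[["DocType", "name", "like", "J%"]])
r2 = DatabaseQuery("DocType").execute(filters=[{"name": ["like", "J%"]}])
r3 = DatabaseQuery("DocType").execute(filters={"name": ["like", "J%"]})
# All three spellings are expected to produce the same result set.
assert r1 == r2 == r3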
mit
gkoelln/youtube-dl
youtube_dl/extractor/cjsw.py
45
2412
# coding: utf-8
from __future__ import unicode_literals

import re

from .common import InfoExtractor
from ..utils import (
    determine_ext,
    unescapeHTML,
)


class CJSWIE(InfoExtractor):
    _VALID_URL = r'https?://(?:www\.)?cjsw\.com/program/(?P<program>[^/]+)/episode/(?P<id>\d+)'
    _TESTS = [{
        'url': 'http://cjsw.com/program/freshly-squeezed/episode/20170620',
        'md5': 'cee14d40f1e9433632c56e3d14977120',
        'info_dict': {
            'id': '91d9f016-a2e7-46c5-8dcb-7cbcd7437c41',
            'ext': 'mp3',
            'title': 'Freshly Squeezed – Episode June 20, 2017',
            'description': 'md5:c967d63366c3898a80d0c7b0ff337202',
            'series': 'Freshly Squeezed',
            'episode_id': '20170620',
        },
    }, {
        # no description
        'url': 'http://cjsw.com/program/road-pops/episode/20170707/',
        'only_matching': True,
    }]

    def _real_extract(self, url):
        mobj = re.match(self._VALID_URL, url)
        program, episode_id = mobj.group('program', 'id')
        audio_id = '%s/%s' % (program, episode_id)

        webpage = self._download_webpage(url, episode_id)

        title = unescapeHTML(self._search_regex(
            (r'<h1[^>]+class=["\']episode-header__title["\'][^>]*>(?P<title>[^<]+)',
             r'data-audio-title=(["\'])(?P<title>(?:(?!\1).)+)\1'),
            webpage, 'title', group='title'))

        audio_url = self._search_regex(
            r'<button[^>]+data-audio-src=(["\'])(?P<url>(?:(?!\1).)+)\1',
            webpage, 'audio url', group='url')

        audio_id = self._search_regex(
            r'/([\da-f]{8}-[\da-f]{4}-[\da-f]{4}-[\da-f]{4}-[\da-f]{12})\.mp3',
            audio_url, 'audio id', default=audio_id)

        formats = [{
            'url': audio_url,
            'ext': determine_ext(audio_url, 'mp3'),
            'vcodec': 'none',
        }]

        description = self._html_search_regex(
            r'<p>(?P<description>.+?)</p>', webpage, 'description',
            default=None)
        series = self._search_regex(
            r'data-showname=(["\'])(?P<name>(?:(?!\1).)+)\1', webpage,
            'series', default=program, group='name')

        return {
            'id': audio_id,
            'title': title,
            'description': description,
            'formats': formats,
            'series': series,
            'episode_id': episode_id,
        }
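The extractor above leans on the (["\'])(?:(?!\1).)+\1 idiom three times: group 1 captures whichever quote character opens the attribute, (?!\1) forbids that same character inside the value, and the trailing \1 requires a matching closing quote. A standalone illustration against a made-up snippet of HTML:

import re

html = "<button data-audio-src='http://example.com/ep.mp3'>play</button>"
m = re.search(r'data-audio-src=(["\'])(?P<url>(?:(?!\1).)+)\1', html)
print(m.group('url'))  # http://example.com/ep.mp3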
unlicense
jcasner/nupic
external/linux32/lib/python2.6/site-packages/matplotlib/dviread.py
69
29920
""" An experimental module for reading dvi files output by TeX. Several limitations make this not (currently) useful as a general-purpose dvi preprocessor. Interface:: dvi = Dvi(filename, 72) for page in dvi: # iterate over pages w, h, d = page.width, page.height, page.descent for x,y,font,glyph,width in page.text: fontname = font.texname pointsize = font.size ... for x,y,height,width in page.boxes: ... """ import errno import matplotlib import matplotlib.cbook as mpl_cbook import numpy as np import struct import subprocess _dvistate = mpl_cbook.Bunch(pre=0, outer=1, inpage=2, post_post=3, finale=4) class Dvi(object): """ A dvi ("device-independent") file, as produced by TeX. The current implementation only reads the first page and does not even attempt to verify the postamble. """ def __init__(self, filename, dpi): """ Initialize the object. This takes the filename as input and opens the file; actually reading the file happens when iterating through the pages of the file. """ matplotlib.verbose.report('Dvi: ' + filename, 'debug') self.file = open(filename, 'rb') self.dpi = dpi self.fonts = {} self.state = _dvistate.pre def __iter__(self): """ Iterate through the pages of the file. Returns (text, pages) pairs, where: text is a list of (x, y, fontnum, glyphnum, width) tuples boxes is a list of (x, y, height, width) tuples The coordinates are transformed into a standard Cartesian coordinate system at the dpi value given when initializing. The coordinates are floating point numbers, but otherwise precision is not lost and coordinate values are not clipped to integers. """ while True: have_page = self._read() if have_page: yield self._output() else: break def close(self): """ Close the underlying file if it is open. """ if not self.file.closed: self.file.close() def _output(self): """ Output the text and boxes belonging to the most recent page. page = dvi._output() """ minx, miny, maxx, maxy = np.inf, np.inf, -np.inf, -np.inf maxy_pure = -np.inf for elt in self.text + self.boxes: if len(elt) == 4: # box x,y,h,w = elt e = 0 # zero depth else: # glyph x,y,font,g,w = elt h = _mul2012(font._scale, font._tfm.height[g]) e = _mul2012(font._scale, font._tfm.depth[g]) minx = min(minx, x) miny = min(miny, y - h) maxx = max(maxx, x + w) maxy = max(maxy, y + e) maxy_pure = max(maxy_pure, y) if self.dpi is None: # special case for ease of debugging: output raw dvi coordinates return mpl_cbook.Bunch(text=self.text, boxes=self.boxes, width=maxx-minx, height=maxy_pure-miny, descent=maxy-maxy_pure) d = self.dpi / (72.27 * 2**16) # from TeX's "scaled points" to dpi units text = [ ((x-minx)*d, (maxy-y)*d, f, g, w*d) for (x,y,f,g,w) in self.text ] boxes = [ ((x-minx)*d, (maxy-y)*d, h*d, w*d) for (x,y,h,w) in self.boxes ] return mpl_cbook.Bunch(text=text, boxes=boxes, width=(maxx-minx)*d, height=(maxy_pure-miny)*d, descent=(maxy-maxy_pure)*d) def _read(self): """ Read one page from the file. Return True if successful, False if there were no more pages. """ while True: byte = ord(self.file.read(1)) self._dispatch(byte) # if self.state == _dvistate.inpage: # matplotlib.verbose.report( # 'Dvi._read: after %d at %f,%f' % # (byte, self.h, self.v), # 'debug-annoying') if byte == 140: # end of page return True if self.state == _dvistate.post_post: # end of file self.close() return False def _arg(self, nbytes, signed=False): """ Read and return an integer argument "nbytes" long. Signedness is determined by the "signed" keyword. 
""" str = self.file.read(nbytes) value = ord(str[0]) if signed and value >= 0x80: value = value - 0x100 for i in range(1, nbytes): value = 0x100*value + ord(str[i]) return value def _dispatch(self, byte): """ Based on the opcode "byte", read the correct kinds of arguments from the dvi file and call the method implementing that opcode with those arguments. """ if 0 <= byte <= 127: self._set_char(byte) elif byte == 128: self._set_char(self._arg(1)) elif byte == 129: self._set_char(self._arg(2)) elif byte == 130: self._set_char(self._arg(3)) elif byte == 131: self._set_char(self._arg(4, True)) elif byte == 132: self._set_rule(self._arg(4, True), self._arg(4, True)) elif byte == 133: self._put_char(self._arg(1)) elif byte == 134: self._put_char(self._arg(2)) elif byte == 135: self._put_char(self._arg(3)) elif byte == 136: self._put_char(self._arg(4, True)) elif byte == 137: self._put_rule(self._arg(4, True), self._arg(4, True)) elif byte == 138: self._nop() elif byte == 139: self._bop(*[self._arg(4, True) for i in range(11)]) elif byte == 140: self._eop() elif byte == 141: self._push() elif byte == 142: self._pop() elif byte == 143: self._right(self._arg(1, True)) elif byte == 144: self._right(self._arg(2, True)) elif byte == 145: self._right(self._arg(3, True)) elif byte == 146: self._right(self._arg(4, True)) elif byte == 147: self._right_w(None) elif byte == 148: self._right_w(self._arg(1, True)) elif byte == 149: self._right_w(self._arg(2, True)) elif byte == 150: self._right_w(self._arg(3, True)) elif byte == 151: self._right_w(self._arg(4, True)) elif byte == 152: self._right_x(None) elif byte == 153: self._right_x(self._arg(1, True)) elif byte == 154: self._right_x(self._arg(2, True)) elif byte == 155: self._right_x(self._arg(3, True)) elif byte == 156: self._right_x(self._arg(4, True)) elif byte == 157: self._down(self._arg(1, True)) elif byte == 158: self._down(self._arg(2, True)) elif byte == 159: self._down(self._arg(3, True)) elif byte == 160: self._down(self._arg(4, True)) elif byte == 161: self._down_y(None) elif byte == 162: self._down_y(self._arg(1, True)) elif byte == 163: self._down_y(self._arg(2, True)) elif byte == 164: self._down_y(self._arg(3, True)) elif byte == 165: self._down_y(self._arg(4, True)) elif byte == 166: self._down_z(None) elif byte == 167: self._down_z(self._arg(1, True)) elif byte == 168: self._down_z(self._arg(2, True)) elif byte == 169: self._down_z(self._arg(3, True)) elif byte == 170: self._down_z(self._arg(4, True)) elif 171 <= byte <= 234: self._fnt_num(byte-171) elif byte == 235: self._fnt_num(self._arg(1)) elif byte == 236: self._fnt_num(self._arg(2)) elif byte == 237: self._fnt_num(self._arg(3)) elif byte == 238: self._fnt_num(self._arg(4, True)) elif 239 <= byte <= 242: len = self._arg(byte-238) special = self.file.read(len) self._xxx(special) elif 243 <= byte <= 246: k = self._arg(byte-242, byte==246) c, s, d, a, l = [ self._arg(x) for x in (4, 4, 4, 1, 1) ] n = self.file.read(a+l) self._fnt_def(k, c, s, d, a, l, n) elif byte == 247: i, num, den, mag, k = [ self._arg(x) for x in (1, 4, 4, 4, 1) ] x = self.file.read(k) self._pre(i, num, den, mag, x) elif byte == 248: self._post() elif byte == 249: self._post_post() else: raise ValueError, "unknown command: byte %d"%byte def _pre(self, i, num, den, mag, comment): if self.state != _dvistate.pre: raise ValueError, "pre command in middle of dvi file" if i != 2: raise ValueError, "Unknown dvi format %d"%i if num != 25400000 or den != 7227 * 2**16: raise ValueError, "nonstandard units in dvi file" # 
meaning: TeX always uses those exact values, so it # should be enough for us to support those # (There are 72.27 pt to an inch so 7227 pt = # 7227 * 2**16 sp to 100 in. The numerator is multiplied # by 10^5 to get units of 10**-7 meters.) if mag != 1000: raise ValueError, "nonstandard magnification in dvi file" # meaning: LaTeX seems to frown on setting \mag, so # I think we can assume this is constant self.state = _dvistate.outer def _set_char(self, char): if self.state != _dvistate.inpage: raise ValueError, "misplaced set_char in dvi file" self._put_char(char) self.h += self.fonts[self.f]._width_of(char) def _set_rule(self, a, b): if self.state != _dvistate.inpage: raise ValueError, "misplaced set_rule in dvi file" self._put_rule(a, b) self.h += b def _put_char(self, char): if self.state != _dvistate.inpage: raise ValueError, "misplaced put_char in dvi file" font = self.fonts[self.f] if font._vf is None: self.text.append((self.h, self.v, font, char, font._width_of(char))) # matplotlib.verbose.report( # 'Dvi._put_char: %d,%d %d' %(self.h, self.v, char), # 'debug-annoying') else: scale = font._scale for x, y, f, g, w in font._vf[char].text: newf = DviFont(scale=_mul2012(scale, f._scale), tfm=f._tfm, texname=f.texname, vf=f._vf) self.text.append((self.h + _mul2012(x, scale), self.v + _mul2012(y, scale), newf, g, newf._width_of(g))) self.boxes.extend([(self.h + _mul2012(x, scale), self.v + _mul2012(y, scale), _mul2012(a, scale), _mul2012(b, scale)) for x, y, a, b in font._vf[char].boxes]) def _put_rule(self, a, b): if self.state != _dvistate.inpage: raise ValueError, "misplaced put_rule in dvi file" if a > 0 and b > 0: self.boxes.append((self.h, self.v, a, b)) # matplotlib.verbose.report( # 'Dvi._put_rule: %d,%d %d,%d' % (self.h, self.v, a, b), # 'debug-annoying') def _nop(self): pass def _bop(self, c0, c1, c2, c3, c4, c5, c6, c7, c8, c9, p): if self.state != _dvistate.outer: raise ValueError, \ "misplaced bop in dvi file (state %d)" % self.state self.state = _dvistate.inpage self.h, self.v, self.w, self.x, self.y, self.z = 0, 0, 0, 0, 0, 0 self.stack = [] self.text = [] # list of (x,y,fontnum,glyphnum) self.boxes = [] # list of (x,y,width,height) def _eop(self): if self.state != _dvistate.inpage: raise ValueError, "misplaced eop in dvi file" self.state = _dvistate.outer del self.h, self.v, self.w, self.x, self.y, self.z, self.stack def _push(self): if self.state != _dvistate.inpage: raise ValueError, "misplaced push in dvi file" self.stack.append((self.h, self.v, self.w, self.x, self.y, self.z)) def _pop(self): if self.state != _dvistate.inpage: raise ValueError, "misplaced pop in dvi file" self.h, self.v, self.w, self.x, self.y, self.z = self.stack.pop() def _right(self, b): if self.state != _dvistate.inpage: raise ValueError, "misplaced right in dvi file" self.h += b def _right_w(self, new_w): if self.state != _dvistate.inpage: raise ValueError, "misplaced w in dvi file" if new_w is not None: self.w = new_w self.h += self.w def _right_x(self, new_x): if self.state != _dvistate.inpage: raise ValueError, "misplaced x in dvi file" if new_x is not None: self.x = new_x self.h += self.x def _down(self, a): if self.state != _dvistate.inpage: raise ValueError, "misplaced down in dvi file" self.v += a def _down_y(self, new_y): if self.state != _dvistate.inpage: raise ValueError, "misplaced y in dvi file" if new_y is not None: self.y = new_y self.v += self.y def _down_z(self, new_z): if self.state != _dvistate.inpage: raise ValueError, "misplaced z in dvi file" if new_z is not None: self.z = new_z 
self.v += self.z def _fnt_num(self, k): if self.state != _dvistate.inpage: raise ValueError, "misplaced fnt_num in dvi file" self.f = k def _xxx(self, special): matplotlib.verbose.report( 'Dvi._xxx: encountered special: %s' % ''.join([(32 <= ord(ch) < 127) and ch or '<%02x>' % ord(ch) for ch in special]), 'debug') def _fnt_def(self, k, c, s, d, a, l, n): tfm = _tfmfile(n[-l:]) if c != 0 and tfm.checksum != 0 and c != tfm.checksum: raise ValueError, 'tfm checksum mismatch: %s'%n # It seems that the assumption behind the following check is incorrect: #if d != tfm.design_size: # raise ValueError, 'tfm design size mismatch: %d in dvi, %d in %s'%\ # (d, tfm.design_size, n) vf = _vffile(n[-l:]) self.fonts[k] = DviFont(scale=s, tfm=tfm, texname=n, vf=vf) def _post(self): if self.state != _dvistate.outer: raise ValueError, "misplaced post in dvi file" self.state = _dvistate.post_post # TODO: actually read the postamble and finale? # currently post_post just triggers closing the file def _post_post(self): raise NotImplementedError class DviFont(object): """ Object that holds a font's texname and size, supports comparison, and knows the widths of glyphs in the same units as the AFM file. There are also internal attributes (for use by dviread.py) that are _not_ used for comparison. The size is in Adobe points (converted from TeX points). """ __slots__ = ('texname', 'size', 'widths', '_scale', '_vf', '_tfm') def __init__(self, scale, tfm, texname, vf): self._scale, self._tfm, self.texname, self._vf = \ scale, tfm, texname, vf self.size = scale * (72.0 / (72.27 * 2**16)) try: nchars = max(tfm.width.iterkeys()) except ValueError: nchars = 0 self.widths = [ (1000*tfm.width.get(char, 0)) >> 20 for char in range(nchars) ] def __eq__(self, other): return self.__class__ == other.__class__ and \ self.texname == other.texname and self.size == other.size def __ne__(self, other): return not self.__eq__(other) def _width_of(self, char): """ Width of char in dvi units. For internal use by dviread.py. """ width = self._tfm.width.get(char, None) if width is not None: return _mul2012(width, self._scale) matplotlib.verbose.report( 'No width for char %d in font %s' % (char, self.texname), 'debug') return 0 class Vf(Dvi): """ A virtual font (\*.vf file) containing subroutines for dvi files. 
Usage:: vf = Vf(filename) glyph = vf[code] glyph.text, glyph.boxes, glyph.width """ def __init__(self, filename): Dvi.__init__(self, filename, 0) self._first_font = None self._chars = {} self._packet_ends = None self._read() self.close() def __getitem__(self, code): return self._chars[code] def _dispatch(self, byte): # If we are in a packet, execute the dvi instructions if self.state == _dvistate.inpage: byte_at = self.file.tell()-1 if byte_at == self._packet_ends: self._finalize_packet() # fall through elif byte_at > self._packet_ends: raise ValueError, "Packet length mismatch in vf file" else: if byte in (139, 140) or byte >= 243: raise ValueError, "Inappropriate opcode %d in vf file" % byte Dvi._dispatch(self, byte) return # We are outside a packet if byte < 242: # a short packet (length given by byte) cc, tfm = self._arg(1), self._arg(3) self._init_packet(byte, cc, tfm) elif byte == 242: # a long packet pl, cc, tfm = [ self._arg(x) for x in (4, 4, 4) ] self._init_packet(pl, cc, tfm) elif 243 <= byte <= 246: Dvi._dispatch(self, byte) elif byte == 247: # preamble i, k = self._arg(1), self._arg(1) x = self.file.read(k) cs, ds = self._arg(4), self._arg(4) self._pre(i, x, cs, ds) elif byte == 248: # postamble (just some number of 248s) self.state = _dvistate.post_post else: raise ValueError, "unknown vf opcode %d" % byte def _init_packet(self, pl, cc, tfm): if self.state != _dvistate.outer: raise ValueError, "Misplaced packet in vf file" self.state = _dvistate.inpage self._packet_ends = self.file.tell() + pl self._packet_char = cc self._packet_width = tfm self.h, self.v, self.w, self.x, self.y, self.z = 0, 0, 0, 0, 0, 0 self.stack, self.text, self.boxes = [], [], [] self.f = self._first_font def _finalize_packet(self): self._chars[self._packet_char] = mpl_cbook.Bunch( text=self.text, boxes=self.boxes, width = self._packet_width) self.state = _dvistate.outer def _pre(self, i, x, cs, ds): if self.state != _dvistate.pre: raise ValueError, "pre command in middle of vf file" if i != 202: raise ValueError, "Unknown vf format %d" % i if len(x): matplotlib.verbose.report('vf file comment: ' + x, 'debug') self.state = _dvistate.outer # cs = checksum, ds = design size def _fnt_def(self, k, *args): Dvi._fnt_def(self, k, *args) if self._first_font is None: self._first_font = k def _fix2comp(num): """ Convert from two's complement to negative. """ assert 0 <= num < 2**32 if num & 2**31: return num - 2**32 else: return num def _mul2012(num1, num2): """ Multiply two numbers in 20.12 fixed point format. """ # Separated into a function because >> has surprising precedence return (num1*num2) >> 20 class Tfm(object): """ A TeX Font Metric file. This implementation covers only the bare minimum needed by the Dvi class. Attributes: checksum: for verifying against dvi file design_size: design size of the font (in what units?) 
width[i]: width of character \#i, needs to be scaled by the factor specified in the dvi file (this is a dict because indexing may not start from 0) height[i], depth[i]: height and depth of character \#i """ __slots__ = ('checksum', 'design_size', 'width', 'height', 'depth') def __init__(self, filename): matplotlib.verbose.report('opening tfm file ' + filename, 'debug') file = open(filename, 'rb') try: header1 = file.read(24) lh, bc, ec, nw, nh, nd = \ struct.unpack('!6H', header1[2:14]) matplotlib.verbose.report( 'lh=%d, bc=%d, ec=%d, nw=%d, nh=%d, nd=%d' % ( lh, bc, ec, nw, nh, nd), 'debug') header2 = file.read(4*lh) self.checksum, self.design_size = \ struct.unpack('!2I', header2[:8]) # there is also encoding information etc. char_info = file.read(4*(ec-bc+1)) widths = file.read(4*nw) heights = file.read(4*nh) depths = file.read(4*nd) finally: file.close() self.width, self.height, self.depth = {}, {}, {} widths, heights, depths = \ [ struct.unpack('!%dI' % (len(x)/4), x) for x in (widths, heights, depths) ] for i in range(ec-bc): self.width[bc+i] = _fix2comp(widths[ord(char_info[4*i])]) self.height[bc+i] = _fix2comp(heights[ord(char_info[4*i+1]) >> 4]) self.depth[bc+i] = _fix2comp(depths[ord(char_info[4*i+1]) & 0xf]) class PsfontsMap(object): """ A psfonts.map formatted file, mapping TeX fonts to PS fonts. Usage: map = PsfontsMap('.../psfonts.map'); map['cmr10'] For historical reasons, TeX knows many Type-1 fonts by different names than the outside world. (For one thing, the names have to fit in eight characters.) Also, TeX's native fonts are not Type-1 but Metafont, which is nontrivial to convert to PostScript except as a bitmap. While high-quality conversions to Type-1 format exist and are shipped with modern TeX distributions, we need to know which Type-1 fonts are the counterparts of which native fonts. For these reasons a mapping is needed from internal font names to font file names. A texmf tree typically includes mapping files called e.g. psfonts.map, pdftex.map, dvipdfm.map. psfonts.map is used by dvips, pdftex.map by pdfTeX, and dvipdfm.map by dvipdfm. psfonts.map might avoid embedding the 35 PostScript fonts, while the pdf-related files perhaps only avoid the "Base 14" pdf fonts. But the user may have configured these files differently. """ __slots__ = ('_font',) def __init__(self, filename): self._font = {} file = open(filename, 'rt') try: self._parse(file) finally: file.close() def __getitem__(self, texname): result = self._font[texname] fn, enc = result.filename, result.encoding if fn is not None and not fn.startswith('/'): result.filename = find_tex_file(fn) if enc is not None and not enc.startswith('/'): result.encoding = find_tex_file(result.encoding) return result def _parse(self, file): """Parse each line into words.""" for line in file: line = line.strip() if line == '' or line.startswith('%'): continue words, pos = [], 0 while pos < len(line): if line[pos] == '"': # double quoted word pos += 1 end = line.index('"', pos) words.append(line[pos:end]) pos = end + 1 else: # ordinary word end = line.find(' ', pos+1) if end == -1: end = len(line) words.append(line[pos:end]) pos = end while pos < len(line) and line[pos] == ' ': pos += 1 self._register(words) def _register(self, words): """Register a font described by "words". The format is, AFAIK: texname fontname [effects and filenames] Effects are PostScript snippets like ".177 SlantFont", filenames begin with one or two less-than signs. A filename ending in enc is an encoding file, other filenames are font files. 
This can be overridden with a left bracket: <[foobar indicates an encoding file named foobar. There is some difference between <foo.pfb and <<bar.pfb in subsetting, but I have no example of << in my TeX installation. """ texname, psname = words[:2] effects, encoding, filename = [], None, None for word in words[2:]: if not word.startswith('<'): effects.append(word) else: word = word.lstrip('<') if word.startswith('['): assert encoding is None encoding = word[1:] elif word.endswith('.enc'): assert encoding is None encoding = word else: assert filename is None filename = word self._font[texname] = mpl_cbook.Bunch( texname=texname, psname=psname, effects=effects, encoding=encoding, filename=filename) class Encoding(object): """ Parses a \*.enc file referenced from a psfonts.map style file. The format this class understands is a very limited subset of PostScript. Usage (subject to change):: for name in Encoding(filename): whatever(name) """ __slots__ = ('encoding',) def __init__(self, filename): file = open(filename, 'rt') try: matplotlib.verbose.report('Parsing TeX encoding ' + filename, 'debug-annoying') self.encoding = self._parse(file) matplotlib.verbose.report('Result: ' + `self.encoding`, 'debug-annoying') finally: file.close() def __iter__(self): for name in self.encoding: yield name def _parse(self, file): result = [] state = 0 for line in file: comment_start = line.find('%') if comment_start > -1: line = line[:comment_start] line = line.strip() if state == 0: # Expecting something like /FooEncoding [ if '[' in line: state = 1 line = line[line.index('[')+1:].strip() if state == 1: if ']' in line: # ] def line = line[:line.index(']')] state = 2 words = line.split() for w in words: if w.startswith('/'): # Allow for /abc/def/ghi subwords = w.split('/') result.extend(subwords[1:]) else: raise ValueError, "Broken name in encoding file: " + w return result def find_tex_file(filename, format=None): """ Call kpsewhich to find a file in the texmf tree. If format is not None, it is used as the value for the --format option. See the kpathsea documentation for more information. Apparently most existing TeX distributions on Unix-like systems use kpathsea. I hear MikTeX (a popular distribution on Windows) doesn't use kpathsea, so what do we do? (TODO) """ cmd = ['kpsewhich'] if format is not None: cmd += ['--format=' + format] cmd += [filename] matplotlib.verbose.report('find_tex_file(%s): %s' \ % (filename,cmd), 'debug') pipe = subprocess.Popen(cmd, stdout=subprocess.PIPE) result = pipe.communicate()[0].rstrip() matplotlib.verbose.report('find_tex_file result: %s' % result, 'debug') return result def _read_nointr(pipe, bufsize=-1): while True: try: return pipe.read(bufsize) except OSError, e: if e.errno == errno.EINTR: continue else: raise # With multiple text objects per figure (e.g. tick labels) we may end # up reading the same tfm and vf files many times, so we implement a # simple cache. TODO: is this worth making persistent? 
_tfmcache = {}
_vfcache = {}

def _fontfile(texname, class_, suffix, cache):
    try:
        return cache[texname]
    except KeyError:
        pass

    filename = find_tex_file(texname + suffix)
    if filename:
        result = class_(filename)
    else:
        result = None

    cache[texname] = result
    return result

def _tfmfile(texname):
    return _fontfile(texname, Tfm, '.tfm', _tfmcache)

def _vffile(texname):
    return _fontfile(texname, Vf, '.vf', _vfcache)

if __name__ == '__main__':
    import sys
    matplotlib.verbose.set_level('debug-annoying')
    fname = sys.argv[1]
    try:
        dpi = float(sys.argv[2])
    except IndexError:
        dpi = None
    dvi = Dvi(fname, dpi)
    fontmap = PsfontsMap(find_tex_file('pdftex.map'))
    for page in dvi:
        print '=== new page ==='
        fPrev = None
        for x, y, f, c, w in page.text:
            if f != fPrev:
                print 'font', f.texname, 'scaled', f._scale / pow(2.0, 20)
                fPrev = f
            print x, y, c, 32 <= c < 128 and chr(c) or '.', w
        for x, y, w, h in page.boxes:
            print x, y, 'BOX', w, h
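_mul2012 is the workhorse of the metrics code above: tfm fix_words carry 20 fractional bits, so the raw product of two of them carries 40 and must be shifted back down to stay in the same format. A standalone sketch with illustrative numbers (not taken from any real tfm file):

def mul2012(num1, num2):
    # the product of two values with 20 fractional bits has 40; shift back
    return (num1 * num2) >> 20

half = 1 << 19     # 0.5 expressed with 20 fractional bits
three = 3 << 20    # 3.0
assert mul2012(half, three) == 3 << 19   # 1.5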
agpl-3.0
GdZ/scriptfile
software/googleAppEngine/lib/django_1_2/django/http/__init__.py
50
17449
import os import re from Cookie import BaseCookie, SimpleCookie, CookieError from pprint import pformat from urllib import urlencode from urlparse import urljoin try: # The mod_python version is more efficient, so try importing it first. from mod_python.util import parse_qsl except ImportError: from cgi import parse_qsl from django.utils.datastructures import MultiValueDict, ImmutableList from django.utils.encoding import smart_str, iri_to_uri, force_unicode from django.http.multipartparser import MultiPartParser from django.conf import settings from django.core.files import uploadhandler from utils import * RESERVED_CHARS="!*'();:@&=+$,/?%#[]" absolute_http_url_re = re.compile(r"^https?://", re.I) class Http404(Exception): pass class HttpRequest(object): """A basic HTTP request.""" # The encoding used in GET/POST dicts. None means use default setting. _encoding = None _upload_handlers = [] def __init__(self): self.GET, self.POST, self.COOKIES, self.META, self.FILES = {}, {}, {}, {}, {} self.path = '' self.path_info = '' self.method = None def __repr__(self): return '<HttpRequest\nGET:%s,\nPOST:%s,\nCOOKIES:%s,\nMETA:%s>' % \ (pformat(self.GET), pformat(self.POST), pformat(self.COOKIES), pformat(self.META)) def get_host(self): """Returns the HTTP host using the environment or request headers.""" # We try three options, in order of decreasing preference. if 'HTTP_X_FORWARDED_HOST' in self.META: host = self.META['HTTP_X_FORWARDED_HOST'] elif 'HTTP_HOST' in self.META: host = self.META['HTTP_HOST'] else: # Reconstruct the host using the algorithm from PEP 333. host = self.META['SERVER_NAME'] server_port = str(self.META['SERVER_PORT']) if server_port != (self.is_secure() and '443' or '80'): host = '%s:%s' % (host, server_port) return host def get_full_path(self): return '' def build_absolute_uri(self, location=None): """ Builds an absolute URI from the location and the variables available in this request. If no location is specified, the absolute URI is built on ``request.get_full_path()``. """ if not location: location = self.get_full_path() if not absolute_http_url_re.match(location): current_uri = '%s://%s%s' % (self.is_secure() and 'https' or 'http', self.get_host(), self.path) location = urljoin(current_uri, location) return iri_to_uri(location) def is_secure(self): return os.environ.get("HTTPS") == "on" def is_ajax(self): return self.META.get('HTTP_X_REQUESTED_WITH') == 'XMLHttpRequest' def _set_encoding(self, val): """ Sets the encoding used for GET/POST accesses. If the GET or POST dictionary has already been created, it is removed and recreated on the next access (so that it is decoded correctly). """ self._encoding = val if hasattr(self, '_get'): del self._get if hasattr(self, '_post'): del self._post def _get_encoding(self): return self._encoding encoding = property(_get_encoding, _set_encoding) def _initialize_handlers(self): self._upload_handlers = [uploadhandler.load_handler(handler, self) for handler in settings.FILE_UPLOAD_HANDLERS] def _set_upload_handlers(self, upload_handlers): if hasattr(self, '_files'): raise AttributeError("You cannot set the upload handlers after the upload has been processed.") self._upload_handlers = upload_handlers def _get_upload_handlers(self): if not self._upload_handlers: # If thre are no upload handlers defined, initialize them from settings. 
self._initialize_handlers() return self._upload_handlers upload_handlers = property(_get_upload_handlers, _set_upload_handlers) def parse_file_upload(self, META, post_data): """Returns a tuple of (POST QueryDict, FILES MultiValueDict).""" self.upload_handlers = ImmutableList( self.upload_handlers, warning = "You cannot alter upload handlers after the upload has been processed." ) parser = MultiPartParser(META, post_data, self.upload_handlers, self.encoding) return parser.parse() class QueryDict(MultiValueDict): """ A specialized MultiValueDict that takes a query string when initialized. This is immutable unless you create a copy of it. Values retrieved from this class are converted from the given encoding (DEFAULT_CHARSET by default) to unicode. """ # These are both reset in __init__, but is specified here at the class # level so that unpickling will have valid values _mutable = True _encoding = None def __init__(self, query_string, mutable=False, encoding=None): MultiValueDict.__init__(self) if not encoding: # *Important*: do not import settings any earlier because of note # in core.handlers.modpython. from django.conf import settings encoding = settings.DEFAULT_CHARSET self.encoding = encoding for key, value in parse_qsl((query_string or ''), True): # keep_blank_values=True self.appendlist(force_unicode(key, encoding, errors='replace'), force_unicode(value, encoding, errors='replace')) self._mutable = mutable def _get_encoding(self): if self._encoding is None: # *Important*: do not import settings at the module level because # of the note in core.handlers.modpython. from django.conf import settings self._encoding = settings.DEFAULT_CHARSET return self._encoding def _set_encoding(self, value): self._encoding = value encoding = property(_get_encoding, _set_encoding) def _assert_mutable(self): if not self._mutable: raise AttributeError("This QueryDict instance is immutable") def __setitem__(self, key, value): self._assert_mutable() key = str_to_unicode(key, self.encoding) value = str_to_unicode(value, self.encoding) MultiValueDict.__setitem__(self, key, value) def __delitem__(self, key): self._assert_mutable() super(QueryDict, self).__delitem__(key) def __copy__(self): result = self.__class__('', mutable=True, encoding=self.encoding) for key, value in dict.items(self): dict.__setitem__(result, key, value) return result def __deepcopy__(self, memo): import django.utils.copycompat as copy result = self.__class__('', mutable=True, encoding=self.encoding) memo[id(self)] = result for key, value in dict.items(self): dict.__setitem__(result, copy.deepcopy(key, memo), copy.deepcopy(value, memo)) return result def setlist(self, key, list_): self._assert_mutable() key = str_to_unicode(key, self.encoding) list_ = [str_to_unicode(elt, self.encoding) for elt in list_] MultiValueDict.setlist(self, key, list_) def setlistdefault(self, key, default_list=()): self._assert_mutable() if key not in self: self.setlist(key, default_list) return MultiValueDict.getlist(self, key) def appendlist(self, key, value): self._assert_mutable() key = str_to_unicode(key, self.encoding) value = str_to_unicode(value, self.encoding) MultiValueDict.appendlist(self, key, value) def update(self, other_dict): self._assert_mutable() f = lambda s: str_to_unicode(s, self.encoding) if hasattr(other_dict, 'lists'): for key, valuelist in other_dict.lists(): for value in valuelist: MultiValueDict.update(self, {f(key): f(value)}) else: d = dict([(f(k), f(v)) for k, v in other_dict.items()]) MultiValueDict.update(self, d) def pop(self, 
key, *args): self._assert_mutable() return MultiValueDict.pop(self, key, *args) def popitem(self): self._assert_mutable() return MultiValueDict.popitem(self) def clear(self): self._assert_mutable() MultiValueDict.clear(self) def setdefault(self, key, default=None): self._assert_mutable() key = str_to_unicode(key, self.encoding) default = str_to_unicode(default, self.encoding) return MultiValueDict.setdefault(self, key, default) def copy(self): """Returns a mutable copy of this object.""" return self.__deepcopy__({}) def urlencode(self): output = [] for k, list_ in self.lists(): k = smart_str(k, self.encoding) output.extend([urlencode({k: smart_str(v, self.encoding)}) for v in list_]) return '&'.join(output) class CompatCookie(SimpleCookie): """ Cookie class that handles some issues with browser compatibility. """ def value_encode(self, val): # Some browsers do not support quoted-string from RFC 2109, # including some versions of Safari and Internet Explorer. # These browsers split on ';', and some versions of Safari # are known to split on ', '. Therefore, we encode ';' and ',' # SimpleCookie already does the hard work of encoding and decoding. # It uses octal sequences like '\\012' for newline etc. # and non-ASCII chars. We just make use of this mechanism, to # avoid introducing two encoding schemes which would be confusing # and especially awkward for javascript. # NB, contrary to Python docs, value_encode returns a tuple containing # (real val, encoded_val) val, encoded = super(CompatCookie, self).value_encode(val) encoded = encoded.replace(";", "\\073").replace(",","\\054") # If encoded now contains any quoted chars, we need double quotes # around the whole string. if "\\" in encoded and not encoded.startswith('"'): encoded = '"' + encoded + '"' return val, encoded def parse_cookie(cookie): if cookie == '': return {} if not isinstance(cookie, BaseCookie): try: c = CompatCookie() c.load(cookie) except CookieError: # Invalid cookie return {} else: c = cookie cookiedict = {} for key in c.keys(): cookiedict[key] = c.get(key).value return cookiedict class BadHeaderError(ValueError): pass class HttpResponse(object): """A basic HTTP response, with content and dictionary-accessed headers.""" status_code = 200 def __init__(self, content='', mimetype=None, status=None, content_type=None): # _headers is a mapping of the lower-case name to the original case of # the header (required for working with legacy systems) and the header # value. Both the name of the header and its value are ASCII strings. 
self._headers = {} self._charset = settings.DEFAULT_CHARSET if mimetype: content_type = mimetype # For backwards compatibility if not content_type: content_type = "%s; charset=%s" % (settings.DEFAULT_CONTENT_TYPE, self._charset) if not isinstance(content, basestring) and hasattr(content, '__iter__'): self._container = content self._is_string = False else: self._container = [content] self._is_string = True self.cookies = CompatCookie() if status: self.status_code = status self['Content-Type'] = content_type def __str__(self): """Full HTTP message, including headers.""" return '\n'.join(['%s: %s' % (key, value) for key, value in self._headers.values()]) \ + '\n\n' + self.content def _convert_to_ascii(self, *values): """Converts all values to ascii strings.""" for value in values: if isinstance(value, unicode): try: value = value.encode('us-ascii') except UnicodeError, e: e.reason += ', HTTP response headers must be in US-ASCII format' raise else: value = str(value) if '\n' in value or '\r' in value: raise BadHeaderError("Header values can't contain newlines (got %r)" % (value)) yield value def __setitem__(self, header, value): header, value = self._convert_to_ascii(header, value) self._headers[header.lower()] = (header, value) def __delitem__(self, header): try: del self._headers[header.lower()] except KeyError: pass def __getitem__(self, header): return self._headers[header.lower()][1] def has_header(self, header): """Case-insensitive check for a header.""" return self._headers.has_key(header.lower()) __contains__ = has_header def items(self): return self._headers.values() def get(self, header, alternate): return self._headers.get(header.lower(), (None, alternate))[1] def set_cookie(self, key, value='', max_age=None, expires=None, path='/', domain=None, secure=False): self.cookies[key] = value if max_age is not None: self.cookies[key]['max-age'] = max_age if expires is not None: self.cookies[key]['expires'] = expires if path is not None: self.cookies[key]['path'] = path if domain is not None: self.cookies[key]['domain'] = domain if secure: self.cookies[key]['secure'] = True def delete_cookie(self, key, path='/', domain=None): self.set_cookie(key, max_age=0, path=path, domain=domain, expires='Thu, 01-Jan-1970 00:00:00 GMT') def _get_content(self): if self.has_header('Content-Encoding'): return ''.join(self._container) return smart_str(''.join(self._container), self._charset) def _set_content(self, value): self._container = [value] self._is_string = True content = property(_get_content, _set_content) def __iter__(self): self._iterator = iter(self._container) return self def next(self): chunk = self._iterator.next() if isinstance(chunk, unicode): chunk = chunk.encode(self._charset) return str(chunk) def close(self): if hasattr(self._container, 'close'): self._container.close() # The remaining methods partially implement the file-like object interface. 
    # See http://docs.python.org/lib/bltin-file-objects.html
    def write(self, content):
        if not self._is_string:
            raise Exception("This %s instance is not writable" % self.__class__)
        self._container.append(content)

    def flush(self):
        pass

    def tell(self):
        if not self._is_string:
            raise Exception("This %s instance cannot tell its position" % self.__class__)
        return sum([len(chunk) for chunk in self._container])

class HttpResponseRedirect(HttpResponse):
    status_code = 302

    def __init__(self, redirect_to):
        HttpResponse.__init__(self)
        self['Location'] = iri_to_uri(redirect_to)

class HttpResponsePermanentRedirect(HttpResponse):
    status_code = 301

    def __init__(self, redirect_to):
        HttpResponse.__init__(self)
        self['Location'] = iri_to_uri(redirect_to)

class HttpResponseNotModified(HttpResponse):
    status_code = 304

class HttpResponseBadRequest(HttpResponse):
    status_code = 400

class HttpResponseNotFound(HttpResponse):
    status_code = 404

class HttpResponseForbidden(HttpResponse):
    status_code = 403

class HttpResponseNotAllowed(HttpResponse):
    status_code = 405

    def __init__(self, permitted_methods):
        HttpResponse.__init__(self)
        self['Allow'] = ', '.join(permitted_methods)

class HttpResponseGone(HttpResponse):
    status_code = 410

    def __init__(self, *args, **kwargs):
        HttpResponse.__init__(self, *args, **kwargs)

class HttpResponseServerError(HttpResponse):
    status_code = 500

    def __init__(self, *args, **kwargs):
        HttpResponse.__init__(self, *args, **kwargs)

# A backwards compatible alias for HttpRequest.get_host.
def get_host(request):
    return request.get_host()

# It's neither necessary nor appropriate to use
# django.utils.encoding.smart_unicode for parsing URLs and form inputs. Thus,
# this slightly more restricted function.
def str_to_unicode(s, encoding):
    """
    Converts basestring objects to unicode, using the given encoding. Illegally
    encoded input characters are replaced with Unicode "unknown" codepoint
    (\ufffd).

    Returns any non-basestring objects without change.
    """
    if isinstance(s, str):
        return unicode(s, encoding, 'replace')
    else:
        return s
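A minimal usage sketch of the QueryDict semantics defined above: values are multi-valued, and the instance is immutable unless copied. Passing encoding= explicitly sidesteps the need for a configured settings module in this illustration:

from django.http import QueryDict

q = QueryDict('a=1&a=2&b=3', encoding='utf-8')
print q.getlist('a')        # [u'1', u'2']
mutable = q.copy()          # copies are mutable deep copies
mutable['b'] = '4'
# q['b'] = '4' would raise AttributeError: this QueryDict is immutable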
mit
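A minimal usage sketch of the response class above. This is illustrative only: the constructor's acceptance of a content string and an optional status is inferred from the attribute handling shown, not quoted from Django documentation.

resp = HttpResponse('hello world', status=200)
resp['X-Request-Id'] = 'abc123'          # __setitem__ stores headers case-insensitively
resp.set_cookie('session', 'xyz', max_age=3600, secure=True)
assert 'x-request-id' in resp            # has_header/__contains__ ignore case
resp.write(' -- more body')              # legal because content was a plain string
print(str(resp))                         # headers, a blank line, then the body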
avannaldas/QuickView
QuickView/DataFrameVisualizer.py
1
5101
'''
Title: QuickView
Purpose: Provides a Glance at the dataset with one line of code!
GitHub: http://github.com/avannaldas/QuickView
Author: Abhijit Annaldas (Twitter @avannaldas)
'''

import pandas as _pd
import matplotlib.pyplot as _plt
from matplotlib.pyplot import cm

'''Number of rows in the dataframe'''
row_count = -1

'''Number of columns in the dataframe'''
column_count = -1

'''List of numeric column names'''
numeric_column_names = []

'''List of text column names'''
text_column_names = []

'''List of categorical column names'''
categorical_column_names = []

'''Dict of Column names and distinct categorical values'''
categorical_column_values = dict()

'''Dict of Column names and number of distinct categorical values'''
categorical_column_values_count = dict()

'''Pandas dataframe object containing rows with at least one null/na value'''
rows_with_nulls = _pd.DataFrame()

'''Dict of Column names and number of null/na values'''
columnwise_null_values_count = dict()

'''Pandas dataframe object with min, max, mean and std of all numeric columns'''
min_max_mean_std = _pd.DataFrame()

'''Indicates whether the visualization is complete; values for all the member
variables are updated after the visualize() method is complete'''
loaded = False


def visualize(df, print_summaries=True, cat_pthreshold=5, cat_cthreshold=-1):
    # Without these declarations the assignments below would only create
    # function locals, and the module-level summary variables documented
    # above would never be updated after visualize() returns.
    global row_count, column_count, rows_with_nulls, numeric_column_names
    global text_column_names, categorical_column_names
    global categorical_column_values, categorical_column_values_count
    global columnwise_null_values_count, min_max_mean_std, loaded

    row_count = df.shape[0]
    column_count = df.shape[1]

    rows_with_nulls = df[_pd.isnull(df).any(axis=1)]

    numeric_types = ['int16', 'int32', 'int64', 'float16', 'float32',
                     'float64', 'int', 'float', 'long', 'complex']
    numeric_column_names = list(df.select_dtypes(include=numeric_types).columns)
    __text_column_names = list(df.select_dtypes(exclude=numeric_types).columns)

    for c in __text_column_names:
        if (((df[c].nunique() / row_count) * 100) < cat_pthreshold) or (df[c].nunique() < cat_cthreshold):
            categorical_column_names.append(c)
            categorical_column_values[c] = df[c].unique()
            categorical_column_values_count[c] = df[c].nunique()
        else:
            text_column_names.append(c)

    for c in list(df.columns):
        if df[c].isnull().sum() > 0:
            columnwise_null_values_count[c] = df[c].isnull().sum()

    minSumm = df.min(axis=0, skipna=True, numeric_only=True)
    maxSumm = df.max(axis=0, skipna=True, numeric_only=True)
    meanSumm = df.mean(axis=0, skipna=True, numeric_only=True)
    stdSumm = df.std(axis=0, skipna=True, numeric_only=True)
    min_max_mean_std = _pd.DataFrame(
        dict(min=minSumm, max=maxSumm, mean=meanSumm, std=stdSumm),
        index=stdSumm.index)

    loaded = True

    if print_summaries:
        print()
        print('Here is a summary of the Dataset, aka Quick View :P ...')
        print()
        print('Rows count: ' + str(row_count))
        print('Columns count: ' + str(column_count))
        print()
        print('Number of rows having null value(s): ' + str(rows_with_nulls.shape[0]))
        print()
        print('Numeric Columns: ' + (', '.join(numeric_column_names)))
        print()
        print('Categorical Columns: ' + (', '.join(categorical_column_names)))
        print()
        print('Text Columns: ' + (', '.join(text_column_names)))
        print()
        print('Columns with null values...')
        for k, v in columnwise_null_values_count.items():
            print(k + ' : ' + str(v))
        print()
        print('Distinct values in categorical columns...')
        for col, vals in categorical_column_values.items():
            print('{Column Name}: ' + col + ' {Values}: ' + ((', ').join(str(v) for v in vals)))
        print()
        print('Min, Max, Mean and std...')
        print(min_max_mean_std)
        print()

    # START PLOTTING

    # Column-wise plot of null counts.  The dict views are wrapped in list()
    # so matplotlib receives plain sequences rather than dict views.
    _plt.bar(range(len(columnwise_null_values_count)),
             list(columnwise_null_values_count.values()), align='center')
    _plt.xticks(range(len(columnwise_null_values_count)),
                list(columnwise_null_values_count.keys()), rotation='vertical')
    _plt.xlabel('Column names')
    _plt.ylabel('# of Null values')
    _plt.title('Column-wise null value counts')
    _plt.tight_layout()
    _plt.show()
    print()

    # Column-wise plot of unique categorical value counts
    _plt.bar(range(len(categorical_column_values_count)),
             list(categorical_column_values_count.values()), align='center')
    _plt.xticks(range(len(categorical_column_values_count)),
                list(categorical_column_values_count.keys()), rotation='vertical')
    _plt.xlabel('Categorical Column names')
    _plt.ylabel('# of Unique Categorical values')
    _plt.title('Categorical Columns and their unique categorical value counts')
    _plt.tight_layout()
    _plt.show()
    print()

    # Correlation Matrix...
    corr = df.corr()
    fig, ax = _plt.subplots(figsize=(8, 8))
    cb = ax.matshow(corr, interpolation='nearest', cmap=cm.Blues)
    fig.colorbar(cb)
    _plt.xticks(range(len(corr.columns)), corr.columns, rotation='vertical')
    _plt.yticks(range(len(corr.columns)), corr.columns)
    _plt.xlabel('Correlation Matrix')
    _plt.tight_layout()
    _plt.show()
    print()
mit
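A usage sketch for the module above. The import path is an assumption based on the file's location in the repository (QuickView/DataFrameVisualizer.py); any pandas DataFrame works as input.

import pandas as pd
from QuickView import DataFrameVisualizer as qv   # assumed import path

df = pd.read_csv('data.csv')                      # hypothetical input file
qv.visualize(df, print_summaries=True, cat_pthreshold=5)
# With the global declarations in visualize(), the module-level summaries
# are populated after the call:
print(qv.row_count, qv.column_count)
print(qv.categorical_column_names)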
Teamxrtc/webrtc-streaming-node
third_party/depot_tools/external_bin/gsutil/gsutil_4.15/gsutil/third_party/boto/boto/gs/acl.py
184
11457
# Copyright 2010 Google Inc. # # Permission is hereby granted, free of charge, to any person obtaining a # copy of this software and associated documentation files (the # "Software"), to deal in the Software without restriction, including # without limitation the rights to use, copy, modify, merge, publish, dis- # tribute, sublicense, and/or sell copies of the Software, and to permit # persons to whom the Software is furnished to do so, subject to the fol- # lowing conditions: # # The above copyright notice and this permission notice shall be included # in all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS # OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL- # ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT # SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, # WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS # IN THE SOFTWARE. from boto.gs.user import User from boto.exception import InvalidAclError ACCESS_CONTROL_LIST = 'AccessControlList' ALL_AUTHENTICATED_USERS = 'AllAuthenticatedUsers' ALL_USERS = 'AllUsers' DISPLAY_NAME = 'DisplayName' DOMAIN = 'Domain' EMAIL_ADDRESS = 'EmailAddress' ENTRY = 'Entry' ENTRIES = 'Entries' GROUP_BY_DOMAIN = 'GroupByDomain' GROUP_BY_EMAIL = 'GroupByEmail' GROUP_BY_ID = 'GroupById' ID = 'ID' NAME = 'Name' OWNER = 'Owner' PERMISSION = 'Permission' SCOPE = 'Scope' TYPE = 'type' USER_BY_EMAIL = 'UserByEmail' USER_BY_ID = 'UserById' CannedACLStrings = ['private', 'public-read', 'project-private', 'public-read-write', 'authenticated-read', 'bucket-owner-read', 'bucket-owner-full-control'] """A list of Google Cloud Storage predefined (canned) ACL strings.""" SupportedPermissions = ['READ', 'WRITE', 'FULL_CONTROL'] """A list of supported ACL permissions.""" class ACL(object): def __init__(self, parent=None): self.parent = parent self.entries = Entries(self) @property def acl(self): return self def __repr__(self): # Owner is optional in GS ACLs. if hasattr(self, 'owner'): entries_repr = ['Owner:%s' % self.owner.__repr__()] else: entries_repr = [''] acl_entries = self.entries if acl_entries: for e in acl_entries.entry_list: entries_repr.append(e.__repr__()) return '<%s>' % ', '.join(entries_repr) # Method with same signature as boto.s3.acl.ACL.add_email_grant(), to allow # polymorphic treatment at application layer. def add_email_grant(self, permission, email_address): entry = Entry(type=USER_BY_EMAIL, email_address=email_address, permission=permission) self.entries.entry_list.append(entry) # Method with same signature as boto.s3.acl.ACL.add_user_grant(), to allow # polymorphic treatment at application layer. 
def add_user_grant(self, permission, user_id): entry = Entry(permission=permission, type=USER_BY_ID, id=user_id) self.entries.entry_list.append(entry) def add_group_email_grant(self, permission, email_address): entry = Entry(type=GROUP_BY_EMAIL, email_address=email_address, permission=permission) self.entries.entry_list.append(entry) def add_group_grant(self, permission, group_id): entry = Entry(type=GROUP_BY_ID, id=group_id, permission=permission) self.entries.entry_list.append(entry) def startElement(self, name, attrs, connection): if name.lower() == OWNER.lower(): self.owner = User(self) return self.owner elif name.lower() == ENTRIES.lower(): self.entries = Entries(self) return self.entries else: return None def endElement(self, name, value, connection): if name.lower() == OWNER.lower(): pass elif name.lower() == ENTRIES.lower(): pass else: setattr(self, name, value) def to_xml(self): s = '<%s>' % ACCESS_CONTROL_LIST # Owner is optional in GS ACLs. if hasattr(self, 'owner'): s += self.owner.to_xml() acl_entries = self.entries if acl_entries: s += acl_entries.to_xml() s += '</%s>' % ACCESS_CONTROL_LIST return s class Entries(object): def __init__(self, parent=None): self.parent = parent # Entries is the class that represents the same-named XML # element. entry_list is the list within this class that holds the data. self.entry_list = [] def __repr__(self): entries_repr = [] for e in self.entry_list: entries_repr.append(e.__repr__()) return '<Entries: %s>' % ', '.join(entries_repr) def startElement(self, name, attrs, connection): if name.lower() == ENTRY.lower(): entry = Entry(self) self.entry_list.append(entry) return entry else: return None def endElement(self, name, value, connection): if name.lower() == ENTRY.lower(): pass else: setattr(self, name, value) def to_xml(self): if not self.entry_list: return '' s = '<%s>' % ENTRIES for entry in self.entry_list: s += entry.to_xml() s += '</%s>' % ENTRIES return s # Class that represents a single (Scope, Permission) entry in an ACL. class Entry(object): def __init__(self, scope=None, type=None, id=None, name=None, email_address=None, domain=None, permission=None): if not scope: scope = Scope(self, type, id, name, email_address, domain) self.scope = scope self.permission = permission def __repr__(self): return '<%s: %s>' % (self.scope.__repr__(), self.permission.__repr__()) def startElement(self, name, attrs, connection): if name.lower() == SCOPE.lower(): # The following if statement used to look like this: # if not TYPE in attrs: # which caused problems because older versions of the # AttributesImpl class in the xml.sax library neglected to include # a __contains__() method (which Python calls to implement the # 'in' operator). So when you use the in operator, like the if # statement above, Python invokes the __getiter__() method with # index 0, which raises an exception. More recent versions of # xml.sax include the __contains__() method, rendering the in # operator functional. The work-around here is to formulate the # if statement as below, which is the legal way to query # AttributesImpl for containment (and is also how the added # __contains__() method works). At one time gsutil disallowed # xmlplus-based parsers, until this more specific problem was # determined. 
            if TYPE not in attrs:
                raise InvalidAclError('Missing "%s" in "%s" part of ACL' %
                                      (TYPE, SCOPE))
            self.scope = Scope(self, attrs[TYPE])
            return self.scope
        elif name.lower() == PERMISSION.lower():
            pass
        else:
            return None

    def endElement(self, name, value, connection):
        if name.lower() == SCOPE.lower():
            pass
        elif name.lower() == PERMISSION.lower():
            value = value.strip()
            if value not in SupportedPermissions:
                raise InvalidAclError('Invalid Permission "%s"' % value)
            self.permission = value
        else:
            setattr(self, name, value)

    def to_xml(self):
        s = '<%s>' % ENTRY
        s += self.scope.to_xml()
        s += '<%s>%s</%s>' % (PERMISSION, self.permission, PERMISSION)
        s += '</%s>' % ENTRY
        return s

class Scope(object):
    # Map from Scope type.lower() to lower-cased list of allowed sub-elems.
    ALLOWED_SCOPE_TYPE_SUB_ELEMS = {
        ALL_AUTHENTICATED_USERS.lower(): [],
        ALL_USERS.lower(): [],
        GROUP_BY_DOMAIN.lower(): [DOMAIN.lower()],
        GROUP_BY_EMAIL.lower(): [
            DISPLAY_NAME.lower(), EMAIL_ADDRESS.lower(), NAME.lower()],
        GROUP_BY_ID.lower(): [DISPLAY_NAME.lower(), ID.lower(), NAME.lower()],
        USER_BY_EMAIL.lower(): [
            DISPLAY_NAME.lower(), EMAIL_ADDRESS.lower(), NAME.lower()],
        USER_BY_ID.lower(): [DISPLAY_NAME.lower(), ID.lower(), NAME.lower()]
    }

    def __init__(self, parent, type=None, id=None, name=None,
                 email_address=None, domain=None):
        self.parent = parent
        self.type = type
        self.name = name
        self.id = id
        self.domain = domain
        self.email_address = email_address
        if self.type.lower() not in self.ALLOWED_SCOPE_TYPE_SUB_ELEMS:
            raise InvalidAclError('Invalid %s %s "%s" ' %
                                  (SCOPE, TYPE, self.type))

    def __repr__(self):
        named_entity = None
        if self.id:
            named_entity = self.id
        elif self.email_address:
            named_entity = self.email_address
        elif self.domain:
            named_entity = self.domain
        if named_entity:
            return '<%s: %s>' % (self.type, named_entity)
        else:
            return '<%s>' % self.type

    def startElement(self, name, attrs, connection):
        if (name.lower() not in
                self.ALLOWED_SCOPE_TYPE_SUB_ELEMS[self.type.lower()]):
            raise InvalidAclError('Element "%s" not allowed in %s %s "%s" ' %
                                  (name, SCOPE, TYPE, self.type))
        return None

    def endElement(self, name, value, connection):
        value = value.strip()
        if name.lower() == DOMAIN.lower():
            self.domain = value
        elif name.lower() == EMAIL_ADDRESS.lower():
            self.email_address = value
        elif name.lower() == ID.lower():
            self.id = value
        elif name.lower() == NAME.lower():
            self.name = value
        else:
            setattr(self, name, value)

    def to_xml(self):
        s = '<%s type="%s">' % (SCOPE, self.type)
        if (self.type.lower() == ALL_AUTHENTICATED_USERS.lower() or
                self.type.lower() == ALL_USERS.lower()):
            pass
        elif self.type.lower() == GROUP_BY_DOMAIN.lower():
            s += '<%s>%s</%s>' % (DOMAIN, self.domain, DOMAIN)
        elif (self.type.lower() == GROUP_BY_EMAIL.lower() or
                self.type.lower() == USER_BY_EMAIL.lower()):
            s += '<%s>%s</%s>' % (EMAIL_ADDRESS, self.email_address,
                                  EMAIL_ADDRESS)
            if self.name:
                s += '<%s>%s</%s>' % (NAME, self.name, NAME)
        elif (self.type.lower() == GROUP_BY_ID.lower() or
                self.type.lower() == USER_BY_ID.lower()):
            s += '<%s>%s</%s>' % (ID, self.id, ID)
            if self.name:
                s += '<%s>%s</%s>' % (NAME, self.name, NAME)
        else:
            # The scope type was previously passed as a second argument
            # instead of being %-formatted into the message, so the "%s"
            # placeholder was never filled in.
            raise InvalidAclError('Invalid scope type "%s"' % self.type)
        s += '</%s>' % SCOPE
        return s
mit
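A short sketch exercising only the classes defined in this file; the grant helpers and permission strings come from the constants and methods above, and the e-mail address and group id are placeholders.

from boto.gs.acl import ACL

acl = ACL()
acl.add_email_grant('READ', 'reader@example.com')    # UserByEmail scope
acl.add_group_grant('FULL_CONTROL', 'group-id-123')  # GroupById scope
print(acl.to_xml())
# <AccessControlList><Entries><Entry><Scope type="UserByEmail">...</Scope>
# <Permission>READ</Permission></Entry>...</Entries></AccessControlList>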
yodalee/servo
tests/wpt/web-platform-tests/tools/pytest/testing/code/test_source.py
171
17821
# flake8: noqa # disable flake check on this file because some constructs are strange # or redundant on purpose and can't be disable on a line-by-line basis import sys import _pytest._code import py import pytest from _pytest._code import Source from _pytest._code.source import _ast if _ast is not None: astonly = pytest.mark.nothing else: astonly = pytest.mark.xfail("True", reason="only works with AST-compile") failsonjython = pytest.mark.xfail("sys.platform.startswith('java')") def test_source_str_function(): x = Source("3") assert str(x) == "3" x = Source(" 3") assert str(x) == "3" x = Source(""" 3 """, rstrip=False) assert str(x) == "\n3\n " x = Source(""" 3 """, rstrip=True) assert str(x) == "\n3" def test_unicode(): try: unicode except NameError: return x = Source(unicode("4")) assert str(x) == "4" co = _pytest._code.compile(unicode('u"\xc3\xa5"', 'utf8'), mode='eval') val = eval(co) assert isinstance(val, unicode) def test_source_from_function(): source = _pytest._code.Source(test_source_str_function) assert str(source).startswith('def test_source_str_function():') def test_source_from_method(): class TestClass: def test_method(self): pass source = _pytest._code.Source(TestClass().test_method) assert source.lines == ["def test_method(self):", " pass"] def test_source_from_lines(): lines = ["a \n", "b\n", "c"] source = _pytest._code.Source(lines) assert source.lines == ['a ', 'b', 'c'] def test_source_from_inner_function(): def f(): pass source = _pytest._code.Source(f, deindent=False) assert str(source).startswith(' def f():') source = _pytest._code.Source(f) assert str(source).startswith('def f():') def test_source_putaround_simple(): source = Source("raise ValueError") source = source.putaround( "try:", """\ except ValueError: x = 42 else: x = 23""") assert str(source)=="""\ try: raise ValueError except ValueError: x = 42 else: x = 23""" def test_source_putaround(): source = Source() source = source.putaround(""" if 1: x=1 """) assert str(source).strip() == "if 1:\n x=1" def test_source_strips(): source = Source("") assert source == Source() assert str(source) == '' assert source.strip() == source def test_source_strip_multiline(): source = Source() source.lines = ["", " hello", " "] source2 = source.strip() assert source2.lines == [" hello"] def test_syntaxerror_rerepresentation(): ex = pytest.raises(SyntaxError, _pytest._code.compile, 'xyz xyz') assert ex.value.lineno == 1 assert ex.value.offset in (4,7) # XXX pypy/jython versus cpython? 
assert ex.value.text.strip(), 'x x' def test_isparseable(): assert Source("hello").isparseable() assert Source("if 1:\n pass").isparseable() assert Source(" \nif 1:\n pass").isparseable() assert not Source("if 1:\n").isparseable() assert not Source(" \nif 1:\npass").isparseable() assert not Source(chr(0)).isparseable() class TestAccesses: source = Source("""\ def f(x): pass def g(x): pass """) def test_getrange(self): x = self.source[0:2] assert x.isparseable() assert len(x.lines) == 2 assert str(x) == "def f(x):\n pass" def test_getline(self): x = self.source[0] assert x == "def f(x):" def test_len(self): assert len(self.source) == 4 def test_iter(self): l = [x for x in self.source] assert len(l) == 4 class TestSourceParsingAndCompiling: source = Source("""\ def f(x): assert (x == 3 + 4) """).strip() def test_compile(self): co = _pytest._code.compile("x=3") d = {} exec (co, d) assert d['x'] == 3 def test_compile_and_getsource_simple(self): co = _pytest._code.compile("x=3") exec (co) source = _pytest._code.Source(co) assert str(source) == "x=3" def test_compile_and_getsource_through_same_function(self): def gensource(source): return _pytest._code.compile(source) co1 = gensource(""" def f(): raise KeyError() """) co2 = gensource(""" def f(): raise ValueError() """) source1 = py.std.inspect.getsource(co1) assert 'KeyError' in source1 source2 = py.std.inspect.getsource(co2) assert 'ValueError' in source2 def test_getstatement(self): #print str(self.source) ass = str(self.source[1:]) for i in range(1, 4): #print "trying start in line %r" % self.source[i] s = self.source.getstatement(i) #x = s.deindent() assert str(s) == ass def test_getstatementrange_triple_quoted(self): #print str(self.source) source = Source("""hello(''' ''')""") s = source.getstatement(0) assert s == str(source) s = source.getstatement(1) assert s == str(source) @astonly def test_getstatementrange_within_constructs(self): source = Source("""\ try: try: raise ValueError except SomeThing: pass finally: 42 """) assert len(source) == 7 # check all lineno's that could occur in a traceback #assert source.getstatementrange(0) == (0, 7) #assert source.getstatementrange(1) == (1, 5) assert source.getstatementrange(2) == (2, 3) assert source.getstatementrange(3) == (3, 4) assert source.getstatementrange(4) == (4, 5) #assert source.getstatementrange(5) == (0, 7) assert source.getstatementrange(6) == (6, 7) def test_getstatementrange_bug(self): source = Source("""\ try: x = ( y + z) except: pass """) assert len(source) == 6 assert source.getstatementrange(2) == (1, 4) def test_getstatementrange_bug2(self): source = Source("""\ assert ( 33 == [ X(3, b=1, c=2 ), ] ) """) assert len(source) == 9 assert source.getstatementrange(5) == (0, 9) def test_getstatementrange_ast_issue58(self): source = Source("""\ def test_some(): for a in [a for a in CAUSE_ERROR]: pass x = 3 """) assert getstatement(2, source).lines == source.lines[2:3] assert getstatement(3, source).lines == source.lines[3:4] @pytest.mark.skipif("sys.version_info < (2,6)") def test_getstatementrange_out_of_bounds_py3(self): source = Source("if xxx:\n from .collections import something") r = source.getstatementrange(1) assert r == (1,2) def test_getstatementrange_with_syntaxerror_issue7(self): source = Source(":") pytest.raises(SyntaxError, lambda: source.getstatementrange(0)) @pytest.mark.skipif("sys.version_info < (2,6)") def test_compile_to_ast(self): import ast source = Source("x = 4") mod = source.compile(flag=ast.PyCF_ONLY_AST) assert isinstance(mod, ast.Module) 
compile(mod, "<filename>", "exec") def test_compile_and_getsource(self): co = self.source.compile() py.builtin.exec_(co, globals()) f(7) excinfo = pytest.raises(AssertionError, "f(6)") frame = excinfo.traceback[-1].frame stmt = frame.code.fullsource.getstatement(frame.lineno) #print "block", str(block) assert str(stmt).strip().startswith('assert') def test_compilefuncs_and_path_sanity(self): def check(comp, name): co = comp(self.source, name) if not name: expected = "codegen %s:%d>" %(mypath, mylineno+2+1) else: expected = "codegen %r %s:%d>" % (name, mypath, mylineno+2+1) fn = co.co_filename assert fn.endswith(expected) mycode = _pytest._code.Code(self.test_compilefuncs_and_path_sanity) mylineno = mycode.firstlineno mypath = mycode.path for comp in _pytest._code.compile, _pytest._code.Source.compile: for name in '', None, 'my': yield check, comp, name def test_offsetless_synerr(self): pytest.raises(SyntaxError, _pytest._code.compile, "lambda a,a: 0", mode='eval') def test_getstartingblock_singleline(): class A: def __init__(self, *args): frame = sys._getframe(1) self.source = _pytest._code.Frame(frame).statement x = A('x', 'y') l = [i for i in x.source.lines if i.strip()] assert len(l) == 1 def test_getstartingblock_multiline(): class A: def __init__(self, *args): frame = sys._getframe(1) self.source = _pytest._code.Frame(frame).statement x = A('x', 'y' \ , 'z') l = [i for i in x.source.lines if i.strip()] assert len(l) == 4 def test_getline_finally(): def c(): pass excinfo = pytest.raises(TypeError, """ teardown = None try: c(1) finally: if teardown: teardown() """) source = excinfo.traceback[-1].statement assert str(source).strip() == 'c(1)' def test_getfuncsource_dynamic(): source = """ def f(): raise ValueError def g(): pass """ co = _pytest._code.compile(source) py.builtin.exec_(co, globals()) assert str(_pytest._code.Source(f)).strip() == 'def f():\n raise ValueError' assert str(_pytest._code.Source(g)).strip() == 'def g(): pass' def test_getfuncsource_with_multine_string(): def f(): c = '''while True: pass ''' assert str(_pytest._code.Source(f)).strip() == "def f():\n c = '''while True:\n pass\n'''" def test_deindent(): from _pytest._code.source import deindent as deindent assert deindent(['\tfoo', '\tbar', ]) == ['foo', 'bar'] def f(): c = '''while True: pass ''' import inspect lines = deindent(inspect.getsource(f).splitlines()) assert lines == ["def f():", " c = '''while True:", " pass", "'''"] source = """ def f(): def g(): pass """ lines = deindent(source.splitlines()) assert lines == ['', 'def f():', ' def g():', ' pass', ' '] @pytest.mark.xfail("sys.version_info[:3] < (2,7,0) or " "((3,0) <= sys.version_info[:2] < (3,2))") def test_source_of_class_at_eof_without_newline(tmpdir): # this test fails because the implicit inspect.getsource(A) below # does not return the "x = 1" last line. 
source = _pytest._code.Source(''' class A(object): def method(self): x = 1 ''') path = tmpdir.join("a.py") path.write(source) s2 = _pytest._code.Source(tmpdir.join("a.py").pyimport().A) assert str(source).strip() == str(s2).strip() if True: def x(): pass def test_getsource_fallback(): from _pytest._code.source import getsource expected = """def x(): pass""" src = getsource(x) assert src == expected def test_idem_compile_and_getsource(): from _pytest._code.source import getsource expected = "def x(): pass" co = _pytest._code.compile(expected) src = getsource(co) assert src == expected def test_findsource_fallback(): from _pytest._code.source import findsource src, lineno = findsource(x) assert 'test_findsource_simple' in str(src) assert src[lineno] == ' def x():' def test_findsource(): from _pytest._code.source import findsource co = _pytest._code.compile("""if 1: def x(): pass """) src, lineno = findsource(co) assert 'if 1:' in str(src) d = {} eval(co, d) src, lineno = findsource(d['x']) assert 'if 1:' in str(src) assert src[lineno] == " def x():" def test_getfslineno(): from _pytest._code import getfslineno def f(x): pass fspath, lineno = getfslineno(f) assert fspath.basename == "test_source.py" assert lineno == _pytest._code.getrawcode(f).co_firstlineno - 1 # see findsource class A(object): pass fspath, lineno = getfslineno(A) _, A_lineno = py.std.inspect.findsource(A) assert fspath.basename == "test_source.py" assert lineno == A_lineno assert getfslineno(3) == ("", -1) class B: pass B.__name__ = "B2" assert getfslineno(B)[1] == -1 def test_code_of_object_instance_with_call(): class A: pass pytest.raises(TypeError, lambda: _pytest._code.Source(A())) class WithCall: def __call__(self): pass code = _pytest._code.Code(WithCall()) assert 'pass' in str(code.source()) class Hello(object): def __call__(self): pass pytest.raises(TypeError, lambda: _pytest._code.Code(Hello)) def getstatement(lineno, source): from _pytest._code.source import getstatementrange_ast source = _pytest._code.Source(source, deindent=False) ast, start, end = getstatementrange_ast(lineno, source) return source[start:end] def test_oneline(): source = getstatement(0, "raise ValueError") assert str(source) == "raise ValueError" def test_comment_and_no_newline_at_end(): from _pytest._code.source import getstatementrange_ast source = Source(['def test_basic_complex():', ' assert 1 == 2', '# vim: filetype=pyopencl:fdm=marker']) ast, start, end = getstatementrange_ast(1, source) assert end == 2 def test_oneline_and_comment(): source = getstatement(0, "raise ValueError\n#hello") assert str(source) == "raise ValueError" @pytest.mark.xfail(hasattr(sys, "pypy_version_info"), reason='does not work on pypy') def test_comments(): source = '''def test(): "comment 1" x = 1 # comment 2 # comment 3 assert False """ comment 4 """ ''' for line in range(2,6): assert str(getstatement(line, source)) == ' x = 1' for line in range(6,10): assert str(getstatement(line, source)) == ' assert False' assert str(getstatement(10, source)) == '"""' def test_comment_in_statement(): source = '''test(foo=1, # comment 1 bar=2) ''' for line in range(1,3): assert str(getstatement(line, source)) == \ 'test(foo=1,\n # comment 1\n bar=2)' def test_single_line_else(): source = getstatement(1, "if False: 2\nelse: 3") assert str(source) == "else: 3" def test_single_line_finally(): source = getstatement(1, "try: 1\nfinally: 3") assert str(source) == "finally: 3" def test_issue55(): source = ('def round_trip(dinp):\n assert 1 == dinp\n' 'def test_rt():\n 
round_trip("""\n""")\n') s = getstatement(3, source) assert str(s) == ' round_trip("""\n""")' def XXXtest_multiline(): source = getstatement(0, """\ raise ValueError( 23 ) x = 3 """) assert str(source) == "raise ValueError(\n 23\n)" class TestTry: pytestmark = astonly source = """\ try: raise ValueError except Something: raise IndexError(1) else: raise KeyError() """ def test_body(self): source = getstatement(1, self.source) assert str(source) == " raise ValueError" def test_except_line(self): source = getstatement(2, self.source) assert str(source) == "except Something:" def test_except_body(self): source = getstatement(3, self.source) assert str(source) == " raise IndexError(1)" def test_else(self): source = getstatement(5, self.source) assert str(source) == " raise KeyError()" class TestTryFinally: source = """\ try: raise ValueError finally: raise IndexError(1) """ def test_body(self): source = getstatement(1, self.source) assert str(source) == " raise ValueError" def test_finally(self): source = getstatement(3, self.source) assert str(source) == " raise IndexError(1)" class TestIf: pytestmark = astonly source = """\ if 1: y = 3 elif False: y = 5 else: y = 7 """ def test_body(self): source = getstatement(1, self.source) assert str(source) == " y = 3" def test_elif_clause(self): source = getstatement(2, self.source) assert str(source) == "elif False:" def test_elif(self): source = getstatement(3, self.source) assert str(source) == " y = 5" def test_else(self): source = getstatement(5, self.source) assert str(source) == " y = 7" def test_semicolon(): s = """\ hello ; pytest.skip() """ source = getstatement(0, s) assert str(source) == s.strip() def test_def_online(): s = """\ def func(): raise ValueError(42) def something(): pass """ source = getstatement(0, s) assert str(source) == "def func(): raise ValueError(42)" def XXX_test_expression_multiline(): source = """\ something ''' '''""" result = getstatement(1, source) assert str(result) == "'''\n'''"
mpl-2.0
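For orientation, a sketch of the Source API these tests exercise, grounded in the TestAccesses cases above. Note that _pytest._code is pytest-internal, not a stable public API.

from _pytest._code import Source

s = Source("def f(x):\n    pass\ndef g(x):\n    pass")
assert s.isparseable()
assert len(s) == 4               # one entry per source line
two = s[0:2]                     # slicing returns a new Source
assert str(two) == "def f(x):\n    pass"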
bgris/ODL_bgris
lib/python3.5/site-packages/pip/_vendor/requests/packages/urllib3/fields.py
514
5931
from __future__ import absolute_import import email.utils import mimetypes from .packages import six def guess_content_type(filename, default='application/octet-stream'): """ Guess the "Content-Type" of a file. :param filename: The filename to guess the "Content-Type" of using :mod:`mimetypes`. :param default: If no "Content-Type" can be guessed, default to `default`. """ if filename: return mimetypes.guess_type(filename)[0] or default return default def format_header_param(name, value): """ Helper function to format and quote a single header parameter. Particularly useful for header parameters which might contain non-ASCII values, like file names. This follows RFC 2231, as suggested by RFC 2388 Section 4.4. :param name: The name of the parameter, a string expected to be ASCII only. :param value: The value of the parameter, provided as a unicode string. """ if not any(ch in value for ch in '"\\\r\n'): result = '%s="%s"' % (name, value) try: result.encode('ascii') except (UnicodeEncodeError, UnicodeDecodeError): pass else: return result if not six.PY3 and isinstance(value, six.text_type): # Python 2: value = value.encode('utf-8') value = email.utils.encode_rfc2231(value, 'utf-8') value = '%s*=%s' % (name, value) return value class RequestField(object): """ A data container for request body parameters. :param name: The name of this request field. :param data: The data/value body. :param filename: An optional filename of the request field. :param headers: An optional dict-like object of headers to initially use for the field. """ def __init__(self, name, data, filename=None, headers=None): self._name = name self._filename = filename self.data = data self.headers = {} if headers: self.headers = dict(headers) @classmethod def from_tuples(cls, fieldname, value): """ A :class:`~urllib3.fields.RequestField` factory from old-style tuple parameters. Supports constructing :class:`~urllib3.fields.RequestField` from parameter of key/value strings AND key/filetuple. A filetuple is a (filename, data, MIME type) tuple where the MIME type is optional. For example:: 'foo': 'bar', 'fakefile': ('foofile.txt', 'contents of foofile'), 'realfile': ('barfile.txt', open('realfile').read()), 'typedfile': ('bazfile.bin', open('bazfile').read(), 'image/jpeg'), 'nonamefile': 'contents of nonamefile field', Field names and filenames must be unicode. """ if isinstance(value, tuple): if len(value) == 3: filename, data, content_type = value else: filename, data = value content_type = guess_content_type(filename) else: filename = None content_type = None data = value request_param = cls(fieldname, data, filename=filename) request_param.make_multipart(content_type=content_type) return request_param def _render_part(self, name, value): """ Overridable helper function to format a single header parameter. :param name: The name of the parameter, a string expected to be ASCII only. :param value: The value of the parameter, provided as a unicode string. """ return format_header_param(name, value) def _render_parts(self, header_parts): """ Helper function to format and quote a single header. Useful for single headers that are composed of multiple items. E.g., 'Content-Disposition' fields. :param header_parts: A sequence of (k, v) typles or a :class:`dict` of (k, v) to format as `k1="v1"; k2="v2"; ...`. 
""" parts = [] iterable = header_parts if isinstance(header_parts, dict): iterable = header_parts.items() for name, value in iterable: if value: parts.append(self._render_part(name, value)) return '; '.join(parts) def render_headers(self): """ Renders the headers for this request field. """ lines = [] sort_keys = ['Content-Disposition', 'Content-Type', 'Content-Location'] for sort_key in sort_keys: if self.headers.get(sort_key, False): lines.append('%s: %s' % (sort_key, self.headers[sort_key])) for header_name, header_value in self.headers.items(): if header_name not in sort_keys: if header_value: lines.append('%s: %s' % (header_name, header_value)) lines.append('\r\n') return '\r\n'.join(lines) def make_multipart(self, content_disposition=None, content_type=None, content_location=None): """ Makes this request field into a multipart request field. This method overrides "Content-Disposition", "Content-Type" and "Content-Location" headers to the request parameter. :param content_type: The 'Content-Type' of the request body. :param content_location: The 'Content-Location' of the request body. """ self.headers['Content-Disposition'] = content_disposition or 'form-data' self.headers['Content-Disposition'] += '; '.join([ '', self._render_parts( (('name', self._name), ('filename', self._filename)) ) ]) self.headers['Content-Type'] = content_type self.headers['Content-Location'] = content_location
gpl-3.0
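A sketch of the factory and rendering methods defined above; the filename and byte payload are placeholders. from_tuples() accepts a (filename, data, content_type) triple and calls make_multipart() for you.

from urllib3.fields import RequestField

field = RequestField.from_tuples('avatar', ('photo.png', b'<file bytes>', 'image/png'))
print(field.headers['Content-Disposition'])
# form-data; name="avatar"; filename="photo.png"
print(field.render_headers())   # Content-Disposition and Content-Type lines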
joelddiaz/openshift-tools
openshift/installer/vendored/openshift-ansible-3.5.127/roles/openshift_certificate_expiry/test/test_load_and_handle_cert.py
78
2030
''' Unit tests for the load_and_handle_cert method ''' import datetime import os import sys import pytest MODULE_PATH = os.path.realpath(os.path.join(__file__, os.pardir, os.pardir, 'library')) sys.path.insert(1, MODULE_PATH) # pylint: disable=import-error,wrong-import-position,missing-docstring # pylint: disable=invalid-name,redefined-outer-name import openshift_cert_expiry # noqa: E402 # TODO: More testing on the results of the load_and_handle_cert function # could be implemented here as well, such as verifying subjects # match up. @pytest.fixture(params=['OpenSSLCertificate', 'FakeOpenSSLCertificate']) def loaded_cert(request, valid_cert): """ parameterized fixture to provide load_and_handle_cert results for both OpenSSL and FakeOpenSSL parsed certificates """ now = datetime.datetime.now() openshift_cert_expiry.HAS_OPENSSL = request.param == 'OpenSSLCertificate' # valid_cert['cert_file'] is a `py.path.LocalPath` object and # provides a read_text() method for reading the file contents. cert_string = valid_cert['cert_file'].read_text('utf8') (subject, expiry_date, time_remaining, serial) = openshift_cert_expiry.load_and_handle_cert(cert_string, now) return { 'now': now, 'subject': subject, 'expiry_date': expiry_date, 'time_remaining': time_remaining, 'serial': serial, } def test_serial(loaded_cert, valid_cert): """Params: * `loaded_cert` comes from the `loaded_cert` fixture in this file * `valid_cert` comes from the 'valid_cert' fixture in conftest.py """ valid_cert_serial = valid_cert['cert'].get_serial_number() assert loaded_cert['serial'] == valid_cert_serial def test_expiry(loaded_cert): """Params: * `loaded_cert` comes from the `loaded_cert` fixture in this file """ expiry_date = loaded_cert['expiry_date'] time_remaining = loaded_cert['time_remaining'] now = loaded_cert['now'] assert expiry_date == now + time_remaining
apache-2.0
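A sketch of the library call these tests exercise. The 4-tuple return shape is taken from the loaded_cert fixture above; the certificate path is hypothetical, and importing the module requires its library/ directory on sys.path as the test does.

import datetime
import openshift_cert_expiry   # assumes library/ is on sys.path

now = datetime.datetime.now()
cert_pem = open('/etc/origin/master/ca.crt').read()   # hypothetical path
(subject, expiry_date,
 time_remaining, serial) = openshift_cert_expiry.load_and_handle_cert(cert_pem, now)
print(subject, serial, time_remaining.days)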
BigBrother1984/android_external_chromium_org
third_party/protobuf/python/google/protobuf/internal/text_format_test.py
162
23727
#! /usr/bin/python # # Protocol Buffers - Google's data interchange format # Copyright 2008 Google Inc. All rights reserved. # http://code.google.com/p/protobuf/ # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are # met: # # * Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # * Redistributions in binary form must reproduce the above # copyright notice, this list of conditions and the following disclaimer # in the documentation and/or other materials provided with the # distribution. # * Neither the name of Google Inc. nor the names of its # contributors may be used to endorse or promote products derived from # this software without specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. """Test for google.protobuf.text_format.""" __author__ = 'kenton@google.com (Kenton Varda)' import difflib import re import unittest from google.protobuf import text_format from google.protobuf.internal import test_util from google.protobuf import unittest_pb2 from google.protobuf import unittest_mset_pb2 class TextFormatTest(unittest.TestCase): def ReadGolden(self, golden_filename): f = test_util.GoldenFile(golden_filename) golden_lines = f.readlines() f.close() return golden_lines def CompareToGoldenFile(self, text, golden_filename): golden_lines = self.ReadGolden(golden_filename) self.CompareToGoldenLines(text, golden_lines) def CompareToGoldenText(self, text, golden_text): self.CompareToGoldenLines(text, golden_text.splitlines(1)) def CompareToGoldenLines(self, text, golden_lines): actual_lines = text.splitlines(1) self.assertEqual(golden_lines, actual_lines, "Text doesn't match golden. 
Diff:\n" + ''.join(difflib.ndiff(golden_lines, actual_lines))) def testPrintAllFields(self): message = unittest_pb2.TestAllTypes() test_util.SetAllFields(message) self.CompareToGoldenFile( self.RemoveRedundantZeros(text_format.MessageToString(message)), 'text_format_unittest_data.txt') def testPrintAllExtensions(self): message = unittest_pb2.TestAllExtensions() test_util.SetAllExtensions(message) self.CompareToGoldenFile( self.RemoveRedundantZeros(text_format.MessageToString(message)), 'text_format_unittest_extensions_data.txt') def testPrintMessageSet(self): message = unittest_mset_pb2.TestMessageSetContainer() ext1 = unittest_mset_pb2.TestMessageSetExtension1.message_set_extension ext2 = unittest_mset_pb2.TestMessageSetExtension2.message_set_extension message.message_set.Extensions[ext1].i = 23 message.message_set.Extensions[ext2].str = 'foo' self.CompareToGoldenText(text_format.MessageToString(message), 'message_set {\n' ' [protobuf_unittest.TestMessageSetExtension1] {\n' ' i: 23\n' ' }\n' ' [protobuf_unittest.TestMessageSetExtension2] {\n' ' str: \"foo\"\n' ' }\n' '}\n') def testPrintBadEnumValue(self): message = unittest_pb2.TestAllTypes() message.optional_nested_enum = 100 message.optional_foreign_enum = 101 message.optional_import_enum = 102 self.CompareToGoldenText( text_format.MessageToString(message), 'optional_nested_enum: 100\n' 'optional_foreign_enum: 101\n' 'optional_import_enum: 102\n') def testPrintBadEnumValueExtensions(self): message = unittest_pb2.TestAllExtensions() message.Extensions[unittest_pb2.optional_nested_enum_extension] = 100 message.Extensions[unittest_pb2.optional_foreign_enum_extension] = 101 message.Extensions[unittest_pb2.optional_import_enum_extension] = 102 self.CompareToGoldenText( text_format.MessageToString(message), '[protobuf_unittest.optional_nested_enum_extension]: 100\n' '[protobuf_unittest.optional_foreign_enum_extension]: 101\n' '[protobuf_unittest.optional_import_enum_extension]: 102\n') def testPrintExotic(self): message = unittest_pb2.TestAllTypes() message.repeated_int64.append(-9223372036854775808) message.repeated_uint64.append(18446744073709551615) message.repeated_double.append(123.456) message.repeated_double.append(1.23e22) message.repeated_double.append(1.23e-18) message.repeated_string.append('\000\001\a\b\f\n\r\t\v\\\'"') message.repeated_string.append(u'\u00fc\ua71f') self.CompareToGoldenText( self.RemoveRedundantZeros(text_format.MessageToString(message)), 'repeated_int64: -9223372036854775808\n' 'repeated_uint64: 18446744073709551615\n' 'repeated_double: 123.456\n' 'repeated_double: 1.23e+22\n' 'repeated_double: 1.23e-18\n' 'repeated_string: ' '"\\000\\001\\007\\010\\014\\n\\r\\t\\013\\\\\\\'\\""\n' 'repeated_string: "\\303\\274\\352\\234\\237"\n') def testPrintNestedMessageAsOneLine(self): message = unittest_pb2.TestAllTypes() msg = message.repeated_nested_message.add() msg.bb = 42; self.CompareToGoldenText( text_format.MessageToString(message, as_one_line=True), 'repeated_nested_message { bb: 42 }') def testPrintRepeatedFieldsAsOneLine(self): message = unittest_pb2.TestAllTypes() message.repeated_int32.append(1) message.repeated_int32.append(1) message.repeated_int32.append(3) message.repeated_string.append("Google") message.repeated_string.append("Zurich") self.CompareToGoldenText( text_format.MessageToString(message, as_one_line=True), 'repeated_int32: 1 repeated_int32: 1 repeated_int32: 3 ' 'repeated_string: "Google" repeated_string: "Zurich"') def testPrintNestedNewLineInStringAsOneLine(self): message = 
unittest_pb2.TestAllTypes() message.optional_string = "a\nnew\nline" self.CompareToGoldenText( text_format.MessageToString(message, as_one_line=True), 'optional_string: "a\\nnew\\nline"') def testPrintMessageSetAsOneLine(self): message = unittest_mset_pb2.TestMessageSetContainer() ext1 = unittest_mset_pb2.TestMessageSetExtension1.message_set_extension ext2 = unittest_mset_pb2.TestMessageSetExtension2.message_set_extension message.message_set.Extensions[ext1].i = 23 message.message_set.Extensions[ext2].str = 'foo' self.CompareToGoldenText( text_format.MessageToString(message, as_one_line=True), 'message_set {' ' [protobuf_unittest.TestMessageSetExtension1] {' ' i: 23' ' }' ' [protobuf_unittest.TestMessageSetExtension2] {' ' str: \"foo\"' ' }' ' }') def testPrintExoticAsOneLine(self): message = unittest_pb2.TestAllTypes() message.repeated_int64.append(-9223372036854775808) message.repeated_uint64.append(18446744073709551615) message.repeated_double.append(123.456) message.repeated_double.append(1.23e22) message.repeated_double.append(1.23e-18) message.repeated_string.append('\000\001\a\b\f\n\r\t\v\\\'"') message.repeated_string.append(u'\u00fc\ua71f') self.CompareToGoldenText( self.RemoveRedundantZeros( text_format.MessageToString(message, as_one_line=True)), 'repeated_int64: -9223372036854775808' ' repeated_uint64: 18446744073709551615' ' repeated_double: 123.456' ' repeated_double: 1.23e+22' ' repeated_double: 1.23e-18' ' repeated_string: ' '"\\000\\001\\007\\010\\014\\n\\r\\t\\013\\\\\\\'\\""' ' repeated_string: "\\303\\274\\352\\234\\237"') def testRoundTripExoticAsOneLine(self): message = unittest_pb2.TestAllTypes() message.repeated_int64.append(-9223372036854775808) message.repeated_uint64.append(18446744073709551615) message.repeated_double.append(123.456) message.repeated_double.append(1.23e22) message.repeated_double.append(1.23e-18) message.repeated_string.append('\000\001\a\b\f\n\r\t\v\\\'"') message.repeated_string.append(u'\u00fc\ua71f') # Test as_utf8 = False. wire_text = text_format.MessageToString( message, as_one_line=True, as_utf8=False) parsed_message = unittest_pb2.TestAllTypes() text_format.Merge(wire_text, parsed_message) self.assertEquals(message, parsed_message) # Test as_utf8 = True. wire_text = text_format.MessageToString( message, as_one_line=True, as_utf8=True) parsed_message = unittest_pb2.TestAllTypes() text_format.Merge(wire_text, parsed_message) self.assertEquals(message, parsed_message) def testPrintRawUtf8String(self): message = unittest_pb2.TestAllTypes() message.repeated_string.append(u'\u00fc\ua71f') text = text_format.MessageToString(message, as_utf8 = True) self.CompareToGoldenText(text, 'repeated_string: "\303\274\352\234\237"\n') parsed_message = unittest_pb2.TestAllTypes() text_format.Merge(text, parsed_message) self.assertEquals(message, parsed_message) def testMessageToString(self): message = unittest_pb2.ForeignMessage() message.c = 123 self.assertEqual('c: 123\n', str(message)) def RemoveRedundantZeros(self, text): # Some platforms print 1e+5 as 1e+005. This is fine, but we need to remove # these zeros in order to match the golden file. text = text.replace('e+0','e+').replace('e+0','e+') \ .replace('e-0','e-').replace('e-0','e-') # Floating point fields are printed with .0 suffix even if they are # actualy integer numbers. 
text = re.compile('\.0$', re.MULTILINE).sub('', text) return text def testMergeGolden(self): golden_text = '\n'.join(self.ReadGolden('text_format_unittest_data.txt')) parsed_message = unittest_pb2.TestAllTypes() text_format.Merge(golden_text, parsed_message) message = unittest_pb2.TestAllTypes() test_util.SetAllFields(message) self.assertEquals(message, parsed_message) def testMergeGoldenExtensions(self): golden_text = '\n'.join(self.ReadGolden( 'text_format_unittest_extensions_data.txt')) parsed_message = unittest_pb2.TestAllExtensions() text_format.Merge(golden_text, parsed_message) message = unittest_pb2.TestAllExtensions() test_util.SetAllExtensions(message) self.assertEquals(message, parsed_message) def testMergeAllFields(self): message = unittest_pb2.TestAllTypes() test_util.SetAllFields(message) ascii_text = text_format.MessageToString(message) parsed_message = unittest_pb2.TestAllTypes() text_format.Merge(ascii_text, parsed_message) self.assertEqual(message, parsed_message) test_util.ExpectAllFieldsSet(self, message) def testMergeAllExtensions(self): message = unittest_pb2.TestAllExtensions() test_util.SetAllExtensions(message) ascii_text = text_format.MessageToString(message) parsed_message = unittest_pb2.TestAllExtensions() text_format.Merge(ascii_text, parsed_message) self.assertEqual(message, parsed_message) def testMergeMessageSet(self): message = unittest_pb2.TestAllTypes() text = ('repeated_uint64: 1\n' 'repeated_uint64: 2\n') text_format.Merge(text, message) self.assertEqual(1, message.repeated_uint64[0]) self.assertEqual(2, message.repeated_uint64[1]) message = unittest_mset_pb2.TestMessageSetContainer() text = ('message_set {\n' ' [protobuf_unittest.TestMessageSetExtension1] {\n' ' i: 23\n' ' }\n' ' [protobuf_unittest.TestMessageSetExtension2] {\n' ' str: \"foo\"\n' ' }\n' '}\n') text_format.Merge(text, message) ext1 = unittest_mset_pb2.TestMessageSetExtension1.message_set_extension ext2 = unittest_mset_pb2.TestMessageSetExtension2.message_set_extension self.assertEquals(23, message.message_set.Extensions[ext1].i) self.assertEquals('foo', message.message_set.Extensions[ext2].str) def testMergeExotic(self): message = unittest_pb2.TestAllTypes() text = ('repeated_int64: -9223372036854775808\n' 'repeated_uint64: 18446744073709551615\n' 'repeated_double: 123.456\n' 'repeated_double: 1.23e+22\n' 'repeated_double: 1.23e-18\n' 'repeated_string: \n' '"\\000\\001\\007\\010\\014\\n\\r\\t\\013\\\\\\\'\\""\n' 'repeated_string: "foo" \'corge\' "grault"\n' 'repeated_string: "\\303\\274\\352\\234\\237"\n' 'repeated_string: "\\xc3\\xbc"\n' 'repeated_string: "\xc3\xbc"\n') text_format.Merge(text, message) self.assertEqual(-9223372036854775808, message.repeated_int64[0]) self.assertEqual(18446744073709551615, message.repeated_uint64[0]) self.assertEqual(123.456, message.repeated_double[0]) self.assertEqual(1.23e22, message.repeated_double[1]) self.assertEqual(1.23e-18, message.repeated_double[2]) self.assertEqual( '\000\001\a\b\f\n\r\t\v\\\'"', message.repeated_string[0]) self.assertEqual('foocorgegrault', message.repeated_string[1]) self.assertEqual(u'\u00fc\ua71f', message.repeated_string[2]) self.assertEqual(u'\u00fc', message.repeated_string[3]) def testMergeEmptyText(self): message = unittest_pb2.TestAllTypes() text = '' text_format.Merge(text, message) self.assertEquals(unittest_pb2.TestAllTypes(), message) def testMergeInvalidUtf8(self): message = unittest_pb2.TestAllTypes() text = 'repeated_string: "\\xc3\\xc3"' self.assertRaises(text_format.ParseError, text_format.Merge, text, 
message) def testMergeSingleWord(self): message = unittest_pb2.TestAllTypes() text = 'foo' self.assertRaisesWithMessage( text_format.ParseError, ('1:1 : Message type "protobuf_unittest.TestAllTypes" has no field named ' '"foo".'), text_format.Merge, text, message) def testMergeUnknownField(self): message = unittest_pb2.TestAllTypes() text = 'unknown_field: 8\n' self.assertRaisesWithMessage( text_format.ParseError, ('1:1 : Message type "protobuf_unittest.TestAllTypes" has no field named ' '"unknown_field".'), text_format.Merge, text, message) def testMergeBadExtension(self): message = unittest_pb2.TestAllExtensions() text = '[unknown_extension]: 8\n' self.assertRaisesWithMessage( text_format.ParseError, '1:2 : Extension "unknown_extension" not registered.', text_format.Merge, text, message) message = unittest_pb2.TestAllTypes() self.assertRaisesWithMessage( text_format.ParseError, ('1:2 : Message type "protobuf_unittest.TestAllTypes" does not have ' 'extensions.'), text_format.Merge, text, message) def testMergeGroupNotClosed(self): message = unittest_pb2.TestAllTypes() text = 'RepeatedGroup: <' self.assertRaisesWithMessage( text_format.ParseError, '1:16 : Expected ">".', text_format.Merge, text, message) text = 'RepeatedGroup: {' self.assertRaisesWithMessage( text_format.ParseError, '1:16 : Expected "}".', text_format.Merge, text, message) def testMergeEmptyGroup(self): message = unittest_pb2.TestAllTypes() text = 'OptionalGroup: {}' text_format.Merge(text, message) self.assertTrue(message.HasField('optionalgroup')) message.Clear() message = unittest_pb2.TestAllTypes() text = 'OptionalGroup: <>' text_format.Merge(text, message) self.assertTrue(message.HasField('optionalgroup')) def testMergeBadEnumValue(self): message = unittest_pb2.TestAllTypes() text = 'optional_nested_enum: BARR' self.assertRaisesWithMessage( text_format.ParseError, ('1:23 : Enum type "protobuf_unittest.TestAllTypes.NestedEnum" ' 'has no value named BARR.'), text_format.Merge, text, message) message = unittest_pb2.TestAllTypes() text = 'optional_nested_enum: 100' self.assertRaisesWithMessage( text_format.ParseError, ('1:23 : Enum type "protobuf_unittest.TestAllTypes.NestedEnum" ' 'has no value with number 100.'), text_format.Merge, text, message) def testMergeBadIntValue(self): message = unittest_pb2.TestAllTypes() text = 'optional_int32: bork' self.assertRaisesWithMessage( text_format.ParseError, ('1:17 : Couldn\'t parse integer: bork'), text_format.Merge, text, message) def assertRaisesWithMessage(self, e_class, e, func, *args, **kwargs): """Same as assertRaises, but also compares the exception message.""" if hasattr(e_class, '__name__'): exc_name = e_class.__name__ else: exc_name = str(e_class) try: func(*args, **kwargs) except e_class as expr: if str(expr) != e: msg = '%s raised, but with wrong message: "%s" instead of "%s"' raise self.failureException(msg % (exc_name, str(expr).encode('string_escape'), e.encode('string_escape'))) return else: raise self.failureException('%s not raised' % exc_name) class TokenizerTest(unittest.TestCase): def testSimpleTokenCases(self): text = ('identifier1:"string1"\n \n\n' 'identifier2 : \n \n123 \n identifier3 :\'string\'\n' 'identifiER_4 : 1.1e+2 ID5:-0.23 ID6:\'aaaa\\\'bbbb\'\n' 'ID7 : "aa\\"bb"\n\n\n\n ID8: {A:inf B:-inf C:true D:false}\n' 'ID9: 22 ID10: -111111111111111111 ID11: -22\n' 'ID12: 2222222222222222222 ID13: 1.23456f ID14: 1.2e+2f ' 'false_bool: 0 true_BOOL:t \n true_bool1: 1 false_BOOL1:f ' ) tokenizer = text_format._Tokenizer(text) methods = 
[(tokenizer.ConsumeIdentifier, 'identifier1'), ':', (tokenizer.ConsumeString, 'string1'), (tokenizer.ConsumeIdentifier, 'identifier2'), ':', (tokenizer.ConsumeInt32, 123), (tokenizer.ConsumeIdentifier, 'identifier3'), ':', (tokenizer.ConsumeString, 'string'), (tokenizer.ConsumeIdentifier, 'identifiER_4'), ':', (tokenizer.ConsumeFloat, 1.1e+2), (tokenizer.ConsumeIdentifier, 'ID5'), ':', (tokenizer.ConsumeFloat, -0.23), (tokenizer.ConsumeIdentifier, 'ID6'), ':', (tokenizer.ConsumeString, 'aaaa\'bbbb'), (tokenizer.ConsumeIdentifier, 'ID7'), ':', (tokenizer.ConsumeString, 'aa\"bb'), (tokenizer.ConsumeIdentifier, 'ID8'), ':', '{', (tokenizer.ConsumeIdentifier, 'A'), ':', (tokenizer.ConsumeFloat, float('inf')), (tokenizer.ConsumeIdentifier, 'B'), ':', (tokenizer.ConsumeFloat, -float('inf')), (tokenizer.ConsumeIdentifier, 'C'), ':', (tokenizer.ConsumeBool, True), (tokenizer.ConsumeIdentifier, 'D'), ':', (tokenizer.ConsumeBool, False), '}', (tokenizer.ConsumeIdentifier, 'ID9'), ':', (tokenizer.ConsumeUint32, 22), (tokenizer.ConsumeIdentifier, 'ID10'), ':', (tokenizer.ConsumeInt64, -111111111111111111), (tokenizer.ConsumeIdentifier, 'ID11'), ':', (tokenizer.ConsumeInt32, -22), (tokenizer.ConsumeIdentifier, 'ID12'), ':', (tokenizer.ConsumeUint64, 2222222222222222222), (tokenizer.ConsumeIdentifier, 'ID13'), ':', (tokenizer.ConsumeFloat, 1.23456), (tokenizer.ConsumeIdentifier, 'ID14'), ':', (tokenizer.ConsumeFloat, 1.2e+2), (tokenizer.ConsumeIdentifier, 'false_bool'), ':', (tokenizer.ConsumeBool, False), (tokenizer.ConsumeIdentifier, 'true_BOOL'), ':', (tokenizer.ConsumeBool, True), (tokenizer.ConsumeIdentifier, 'true_bool1'), ':', (tokenizer.ConsumeBool, True), (tokenizer.ConsumeIdentifier, 'false_BOOL1'), ':', (tokenizer.ConsumeBool, False)] i = 0 while not tokenizer.AtEnd(): m = methods[i] if type(m) == str: token = tokenizer.token self.assertEqual(token, m) tokenizer.NextToken() else: self.assertEqual(m[1], m[0]()) i += 1 def testConsumeIntegers(self): # This test only tests the failures in the integer parsing methods as well # as the '0' special cases. 
int64_max = (1 << 63) - 1 uint32_max = (1 << 32) - 1 text = '-1 %d %d' % (uint32_max + 1, int64_max + 1) tokenizer = text_format._Tokenizer(text) self.assertRaises(text_format.ParseError, tokenizer.ConsumeUint32) self.assertRaises(text_format.ParseError, tokenizer.ConsumeUint64) self.assertEqual(-1, tokenizer.ConsumeInt32()) self.assertRaises(text_format.ParseError, tokenizer.ConsumeUint32) self.assertRaises(text_format.ParseError, tokenizer.ConsumeInt32) self.assertEqual(uint32_max + 1, tokenizer.ConsumeInt64()) self.assertRaises(text_format.ParseError, tokenizer.ConsumeInt64) self.assertEqual(int64_max + 1, tokenizer.ConsumeUint64()) self.assertTrue(tokenizer.AtEnd()) text = '-0 -0 0 0' tokenizer = text_format._Tokenizer(text) self.assertEqual(0, tokenizer.ConsumeUint32()) self.assertEqual(0, tokenizer.ConsumeUint64()) self.assertEqual(0, tokenizer.ConsumeUint32()) self.assertEqual(0, tokenizer.ConsumeUint64()) self.assertTrue(tokenizer.AtEnd()) def testConsumeByteString(self): text = '"string1\'' tokenizer = text_format._Tokenizer(text) self.assertRaises(text_format.ParseError, tokenizer.ConsumeByteString) text = 'string1"' tokenizer = text_format._Tokenizer(text) self.assertRaises(text_format.ParseError, tokenizer.ConsumeByteString) text = '\n"\\xt"' tokenizer = text_format._Tokenizer(text) self.assertRaises(text_format.ParseError, tokenizer.ConsumeByteString) text = '\n"\\"' tokenizer = text_format._Tokenizer(text) self.assertRaises(text_format.ParseError, tokenizer.ConsumeByteString) text = '\n"\\x"' tokenizer = text_format._Tokenizer(text) self.assertRaises(text_format.ParseError, tokenizer.ConsumeByteString) def testConsumeBool(self): text = 'not-a-bool' tokenizer = text_format._Tokenizer(text) self.assertRaises(text_format.ParseError, tokenizer.ConsumeBool) if __name__ == '__main__': unittest.main()
bsd-3-clause
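A round-trip sketch of the text_format API the tests above exercise, using the same generated test message class they import.

from google.protobuf import text_format
from google.protobuf import unittest_pb2

msg = unittest_pb2.TestAllTypes()
msg.repeated_int32.append(1)
msg.optional_string = 'a\nnew\nline'
text = text_format.MessageToString(msg, as_one_line=True)   # escaped one-liner
parsed = unittest_pb2.TestAllTypes()
text_format.Merge(text, parsed)                             # parse it back
assert msg == parsed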
peterfpeterson/mantid
Framework/PythonInterface/test/python/plugins/functions/RFresonanceTest.py
3
1312
# Mantid Repository : https://github.com/mantidproject/mantid # # Copyright &copy; 2019 ISIS Rutherford Appleton Laboratory UKRI, # NScD Oak Ridge National Laboratory, European Spallation Source, # Institut Laue - Langevin & CSNS, Institute of High Energy Physics, CAS # SPDX - License - Identifier: GPL - 3.0 + import unittest import numpy as np from MsdTestHelper import (is_registered, check_output, do_a_fit) class RFresonanceTest(unittest.TestCase): def test_function_has_been_registered(self): status, msg = is_registered("RFresonance") if not status: self.fail(msg) def test_function_output(self): input = [0.0, 4.0, 8.0, 12.0] expected = [0.2, 0.10553037744199503, 0.09243960178747213, 0.09990230524469997] tolerance = 1.0e-05 status, output = check_output("RFresonance", input, expected, tolerance, A0 = 0.2, Boffset = 10, B1 = 10, B1GauWidth = 0.2) if not status: msg = 'Computed output {} from input {} unequal to expected: {}' self.fail(msg.format(*[str(i) for i in (output, input, expected)])) def test_do_fit(self): do_a_fit(np.arange(0.1, 16, 0.2), 'RFresonance', guess = dict(A0 = 0.25, Boffset = 10.5, B1 = 10.5, B1GauWidth = 0.25), target = dict(A0 = 0.2, Boffset = 10, B1 = 10, B1GauWidth = 0.2), atol = 0.01) if __name__ == '__main__': unittest.main()
gpl-3.0
Azulinho/ansible
lib/ansible/plugins/action/vyos.py
6
3640
#
# (c) 2016 Red Hat Inc.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible.  If not, see <http://www.gnu.org/licenses/>.
#
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type

import sys
import copy

from ansible import constants as C
from ansible.plugins.action.normal import ActionModule as _ActionModule
from ansible.module_utils._text import to_text
from ansible.module_utils.connection import Connection
from ansible.module_utils.network.common.utils import load_provider
from ansible.module_utils.network.vyos.vyos import vyos_provider_spec

try:
    from __main__ import display
except ImportError:
    from ansible.utils.display import Display
    display = Display()


class ActionModule(_ActionModule):

    def run(self, tmp=None, task_vars=None):
        socket_path = None

        if self._play_context.connection == 'network_cli':
            provider = self._task.args.get('provider', {})
            if any(provider.values()):
                display.warning('provider is unnecessary when using network_cli and will be ignored')
        elif self._play_context.connection == 'local':
            provider = load_provider(vyos_provider_spec, self._task.args)

            pc = copy.deepcopy(self._play_context)
            pc.connection = 'network_cli'
            pc.network_os = 'vyos'
            pc.remote_addr = provider['host'] or self._play_context.remote_addr
            pc.port = int(provider['port'] or self._play_context.port or 22)
            pc.remote_user = provider['username'] or self._play_context.connection_user
            pc.password = provider['password'] or self._play_context.password
            pc.private_key_file = provider['ssh_keyfile'] or self._play_context.private_key_file
            pc.timeout = int(provider['timeout'] or C.PERSISTENT_COMMAND_TIMEOUT)

            display.vvv('using connection plugin %s' % pc.connection, pc.remote_addr)
            connection = self._shared_loader_obj.connection_loader.get('persistent', pc, sys.stdin)

            socket_path = connection.run()
            display.vvvv('socket_path: %s' % socket_path, pc.remote_addr)
            if not socket_path:
                return {'failed': True,
                        'msg': 'unable to open shell. Please see: ' +
                               'https://docs.ansible.com/ansible/network_debug_troubleshooting.html#unable-to-open-shell'}

            task_vars['ansible_socket'] = socket_path

        # make sure we are in the right cli context which should be
        # enable mode and not config module
        if socket_path is None:
            socket_path = self._connection.socket_path

        conn = Connection(socket_path)
        out = conn.get_prompt()
        while to_text(out, errors='surrogate_then_replace').strip().endswith(')#'):
            display.vvvv('wrong context, sending exit to device', self._play_context.remote_addr)
            conn.send_command('abort')
            out = conn.get_prompt()

        result = super(ActionModule, self).run(tmp, task_vars)
        return result
gpl-3.0
Azure/azure-sdk-for-python
sdk/redhatopenshift/azure-mgmt-redhatopenshift/azure/mgmt/redhatopenshift/_azure_red_hat_open_shift4_client.py
1
5741
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------

from typing import TYPE_CHECKING

from azure.mgmt.core import ARMPipelineClient
from azure.profiles import KnownProfiles, ProfileDefinition
from azure.profiles.multiapiclient import MultiApiClientMixin
from msrest import Deserializer, Serializer

from ._configuration import AzureRedHatOpenShift4ClientConfiguration

if TYPE_CHECKING:
    # pylint: disable=unused-import,ungrouped-imports
    from typing import Any, Optional

    from azure.core.credentials import TokenCredential
    from azure.core.pipeline.transport import HttpRequest, HttpResponse


class _SDKClient(object):
    def __init__(self, *args, **kwargs):
        """This is a fake class to support the current implementation of
        MultiApiClientMixin. Will be removed in the final version of the
        multiapi azure-core based client.
        """
        pass


class AzureRedHatOpenShift4Client(MultiApiClientMixin, _SDKClient):
    """Rest API for Azure Red Hat OpenShift 4.

    This client contains multiple API versions, to help you deal with all of the Azure clouds
    (Azure Stack, Azure Government, Azure China, etc.).
    By default, it uses the latest API version available on public Azure.
    For production, you should stick to a particular api-version and/or profile.
    The profile sets a mapping between an operation group and its API version.
    The api-version parameter sets the default API version if the operation
    group is not described in the profile.

    :param credential: Credential needed for the client to connect to Azure.
    :type credential: ~azure.core.credentials.TokenCredential
    :param subscription_id: The ID of the target subscription.
    :type subscription_id: str
    :param api_version: API version to use if no profile is provided, or if missing in profile.
    :type api_version: str
    :param base_url: Service URL
    :type base_url: str
    :param profile: A profile definition, from KnownProfiles to dict.
    :type profile: azure.profiles.KnownProfiles
    :keyword int polling_interval: Default waiting time between two polls for LRO operations
     if no Retry-After header is present.
    """

    DEFAULT_API_VERSION = '2020-04-30'
    _PROFILE_TAG = "azure.mgmt.redhatopenshift.AzureRedHatOpenShift4Client"
    LATEST_PROFILE = ProfileDefinition({
        _PROFILE_TAG: {
            None: DEFAULT_API_VERSION,
        }},
        _PROFILE_TAG + " latest"
    )

    def __init__(
        self,
        credential,  # type: "TokenCredential"
        subscription_id,  # type: str
        api_version=None,  # type: Optional[str]
        base_url=None,  # type: Optional[str]
        profile=KnownProfiles.default,  # type: KnownProfiles
        **kwargs  # type: Any
    ):
        if not base_url:
            base_url = 'https://management.azure.com'
        self._config = AzureRedHatOpenShift4ClientConfiguration(credential, subscription_id, **kwargs)
        self._client = ARMPipelineClient(base_url=base_url, config=self._config, **kwargs)
        super(AzureRedHatOpenShift4Client, self).__init__(
            api_version=api_version,
            profile=profile
        )

    @classmethod
    def _models_dict(cls, api_version):
        return {k: v for k, v in cls.models(api_version).__dict__.items() if isinstance(v, type)}

    @classmethod
    def models(cls, api_version=DEFAULT_API_VERSION):
        """Module depends on the API version:

           * 2020-04-30: :mod:`v2020_04_30.models<azure.mgmt.redhatopenshift.v2020_04_30.models>`
        """
        if api_version == '2020-04-30':
            from .v2020_04_30 import models
            return models
        raise ValueError("API version {} is not available".format(api_version))

    @property
    def open_shift_clusters(self):
        """Instance depends on the API version:

           * 2020-04-30: :class:`OpenShiftClustersOperations<azure.mgmt.redhatopenshift.v2020_04_30.operations.OpenShiftClustersOperations>`
        """
        api_version = self._get_api_version('open_shift_clusters')
        if api_version == '2020-04-30':
            from .v2020_04_30.operations import OpenShiftClustersOperations as OperationClass
        else:
            raise ValueError("API version {} does not have operation group 'open_shift_clusters'".format(api_version))
        return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version)))

    @property
    def operations(self):
        """Instance depends on the API version:

           * 2020-04-30: :class:`Operations<azure.mgmt.redhatopenshift.v2020_04_30.operations.Operations>`
        """
        api_version = self._get_api_version('operations')
        if api_version == '2020-04-30':
            from .v2020_04_30.operations import Operations as OperationClass
        else:
            raise ValueError("API version {} does not have operation group 'operations'".format(api_version))
        return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version)))

    def close(self):
        self._client.close()

    def __enter__(self):
        self._client.__enter__()
        return self

    def __exit__(self, *exc_details):
        self._client.__exit__(*exc_details)
mit
muntasirsyed/intellij-community
python/helpers/docutils/transforms/components.py
196
1993
# $Id: components.py 4564 2006-05-21 20:44:42Z wiemann $
# Author: David Goodger <goodger@python.org>
# Copyright: This module has been placed in the public domain.

"""
Docutils component-related transforms.
"""

__docformat__ = 'reStructuredText'

import sys
import os
import re
import time
from docutils import nodes, utils
from docutils import ApplicationError, DataError
from docutils.transforms import Transform, TransformError


class Filter(Transform):

    """
    Include or exclude elements which depend on a specific Docutils
    component.

    For use with `nodes.pending` elements.  A "pending" element's
    dictionary attribute ``details`` must contain the keys "component"
    and "format".  The value of ``details['component']`` must match the
    type name of the component the elements depend on (e.g. "writer").
    The value of ``details['format']`` is the name of a specific format
    or context of that component (e.g. "html").  If the matching
    Docutils component supports that format or context, the "pending"
    element is replaced by the contents of ``details['nodes']`` (a list
    of nodes); otherwise, the "pending" element is removed.

    For example, the reStructuredText "meta" directive creates a
    "pending" element containing a "meta" element (in
    ``pending.details['nodes']``).  Only writers
    (``pending.details['component'] == 'writer'``) supporting the "html"
    format (``pending.details['format'] == 'html'``) will include the
    "meta" element; it will be deleted from the output of all other
    writers.
    """

    default_priority = 780

    def apply(self):
        pending = self.startnode
        component_type = pending.details['component'] # 'reader' or 'writer'
        format = pending.details['format']
        component = self.document.transformer.components[component_type]
        if component.supports(format):
            pending.replace_self(pending.details['nodes'])
        else:
            pending.parent.remove(pending)
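A sketch of how client code might queue a node for this transform, following the contract spelled out in the docstring. Illustrative only; the real "meta" directive in docutils.parsers.rst.directives.html does the equivalent, and the OptionParser/new_document setup is just enough scaffolding to make the snippet self-contained.

from docutils import frontend, nodes, utils
from docutils.parsers.rst import Parser
from docutils.transforms import components

settings = frontend.OptionParser(components=(Parser,)).get_default_values()
document = utils.new_document('<sketch>', settings)

# A raw HTML snippet that only writers supporting the "html" format should emit.
raw_node = nodes.raw('', '<meta name="keywords" content="docutils">', format='html')
pending = nodes.pending(components.Filter,
                        {'component': 'writer',
                         'format': 'html',
                         'nodes': [raw_node]})
document += pending
# Registering the pending node is what causes Filter.apply() to run later.
document.note_pending(pending)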
apache-2.0
DMLoy/ECommerceBasic
lib/python2.7/site-packages/django/contrib/gis/geos/linestring.py
224
5607
from django.contrib.gis.geos.base import numpy
from django.contrib.gis.geos.coordseq import GEOSCoordSeq
from django.contrib.gis.geos.error import GEOSException
from django.contrib.gis.geos.geometry import GEOSGeometry
from django.contrib.gis.geos.point import Point
from django.contrib.gis.geos import prototypes as capi
from django.utils.six.moves import xrange


class LineString(GEOSGeometry):
    _init_func = capi.create_linestring
    _minlength = 2

    #### Python 'magic' routines ####
    def __init__(self, *args, **kwargs):
        """
        Initializes on the given sequence -- may take lists, tuples, NumPy arrays
        of X,Y pairs, or Point objects.  If Point objects are used, ownership is
        _not_ transferred to the LineString object.

        Examples:
         ls = LineString((1, 1), (2, 2))
         ls = LineString([(1, 1), (2, 2)])
         ls = LineString(array([(1, 1), (2, 2)]))
         ls = LineString(Point(1, 1), Point(2, 2))
        """
        # If only one argument provided, set the coords array appropriately
        if len(args) == 1:
            coords = args[0]
        else:
            coords = args

        if isinstance(coords, (tuple, list)):
            # Getting the number of coords and the number of dimensions -- which
            # must stay the same, e.g., no LineString((1, 2), (1, 2, 3)).
            ncoords = len(coords)
            if coords:
                ndim = len(coords[0])
            else:
                raise TypeError('Cannot initialize on empty sequence.')
            self._checkdim(ndim)
            # Incrementing through each of the coordinates and verifying
            for i in xrange(1, ncoords):
                if not isinstance(coords[i], (tuple, list, Point)):
                    raise TypeError('each coordinate should be a sequence (list or tuple)')
                if len(coords[i]) != ndim:
                    raise TypeError('Dimension mismatch.')
            numpy_coords = False
        elif numpy and isinstance(coords, numpy.ndarray):
            shape = coords.shape  # Using numpy's shape.
            if len(shape) != 2:
                raise TypeError('Too many dimensions.')
            self._checkdim(shape[1])
            ncoords = shape[0]
            ndim = shape[1]
            numpy_coords = True
        else:
            raise TypeError('Invalid initialization input for LineStrings.')

        # Creating a coordinate sequence object because it is easier to
        # set the points using GEOSCoordSeq.__setitem__().
        cs = GEOSCoordSeq(capi.create_cs(ncoords, ndim), z=bool(ndim == 3))

        for i in xrange(ncoords):
            if numpy_coords:
                cs[i] = coords[i, :]
            elif isinstance(coords[i], Point):
                cs[i] = coords[i].tuple
            else:
                cs[i] = coords[i]

        # If SRID was passed in with the keyword arguments
        srid = kwargs.get('srid', None)

        # Calling the base geometry initialization with the returned pointer
        # from the function.
        super(LineString, self).__init__(self._init_func(cs.ptr), srid=srid)

    def __iter__(self):
        "Allows iteration over this LineString."
        for i in xrange(len(self)):
            yield self[i]

    def __len__(self):
        "Returns the number of points in this LineString."
        return len(self._cs)

    def _get_single_external(self, index):
        return self._cs[index]

    _get_single_internal = _get_single_external

    def _set_list(self, length, items):
        ndim = self._cs.dims
        hasz = self._cs.hasz  # I don't understand why these are different

        # create a new coordinate sequence and populate accordingly
        cs = GEOSCoordSeq(capi.create_cs(length, ndim), z=hasz)
        for i, c in enumerate(items):
            cs[i] = c

        ptr = self._init_func(cs.ptr)
        if ptr:
            capi.destroy_geom(self.ptr)
            self.ptr = ptr
            self._post_init(self.srid)
        else:
            # can this happen?
            raise GEOSException('Geometry resulting from slice deletion was invalid.')

    def _set_single(self, index, value):
        self._checkindex(index)
        self._cs[index] = value

    def _checkdim(self, dim):
        if dim not in (2, 3):
            raise TypeError('Dimension mismatch.')

    #### Sequence Properties ####
    @property
    def tuple(self):
        "Returns a tuple version of the geometry from the coordinate sequence."
        return self._cs.tuple
    coords = tuple

    def _listarr(self, func):
        """
        Internal routine that returns a sequence (list) corresponding with
        the given function.  Will return a numpy array if possible.
        """
        lst = [func(i) for i in xrange(len(self))]
        if numpy:
            return numpy.array(lst)  # ARRRR!
        else:
            return lst

    @property
    def array(self):
        "Returns a numpy array for the LineString."
        return self._listarr(self._cs.__getitem__)

    @property
    def merged(self):
        "Returns the line merge of this LineString."
        return self._topology(capi.geos_linemerge(self.ptr))

    @property
    def x(self):
        "Returns a list or numpy array of the X variable."
        return self._listarr(self._cs.getX)

    @property
    def y(self):
        "Returns a list or numpy array of the Y variable."
        return self._listarr(self._cs.getY)

    @property
    def z(self):
        "Returns a list or numpy array of the Z variable."
        if not self.hasz:
            return None
        else:
            return self._listarr(self._cs.getZ)


# LinearRings are LineStrings used within Polygons.
class LinearRing(LineString):
    _minLength = 4
    _init_func = capi.create_linearring
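A short usage sketch for the constructor forms listed in the docstring above (requires GEOS to be installed; the values in comments are the expected results):

from django.contrib.gis.geos import LineString, Point

ls = LineString((0, 0), (3, 4))
print(ls.length)    # 5.0
print(ls.coords)    # ((0.0, 0.0), (3.0, 4.0))

# Point objects and an explicit SRID also work, per the docstring.
ls2 = LineString([Point(0, 0), Point(1, 1), Point(2, 2)], srid=4326)
print(ls2.srid, len(ls2), ls2[1])   # 4326 3 (1.0, 1.0)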
mit
RedlineResearch/ardupilot
Tools/autotest/param_metadata/wikiemit.py
23
2920
#!/usr/bin/env python

import re
from emit import Emit
from param import known_param_fields, known_units


# Emit docs in a form acceptable to the APM wiki site
class WikiEmit(Emit):
    def __init__(self):
        Emit.__init__(self)
        wiki_fname = 'Parameters.wiki'
        self.f = open(wiki_fname, mode='w')
        preamble = '''#summary Dynamically generated list of documented parameters

= Table of Contents =
<wiki:toc max_depth="4" />

= Vehicles =
'''
        self.f.write(preamble)

    def close(self):
        self.f.close()

    def camelcase_escape(self, word):
        if re.match(r"([A-Z][a-z]+[A-Z][a-z]*)", word.strip()):
            return "!" + word
        else:
            return word

    def wikichars_escape(self, text):
        for c in "*,{,},[,],_,=,#,^,~,!,@,$,|,<,>,&,|,\,/".split(','):
            text = re.sub("\\" + c, '`' + c + '`', text)
        return text

    def emit_comment(self, s):
        self.f.write("\n\n=" + s + "=\n\n")

    def start_libraries(self):
        self.emit_comment("Libraries")

    def emit(self, g, f):
        t = "\n\n== %s Parameters ==\n" % (self.camelcase_escape(g.name))

        for param in g.params:
            if hasattr(param, 'DisplayName'):
                t += "\n\n=== %s (%s) ===" % (self.camelcase_escape(param.DisplayName),
                                              self.camelcase_escape(param.name))
            else:
                t += "\n\n=== %s ===" % self.camelcase_escape(param.name)

            if hasattr(param, 'Description'):
                t += "\n\n_%s_\n" % self.wikichars_escape(param.Description)
            else:
                t += "\n\n_TODO: description_\n"

            for field in param.__dict__.keys():
                if field not in ['name', 'DisplayName', 'Description', 'User'] and field in known_param_fields:
                    if field == 'Values' and Emit.prog_values_field.match(param.__dict__[field]):
                        t += " * Values \n"
                        values = (param.__dict__[field]).split(',')
                        t += "|| *Value* || *Meaning* ||\n"
                        for value in values:
                            v = value.split(':')
                            t += "|| " + v[0] + " || " + self.camelcase_escape(v[1]) + " ||\n"
                    elif field == 'Units':
                        abbreviated_units = param.__dict__[field]
                        if abbreviated_units != '':
                            # use the known_units dictionary to convert the
                            # abbreviated unit into a full textual one
                            units = known_units[abbreviated_units]
                            t += " * %s: %s\n" % (self.camelcase_escape(field), self.wikichars_escape(units))
                    else:
                        t += " * %s: %s\n" % (self.camelcase_escape(field), self.wikichars_escape(param.__dict__[field]))

        # print t
        self.f.write(t)
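A quick illustration of the two escaping helpers, with behavior inferred from the regexes above rather than from project documentation (the __new__ trick just avoids __init__ opening Parameters.wiki):

w = WikiEmit.__new__(WikiEmit)           # skip __init__ so no wiki file is opened
print(w.camelcase_escape('ArduPlane'))   # '!ArduPlane' (CamelCase is escaped)
print(w.camelcase_escape('THR_MAX'))     # 'THR_MAX' (left alone)
print(w.wikichars_escape('a_b=c'))       # 'a`_`b`=`c'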
gpl-3.0
jmahler/linux-next
tools/perf/scripts/python/Perf-Trace-Util/lib/Perf/Trace/EventClass.py
4653
3596
# EventClass.py
#
# This is a library defining some event type classes, which could
# be used by other scripts to analyze the perf samples.
#
# Currently there are just a few classes defined for examples,
# PerfEvent is the base class for all perf event samples, PebsEvent
# is a HW-based Intel x86 PEBS event, and users could add more SW/HW
# event classes based on requirements.

import struct

# Event types, user could add more here
EVTYPE_GENERIC  = 0
EVTYPE_PEBS     = 1     # Basic PEBS event
EVTYPE_PEBS_LL  = 2     # PEBS event with load latency info
EVTYPE_IBS      = 3

#
# Currently we don't have a good way to tell the event type other than by
# the size of the raw buffer: a raw PEBS event with load latency data is
# 176 bytes, while a pure PEBS event is 144 bytes.
#
def create_event(name, comm, dso, symbol, raw_buf):
        if (len(raw_buf) == 144):
                event = PebsEvent(name, comm, dso, symbol, raw_buf)
        elif (len(raw_buf) == 176):
                event = PebsNHM(name, comm, dso, symbol, raw_buf)
        else:
                event = PerfEvent(name, comm, dso, symbol, raw_buf)
        return event

class PerfEvent(object):
        event_num = 0

        def __init__(self, name, comm, dso, symbol, raw_buf, ev_type=EVTYPE_GENERIC):
                self.name = name
                self.comm = comm
                self.dso = dso
                self.symbol = symbol
                self.raw_buf = raw_buf
                self.ev_type = ev_type
                PerfEvent.event_num += 1

        def show(self):
                print "PMU event: name=%12s, symbol=%24s, comm=%8s, dso=%12s" % (self.name, self.symbol, self.comm, self.dso)

#
# Basic Intel PEBS (Precise Event-based Sampling) event, whose raw buffer
# contains the context info when that event happened: the EFLAGS and
# linear IP info, as well as all the registers.
#
class PebsEvent(PerfEvent):
        pebs_num = 0

        def __init__(self, name, comm, dso, symbol, raw_buf, ev_type=EVTYPE_PEBS):
                tmp_buf = raw_buf[0:80]
                flags, ip, ax, bx, cx, dx, si, di, bp, sp = struct.unpack('QQQQQQQQQQ', tmp_buf)
                self.flags = flags
                self.ip = ip
                self.ax = ax
                self.bx = bx
                self.cx = cx
                self.dx = dx
                self.si = si
                self.di = di
                self.bp = bp
                self.sp = sp
                PerfEvent.__init__(self, name, comm, dso, symbol, raw_buf, ev_type)
                PebsEvent.pebs_num += 1
                del tmp_buf

#
# Intel Nehalem and Westmere support PEBS plus Load Latency info, which lies
# in the four 64 bit words written after the PEBS data:
#       Status: records the IA32_PERF_GLOBAL_STATUS register value
#       DLA:    Data Linear Address (EIP)
#       DSE:    Data Source Encoding, where the latency happens, hit or miss
#               in L1/L2/L3 or IO operations
#       LAT:    the actual latency in cycles
#
class PebsNHM(PebsEvent):
        pebs_nhm_num = 0

        def __init__(self, name, comm, dso, symbol, raw_buf, ev_type=EVTYPE_PEBS_LL):
                tmp_buf = raw_buf[144:176]
                status, dla, dse, lat = struct.unpack('QQQQ', tmp_buf)
                self.status = status
                self.dla = dla
                self.dse = dse
                self.lat = lat
                PebsEvent.__init__(self, name, comm, dso, symbol, raw_buf, ev_type)
                PebsNHM.pebs_nhm_num += 1
                del tmp_buf
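A sketch of the size-based dispatch in create_event, fed with a synthetic raw buffer; real buffers come from perf sample records, so the packed register values here are placeholders.

import struct

regs = struct.pack('QQQQQQQQQQ', *range(10))  # 80 bytes of fake register state
raw_buf = regs + '\x00' * 64                  # pad to 144 bytes -> basic PEBS
ev = create_event('cycles', 'bash', '/bin/bash', 'main', raw_buf)
assert isinstance(ev, PebsEvent)
ev.show()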
gpl-2.0
openstack/octavia
octavia/db/migration/alembic_migrations/versions/8c0851bdf6c3_change_tls_container_id_length_in_sni_.py
1
1043
# Copyright 2016 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

"""change_tls_container_id_length_in_sni_table

Revision ID: 8c0851bdf6c3
Revises: 186509101b9b
Create Date: 2016-03-23 19:08:53.148812

"""

from alembic import op
import sqlalchemy as sa

# revision identifiers, used by Alembic.
revision = '8c0851bdf6c3'
down_revision = '186509101b9b'


def upgrade():
    op.alter_column(u'sni', u'tls_container_id',
                    type_=sa.String(128),
                    existing_type=sa.String(36),
                    nullable=False)
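The migration above is upgrade-only. For illustration, the inverse operation would look roughly like the sketch below; it is not part of the original file, and shrinking the column could truncate data, which is one reason such downgrades are usually omitted.

def downgrade():
    # Hypothetical inverse of upgrade(); may lose data for values > 36 chars.
    op.alter_column(u'sni', u'tls_container_id',
                    type_=sa.String(36),
                    existing_type=sa.String(128),
                    nullable=False)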
apache-2.0
apexdatasolutions/VistA
Scripts/Testing/PyUnit/TestPatchOrderGenerator.py
7
3660
#---------------------------------------------------------------------------
# Copyright 2013 The Open Source Electronic Health Record Agent
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#---------------------------------------------------------------------------

import os
import sys
import unittest

curDir = os.path.dirname(os.path.abspath(__file__))
scriptDir = os.path.normpath(os.path.join(curDir, "../../"))
if scriptDir not in sys.path:
    sys.path.append(scriptDir)
PACKAGE_DIR = os.path.normpath(os.path.join(scriptDir, "../Packages"))
TEST_PACKAGE_DIR = os.path.join(curDir, "Packages")

from PatchOrderGenerator import PatchOrderGenerator, topologicSort
from LoggerManager import logger, initConsoleLogging
import logging


class TestPatchOrderGenerator(unittest.TestCase):
    def __init__(self, methodName='runTest'):
        unittest.TestCase.__init__(self, methodName)
        self.handler = None

    def setUp(self):
        logger.setLevel(logging.ERROR)
        self.handler = initConsoleLogging(logging.ERROR)

    def tearDown(self):
        logger.removeHandler(self.handler)

    def test_generatePatchOrderTopologic(self):
        patchOrderGen = PatchOrderGenerator()
        patchOrder = patchOrderGen.generatePatchOrderTopologic(PACKAGE_DIR)
        self.assertTrue(patchOrder, "no valid patch order is generated")

    def test_generatePatchOrderTopologicSample(self):
        patchOrderGen = PatchOrderGenerator()
        patchOrder = patchOrderGen.generatePatchOrderTopologic(TEST_PACKAGE_DIR)
        self.verifySampleOrder(patchOrder)

    def verifySampleOrder(self, patchOrder):
        self.assertTrue(patchOrder, "no valid patch order is generated")
        expectedOrder = ['LR*5.2*382', 'HDI*1.0*7', 'LR*5.2*350',
                         'LA*5.2*74', 'LR*5.2*420']
        installList = [x.installName for x in patchOrder]
        self.assertEqual(installList, expectedOrder)

    def test_topologicSort(self):
        depDict = {'2': ['11'],
                   '9': ['11', '8'],
                   '10': ['11', '3'],
                   '11': ['7', '5'],
                   '8': ['7', '3'],
                   '12': [],
                   }
        result = topologicSort(depDict)
        self.assertTrue(result, "no valid order is generated")
        self.assertTrue('12' in result, "orphan node is ignored")
        print result
        result = topologicSort(depDict, '9')
        self.assertTrue(result, "no valid order is generated")
        print result
        result = topologicSort(depDict, '10')
        self.assertTrue(result, "no valid order is generated")
        print result
        result = topologicSort(depDict, '2')
        self.assertTrue(result, "no valid order is generated")
        print result
        # this will create a cycle among 5, 11, 10
        depDict.update({'5': ['10']})
        self.assertRaises(Exception, topologicSort, depDict)
        # this will create a cycle among 2, 5, 8, 11
        depDict.update({'5': ['8'], '8': ['7', '3', '2']})
        self.assertRaises(Exception, topologicSort, depDict)


if __name__ == '__main__':
    suite = unittest.TestLoader().loadTestsFromTestCase(TestPatchOrderGenerator)
    unittest.TextTestRunner(verbosity=2).run(suite)
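For reference, a minimal sketch of the contract these tests exercise: an ordering over the dependency dict (dependencies first, orphan nodes included) that raises on cycles. The real topologicSort in PatchOrderGenerator.py may differ in details such as the optional start-node argument and tie-breaking.

def simple_topologic_sort(depDict):
    # Collect every node mentioned, either as a key or as a dependency.
    nodes = set(depDict)
    for deps in depDict.values():
        nodes.update(deps)
    remaining = dict((n, set(depDict.get(n, ()))) for n in nodes)
    order = []
    while remaining:
        # Nodes with no unmet dependencies can be emitted next.
        ready = [n for n, deps in remaining.items() if not deps]
        if not ready:
            raise Exception("cycle detected in dependency graph")
        for n in sorted(ready):
            order.append(n)
            del remaining[n]
        for deps in remaining.values():
            deps.difference_update(ready)
    return order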
apache-2.0
CyanogenMod/android_external_chromium_org
tools/python/google/process_utils.py
186
7475
# Copyright (c) 2006-2008 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """Shared process-related utility functions.""" import errno import os import subprocess import sys class CommandNotFound(Exception): pass TASKKILL = os.path.join(os.environ['WINDIR'], 'system32', 'taskkill.exe') TASKKILL_PROCESS_NOT_FOUND_ERR = 128 # On windows 2000 there is no taskkill.exe, we need to have pskill somewhere # in the path. PSKILL = 'pskill.exe' PSKILL_PROCESS_NOT_FOUND_ERR = -1 def KillAll(executables): """Tries to kill all copies of each process in the processes list. Returns an error if any running processes couldn't be killed. """ result = 0 if os.path.exists(TASKKILL): command = [TASKKILL, '/f', '/im'] process_not_found_err = TASKKILL_PROCESS_NOT_FOUND_ERR else: command = [PSKILL, '/t'] process_not_found_err = PSKILL_PROCESS_NOT_FOUND_ERR for name in executables: new_error = RunCommand(command + [name]) # Ignore "process not found" error. if new_error != 0 and new_error != process_not_found_err: result = new_error return result def RunCommandFull(command, verbose=True, collect_output=False, print_output=True): """Runs the command list. Prints the given command (which should be a list of one or more strings). If specified, prints its stderr (and optionally stdout) to stdout, line-buffered, converting line endings to CRLF (see note below). If specified, collects the output as a list of lines and returns it. Waits for the command to terminate and returns its status. Args: command: the full command to run, as a list of one or more strings verbose: if True, combines all output (stdout and stderr) into stdout. Otherwise, prints only the command's stderr to stdout. collect_output: if True, collects the output of the command as a list of lines and returns it print_output: if True, prints the output of the command Returns: A tuple consisting of the process's exit status and output. If collect_output is False, the output will be []. Raises: CommandNotFound if the command executable could not be found. """ print '\n' + subprocess.list2cmdline(command).replace('\\', '/') + '\n', ### if verbose: out = subprocess.PIPE err = subprocess.STDOUT else: out = file(os.devnull, 'w') err = subprocess.PIPE try: proc = subprocess.Popen(command, stdout=out, stderr=err, bufsize=1) except OSError, e: if e.errno == errno.ENOENT: raise CommandNotFound('Unable to find "%s"' % command[0]) raise output = [] if verbose: read_from = proc.stdout else: read_from = proc.stderr line = read_from.readline() while line: line = line.rstrip() if collect_output: output.append(line) if print_output: # Windows Python converts \n to \r\n automatically whenever it # encounters it written to a text file (including stdout). The only # way around it is to write to a binary file, which isn't feasible for # stdout. So we end up with \r\n here even though we explicitly write # \n. (We could write \r instead, which doesn't get converted to \r\n, # but that's probably more troublesome for people trying to read the # files.) print line + '\n', # Python on windows writes the buffer only when it reaches 4k. This is # not fast enough for all purposes. sys.stdout.flush() line = read_from.readline() # Make sure the process terminates. proc.wait() if not verbose: out.close() return (proc.returncode, output) def RunCommand(command, verbose=True): """Runs the command list, printing its output and returning its exit status. 
Prints the given command (which should be a list of one or more strings), then runs it and prints its stderr (and optionally stdout) to stdout, line-buffered, converting line endings to CRLF. Waits for the command to terminate and returns its status. Args: command: the full command to run, as a list of one or more strings verbose: if True, combines all output (stdout and stderr) into stdout. Otherwise, prints only the command's stderr to stdout. Returns: The process's exit status. Raises: CommandNotFound if the command executable could not be found. """ return RunCommandFull(command, verbose)[0] def RunCommandsInParallel(commands, verbose=True, collect_output=False, print_output=True): """Runs a list of commands in parallel, waits for all commands to terminate and returns their status. If specified, the ouput of commands can be returned and/or printed. Args: commands: the list of commands to run, each as a list of one or more strings. verbose: if True, combines stdout and stderr into stdout. Otherwise, prints only the command's stderr to stdout. collect_output: if True, collects the output of the each command as a list of lines and returns it. print_output: if True, prints the output of each command. Returns: A list of tuples consisting of each command's exit status and output. If collect_output is False, the output will be []. Raises: CommandNotFound if any of the command executables could not be found. """ command_num = len(commands) outputs = [[] for i in xrange(command_num)] procs = [None for i in xrange(command_num)] eofs = [False for i in xrange(command_num)] for command in commands: print '\n' + subprocess.list2cmdline(command).replace('\\', '/') + '\n', if verbose: out = subprocess.PIPE err = subprocess.STDOUT else: out = file(os.devnull, 'w') err = subprocess.PIPE for i in xrange(command_num): try: command = commands[i] procs[i] = subprocess.Popen(command, stdout=out, stderr=err, bufsize=1) except OSError, e: if e.errno == errno.ENOENT: raise CommandNotFound('Unable to find "%s"' % command[0]) raise # We could consider terminating the processes already started. # But Popen.kill() is only available in version 2.6. # For now the clean up is done by KillAll. while True: eof_all = True for i in xrange(command_num): if eofs[i]: continue if verbose: read_from = procs[i].stdout else: read_from = procs[i].stderr line = read_from.readline() if line: eof_all = False line = line.rstrip() outputs[i].append(line) if print_output: # Windows Python converts \n to \r\n automatically whenever it # encounters it written to a text file (including stdout). The only # way around it is to write to a binary file, which isn't feasible # for stdout. So we end up with \r\n here even though we explicitly # write \n. (We could write \r instead, which doesn't get converted # to \r\n, but that's probably more troublesome for people trying to # read the files.) print line + '\n', else: eofs[i] = True if eof_all: break # Make sure the process terminates. for i in xrange(command_num): procs[i].wait() if not verbose: out.close() return [(procs[i].returncode, outputs[i]) for i in xrange(command_num)]
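Typical call patterns for the helpers above, as suggested by their docstrings; Windows-oriented sketches only, since the TASKKILL/PSKILL paths are resolved at import time.

status = RunCommand(['cmd', '/c', 'echo hello'])
status, lines = RunCommandFull(['cmd', '/c', 'dir'],
                               collect_output=True, print_output=False)
results = RunCommandsInParallel([['cmd', '/c', 'echo one'],
                                 ['cmd', '/c', 'echo two']],
                                collect_output=True)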
bsd-3-clause
billyhunt/osf.io
website/addons/dataverse/views/config.py
7
4919
# -*- coding: utf-8 -*-
import httplib as http

from flask import request
from modularodm import Q
from modularodm.storage.base import KeyExistsException

from framework.exceptions import HTTPError
from framework.auth.decorators import must_be_logged_in

from website.project import decorators
from website.util.sanitize import assert_clean
from website.addons.dataverse import client
from website.addons.dataverse.provider import DataverseProvider
from website.addons.dataverse.serializer import DataverseSerializer
from website.oauth.models import ExternalAccount


@must_be_logged_in
def dataverse_get_user_accounts(auth):
    """Returns the list of all of the current user's authorized Dataverse accounts"""
    return DataverseSerializer(
        user_settings=auth.user.get_addon('dataverse')
    ).serialized_user_settings


@must_be_logged_in
def dataverse_add_user_account(auth, **kwargs):
    """Verifies new external account credentials and adds to user's list"""
    user = auth.user
    provider = DataverseProvider()

    host = request.json.get('host').rstrip('/')
    api_token = request.json.get('api_token')

    # Verify that credentials are valid
    client.connect_or_error(host, api_token)

    # Note: `DataverseSerializer` expects display_name to be a URL
    try:
        provider.account = ExternalAccount(
            provider=provider.short_name,
            provider_name=provider.name,
            display_name=host,       # no username; show host
            oauth_key=host,          # hijacked; now host
            oauth_secret=api_token,  # hijacked; now api_token
            provider_id=api_token,   # Change to username if Dataverse allows
        )
        provider.account.save()
    except KeyExistsException:
        # ... or get the old one
        provider.account = ExternalAccount.find_one(
            Q('provider', 'eq', provider.short_name) &
            Q('provider_id', 'eq', api_token)
        )
    assert provider.account is not None

    if provider.account not in user.external_accounts:
        user.external_accounts.append(provider.account)

    user_addon = auth.user.get_addon('dataverse')
    if not user_addon:
        user.add_addon('dataverse')

    user.save()

    # Need to ensure that the user has dataverse enabled at this point
    user.get_or_add_addon('dataverse', auth=auth)
    user.save()

    return {}


@must_be_logged_in
@decorators.must_be_valid_project
@decorators.must_have_addon('dataverse', 'node')
def dataverse_get_config(node_addon, auth, **kwargs):
    """API that returns the serialized node settings."""
    result = DataverseSerializer(
        user_settings=auth.user.get_addon('dataverse'),
        node_settings=node_addon,
    ).serialized_node_settings
    return {'result': result}, http.OK


@decorators.must_have_permission('write')
@decorators.must_have_addon('dataverse', 'user')
@decorators.must_have_addon('dataverse', 'node')
def dataverse_get_datasets(node_addon, **kwargs):
    """Get list of datasets from provided Dataverse alias"""
    alias = request.json.get('alias')

    connection = client.connect_from_settings(node_addon)
    dataverse = client.get_dataverse(connection, alias)
    datasets = client.get_datasets(dataverse)

    ret = {
        'alias': alias,  # include alias to verify dataset container
        'datasets': [{'title': dataset.title, 'doi': dataset.doi} for dataset in datasets],
    }
    return ret, http.OK


@decorators.must_have_permission('write')
@decorators.must_have_addon('dataverse', 'user')
@decorators.must_have_addon('dataverse', 'node')
def dataverse_set_config(node_addon, auth, **kwargs):
    """Saves selected Dataverse and dataset to node settings"""
    user_settings = node_addon.user_settings
    user = auth.user

    if user_settings and user_settings.owner != user:
        raise HTTPError(http.FORBIDDEN)

    try:
        assert_clean(request.json)
    except AssertionError:
        # TODO: Test me!
        raise HTTPError(http.NOT_ACCEPTABLE)

    alias = request.json.get('dataverse').get('alias')
    doi = request.json.get('dataset').get('doi')

    if doi is None:
        raise HTTPError(http.BAD_REQUEST)

    connection = client.connect_from_settings(node_addon)
    dataverse = client.get_dataverse(connection, alias)
    dataset = client.get_dataset(dataverse, doi)

    node_addon.dataverse_alias = dataverse.alias
    node_addon.dataverse = dataverse.title

    node_addon.dataset_doi = dataset.doi
    node_addon.dataset_id = dataset.id
    node_addon.dataset = dataset.title

    node = node_addon.owner
    node.add_log(
        action='dataverse_dataset_linked',
        params={
            'project': node.parent_id,
            'node': node._primary_key,
            'dataset': dataset.title,
        },
        auth=auth,
    )

    node_addon.save()

    return {'dataverse': dataverse.title, 'dataset': dataset.title}, http.OK
apache-2.0
chanceraine/nupic
nupic/regions/ImageSensorFilters/BoxFixer.py
17
27788
# ---------------------------------------------------------------------- # Numenta Platform for Intelligent Computing (NuPIC) # Copyright (C) 2013, Numenta, Inc. Unless you have an agreement # with Numenta, Inc., for a separate license for this software code, the # following terms and conditions apply: # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero Public License version 3 as # published by the Free Software Foundation. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. # See the GNU Affero Public License for more details. # # You should have received a copy of the GNU Affero Public License # along with this program. If not, see http://www.gnu.org/licenses. # # http://numenta.org/licenses/ # ---------------------------------------------------------------------- import os import numpy from PIL import Image from nupic.regions.ImageSensorFilters.BaseFilter import BaseFilter class BoxFixer(BaseFilter): """ Does not modify any image pixels, just adjusts the tracking box to defeat hard shadows, etc., and in general to normalize the box with respect to the SMotion map between truth and tracker-generated boxes. """ def __init__(self, #-------------------------------------------------- # General parameters # # Width (in pixels) of smooth window for horizontal # SMotion strength histogram windowX=9, # Height (in pixels) of smooth window for vertical # SMotion strength histogram windowY=9, # Smooth window type; must be one of: 'flat', 'hanning', # 'hamming', 'bartlett', 'blackman' windowType='hanning', #-------------------------------------------------- # Horizontal tightening/splitting parameters # # Minimum smoothed SMotion strength (summed vertically) # for an X position to be considered "strong" (expressed as # a fraction of the maximum vertically-summed SMotion) heightThresh=0.1, # Minimum peak SMotion strength for a secondary lobe # (i.e., not the strongest "primary" lobe) to avoid # being culled secondaryHeightThresh=0.5, # Minimum absolute horizontal length (in pixels) that a # horizontal strong zone must extend to avoid being # culled minAbsZoneLenX=5, # Minimum relative horizontal length (expressed as a # fraction of the total original box width) that a # horizontal strong zone must extend to avoid being # culled minRelZoneLenX=0.15, # Minimum horizontal "gap" width (in terms of absolute # pixels) which a weak zone must extend to avoid # being filled in (if it lies between two strong zones) minAbsWeakLenX=5, # Minimum horizontal "gap" width (as percentage of the # image width) which a weak zone must extend to avoid # being filled in (if it lies between two strong zones) minRelWeakLenX=0.10, # The number of pixels to expand our accepted zones # horizontally prior to tightening/splitting zonePreExpansionX=16, #8, #0, #8, # The number of pixels to expand our accepted zones # norizontally following tightening/splitting zonePostExpansionX=4, #0, #4, #-------------------------------------------------- # Vertical tightening/splitting parameters # # Minimum smoothed SMotion strength (summed horizontally) # for a Y position to be considered "strong" (expressed as # a fraction of the maximum horizontally-summed SMotion) widthThresh=0.1, # Minimum peak SMotion strength for a secondary lobe # (i.e., not the strongest "primary" lobe) to avoid # being culled secondaryWidthThresh=0.20, # Minimum absolute 
vertical length (in pixels) that a # vertical strong zone must extend to avoid being # culled minAbsZoneLenY=5, # Minimum relative vertical length (expressed as a # fraction of the total original box height) that a # vertical strong zone must extend to avoid being # culled minRelZoneLenY=0.15, # Minimum vertical "gap" width (in terms of absolute # pixels) which a weak zone must extend to avoid # being filled in (if it lies between two strong zones) minAbsWeakLenY=5, # Minimum vertical "gap" width (as percentage of the # image height) which a weak zone must extend to avoid # being filled in (if it lies between two strong zones) minRelWeakLenY=0.30, # The number of pixels to expand our accepted zones # vertically (not yet implemented) zonePreExpansionY=16, #8, #0, #8, # The number of pixels to expand our accepted zones # vertically following tightening/splitting zonePostExpansionY=4, #0, #4, #--------------------------------- # Splitting policy # Controls what to do if our algorithm wants to split # a box. Valid values are 'union' (take the union of # the split pieces) or 'biggest' (use the biggest # split box.) splitPolicy='biggest', #--------------------------------- # Debugging debugMode=False, ): """ """ BaseFilter.__init__(self) self._windowX = windowX self._windowY = windowY self._windowType = windowType self._heightThresh = heightThresh self._secondaryHeightThresh = secondaryHeightThresh self._minAbsZoneLenX = minAbsZoneLenX self._minRelZoneLenX = minRelZoneLenX self._minAbsWeakLenX = minAbsWeakLenX self._minRelWeakLenX = minRelWeakLenX self._zonePreExpansionX = zonePreExpansionX self._zonePostExpansionX = zonePostExpansionX self._widthThresh = widthThresh self._secondaryWidthThresh = secondaryWidthThresh self._minAbsZoneLenY = minAbsZoneLenY self._minRelZoneLenY = minRelZoneLenY self._minAbsWeakLenY = minAbsWeakLenY self._minRelWeakLenY = minRelWeakLenY self._zonePreExpansionY = zonePreExpansionY self._zonePostExpansionY = zonePostExpansionY self._splitPolicy = splitPolicy self._debugMode = debugMode # Counts how many images we've processed self._imgCounter = 0 # @todo -- perform parameter validation def process(self, image): """ """ assert image.mode == 'LA' smotion, mask = image.split() # Try to get box from info, otherwise from alpha channel origBox = image.info.get('tracking') if not origBox: origBox = mask.getbbox() expandedBox = (max(0, origBox[0] - self._zonePreExpansionX), max(0, origBox[1] - self._zonePreExpansionY), min(image.size[0], origBox[2] + self._zonePreExpansionX), min(image.size[1], origBox[3] + self._zonePreExpansionY)) imgbox = smotion.crop(expandedBox) #imgbox = smotion.crop(origBox) w, h = imgbox.size imgdata = numpy.array(imgbox.getdata()) imgdata.shape = (h, w) # Create binary image indicating whether non-zero # S-Motion exists salpha = (imgdata > 0).astype(int) histX = salpha.mean(axis=0) smoothX = self._smooth(histX, window_len=self._windowX, window=self._windowType) #----------------------------------------------------------------------- # Apply tightening/splitting in horizontal direction # Pre-compute the minimum length of a strong # zone that we'll accept. # This is the max of an absolute length and a # minimum fraction of the original box. 
minZoneLen = max(self._minAbsZoneLenX, int(round(self._minRelZoneLenX \ * float(len(smoothX))))) # Minimum length for a weak gap minWeakLen = max(self._minAbsWeakLenX, int(round(self._minRelWeakLenX \ * float(len(smoothX))))) maxX = smoothX.max() # For now, simple threshold threshX = self._heightThresh * maxX strongX = (smoothX >= threshX).astype(int) # Pre-calculate the minimum peak strength for # each lobe to avoid being culled minStrength = maxX * self._secondaryHeightThresh # Find changes: # If deltas[k] == 1, then strongX[k+1] is the # beginning of a new strong block; # If deltas[k] == -1, then strongX[k+1] is the # beginning of a new weak block deltas = strongX[1:] - strongX[:-1] changes = numpy.where(deltas)[0] # Form our block lists strongZonesX = [] if strongX[0]: curZoneStart = 0 else: curZoneStart = None for changeIdx in changes: strongIdx = changeIdx + 1 changeDir = deltas[changeIdx] # Start of new strong zone if changeDir == 1: assert curZoneStart is None curZoneStart = strongIdx # End of existing strong zone else: assert changeDir == -1 assert curZoneStart is not None strongZone = (curZoneStart, strongIdx) self._acceptOrCull(smoothX, strongZonesX, strongZone, minZoneLen, minStrength) curZoneStart = None # Last one if curZoneStart is not None: strongZone = (curZoneStart, len(strongX)) self._acceptOrCull(smoothX, strongZonesX, strongZone, minZoneLen, minStrength) # Remove tiny/thin weak gaps if len(strongZonesX) > 1: tempZones = [] lastZone = strongZonesX[0] for startIdx, endIdx in strongZonesX[1:]: if startIdx - lastZone[1] >= minWeakLen: tempZones += [lastZone] lastZone = (startIdx, endIdx) else: lastZone = (lastZone[0], endIdx) tempZones += [lastZone] strongZonesX = tempZones #----------------------------------------------------------------------- # Apply tightening/splitting in vertical direction (to each strong zone) strongZonesAll = [] for strongZoneX in strongZonesX: #histY = salpha.mean(axis=1) histY = salpha[:,strongZoneX[0]:strongZoneX[1]].mean(axis=1) smoothY = self._smooth(histY, window_len=self._windowY, window=self._windowType) # Pre-compute the minimum length of a strong # zone that we'll accept. # This is the max of an absolute length and a # minimum fraction of the original box. 
minZoneLen = max(self._minAbsZoneLenY, int(round(self._minRelZoneLenY \ * float(len(smoothY))))) # Minimum length for a weak gap minWeakLen = max(self._minAbsWeakLenY, int(round(self._minRelWeakLenY \ * float(len(smoothY))))) maxY = smoothY.max() # For now, simple threshold threshY = self._widthThresh * maxY strongY = (smoothY >= threshY).astype(int) # Pre-calculate the minimum peak strength for # each lobe to avoid being culled minStrength = maxY * self._secondaryWidthThresh # Find changes: deltas = strongY[1:] - strongY[:-1] changes = numpy.where(deltas)[0] # Form our block lists strongZonesY = [] if strongY[0]: curZoneStart = 0 else: curZoneStart = None for changeIdx in changes: strongIdx = changeIdx + 1 changeDir = deltas[changeIdx] # Start of new strong zone if changeDir == 1: assert curZoneStart is None curZoneStart = strongIdx # End of existing strong zone else: assert changeDir == -1 assert curZoneStart is not None strongZoneY = (curZoneStart, strongIdx) self._acceptOrCull(smoothY, strongZonesY, strongZoneY, minZoneLen, minStrength) curZoneStart = None # Last one if curZoneStart is not None: strongZoneY = (curZoneStart, len(strongY)) self._acceptOrCull(smoothY, strongZonesY, strongZoneY, minZoneLen, minStrength) # Remove tiny/thin weak gaps if len(strongZonesY) > 1: tempZones = [] lastZone = strongZonesY[0] for startIdx, endIdx in strongZonesY[1:]: if startIdx - lastZone[1] >= minWeakLen: tempZones += [lastZone] lastZone = (startIdx, endIdx) else: lastZone = (lastZone[0], endIdx) tempZones += [lastZone] strongZonesY = tempZones left, right = strongZoneX #strongZonesAll.extend([(left, top, right, bottom) for (top, bottom) in strongZonesY]) for (top, bottom) in strongZonesY: expandedZone = (max(0, left - self._zonePostExpansionX), max(0, top - self._zonePostExpansionY), min(image.size[0], right + self._zonePostExpansionX), min(image.size[1], bottom + self._zonePostExpansionY)) if expandedZone[2] < expandedZone[0]: expandedZone[2] = expandedZone[0] if expandedZone[3] < expandedZone[1]: expandedZone[3] = expandedZone[1] strongZonesAll += [expandedZone] if False: # Obtain the videoID and sequenceID imgDir, imgName = os.path.split(imagePath) imgPrefix = os.path.split(imgDir)[1] # Example: # overlap.0550_sequence10067 match = re.match(r"^(?P<mnemonic>[a-z]+)\.(?P<videoID>[0-9]{4})_sequence(?P<seqID>\-?[0-9]{1,5})$", imgPrefix) if not match: match = re.match(r"^vid(?P<videoID>[0-9]{4})_seq(?P<seqID>\-?[0-9]{3,4})$", imgPrefix) assert match d = match.groupdict() videoID = int(d['videoID']) seqID = int(d['seqID']) key = (videoID, seqID) numZones = len(strongZonesAll) # Debugging (and inefficient) if self._debugMode: # Mark up the img box blank = Image.new('L', image.size, 0) base = smotion.convert('RGB') alphaNew = blank.copy() alphaOrig = blank.copy() alphaOrig.paste(255, origBox) #for zoneStart, zoneEnd in strongZones: for (left, top, right, bottom) in strongZonesAll: zoneBox = (expandedBox[0] + left, expandedBox[1] + top, expandedBox[0] + right, expandedBox[1] + bottom) #zoneBox = (origBox[0] + left, origBox[1] + top, # origBox[0] + right, origBox[1] + bottom) alphaNew.paste(255, zoneBox) blender = Image.merge('RGB', (alphaOrig, alphaNew, blank)) #blender = Image.merge('RGB', (alphaOrig, blank, alphaNew)) blendFraction = 0.5 resultImg = Image.blend(base, blender, blendFraction) # Dump marked-up images to disk #imgDir, imgName = os.path.split(imagePath) #imgPrefix = os.path.split(imgDir)[1] dstPath = "%06d.png" % self._imgCounter dstName, dstExt = os.path.splitext(dstPath) dstName += 
"__Z%d" % numZones dstPath = dstName + dstExt resultImg.save(dstPath) # Print stats #(left, top, right, bottom) = strongZonesAll[0] #print "ORIG (%3d, %3d, %3d, %3d) [%3dx%3d] ==> (%3d, %3d, %3d, %3d) [%3dx%3d]" % \ # (origBox[0], origBox[1], origBox[2], origBox[3], # origBox[2]-origBox[0], origBox[3]-origBox[1], # left, top, right, bottom, right-left, bottom-top) # If there is more than one box, use the biggest # (this is just a hack heuristic) tightenedZone = None if len(strongZonesAll) > 1: # Take biggest if self._splitPolicy == 'biggest': #print "WARNING: multiple (%d) split boxes...choosing biggest one..." % len(strongZonesAll) bigBoxIdx = None bigBoxArea = -1 for boxIdx, subBox in enumerate(strongZonesAll): area = (subBox[2] - subBox[0]) * (subBox[3] - subBox[1]) if area > bigBoxArea: bigBoxArea = area bigBoxIdx = boxIdx tightenedZone = strongZonesAll[bigBoxIdx] # Take biggest elif self._splitPolicy == 'union': #print "WARNING: multiple (%d) split boxes...taking union..." % len(strongZonesAll) left, top, right, bottom = strongZonesAll[0] for boxIdx, subBox in enumerate(strongZonesAll[1:]): left = min(left, subBox[0]) top = min(top, subBox[1]) right = max(right, subBox[2]) bottom = max(bottom, subBox[3]) tightenedZone = (left, top, right, bottom) elif not strongZonesAll: #print "WARNING: dissipated box...reverting to original..." box = origBox else: assert len(strongZonesAll) == 1 tightenedZone = strongZonesAll[0] if strongZonesAll: subBox = tightenedZone # 'inference' may be None if box was culled box = (expandedBox[0] + subBox[0], expandedBox[1] + subBox[1], expandedBox[0] + subBox[2], expandedBox[1] + subBox[3]) # Print stats (left, top, right, bottom) = box #print "%06d: (%3d, %3d, %3d, %3d) [%3dx%3d] ==> (%3d, %3d, %3d, %3d) [%3dx%3d]" % \ # (self._imgCounter, # origBox[0], origBox[1], origBox[2], origBox[3], # origBox[2]-origBox[0], origBox[3]-origBox[1], # left, top, right, bottom, right-left, bottom-top) alphaNew = Image.new('L', smotion.size, 0) alphaNew.paste(255, box) dstImage = Image.merge('LA', (smotion, alphaNew)) dstImage.info['tracking'] = box # TEMP TEMP TEMP - dump box dims (pre and post fix) if self._debugMode: if self._imgCounter == 0: mode = 'w' self._logBox = open("LOGBOX.txt", mode) print >>self._logBox, "%d %d %d %d %d %d %d %d %d" % \ (self._imgCounter, origBox[0], origBox[1], origBox[2], origBox[3], box[0], box[1], box[2], box[3]) self._imgCounter += 1 return dstImage def _acceptOrCull(self, strength, strongZones, candidateZone, minZoneLen, minStrength): """ Utility method that will append a candidate strong zone to an existing list of strong zones if and only if it's length meets the minimum zone length requirement. """ (startIdx, stopIdx) = candidateZone zoneLen = stopIdx - startIdx assert zoneLen > 0 peakStrength = strength[startIdx:stopIdx].max() if zoneLen >= minZoneLen and \ peakStrength >= minStrength: strongZones += [candidateZone] def _smooth(self, x, window_len=10, window=None): """smooth the data using a window with requested size. This method is based on the convolution of a scaled window with the signal. The signal is prepared by introducing reflected copies of the signal (with the window size) in both ends so that transient parts are minimized in the begining and end part of the output signal. input: x: the input signal window_len: the dimension of the smoothing window window: the type of window from 'flat', 'hanning', 'hamming', 'bartlett', 'blackman' flat window will produce a moving average smoothing. 
output: the smoothed signal example: t=linspace(-2,2,0.1) x=sin(t)+randn(len(t))*0.1 y=smooth(x) see also: numpy.hanning, numpy.hamming, numpy.bartlett, numpy.blackman, numpy.convolve scipy.signal.lfilter TODO: the window parameter could be the window itself if an array instead of a string """ if x.ndim != 1: raise ValueError, "smooth only accepts 1 dimension arrays." if x.size < window_len: window_len = x.size if window_len<3: return x if not window in ['flat', 'hanning', 'hamming', 'bartlett', 'blackman']: raise ValueError, "Window should be one of 'flat', 'hanning', 'hamming', 'bartlett', 'blackman'" s=numpy.r_[2*x[0]-x[window_len:1:-1],x,2*x[-1]-x[-1:-window_len:-1]] if window == 'flat': #moving average w=numpy.ones(window_len,'d') else: w=eval('numpy.'+window+'(window_len)') y=numpy.convolve(w/w.sum(),s,mode='same') return y[window_len-1:-window_len+1] if False: def process(self, image): """ Performs the following operations: 1. Locates the original bounding box of the image as defined by the image's alpha mask. It is assumed that the alpha mask will consist of a single rectangle, in which case the resulting bbox will be exactly equivalent to the mask representation. However, if for some reason the positive regions of the alpha mask is not a single rectangle, things will still work. 2. Fit the bounding box to the target dimensions, scaling as needed, and filling in padding regions if needed (if the aspect ratio of the bounding box does not match that of the target dimensions which, in general, will be True.) If padding is needed, we fill from the original image pixels around the bounding box if fillFromImageWherePossible is True and we're not outside the original image bounds, otherwise, we use 'fillValue'. 3. Apply each scale in 'scales' to the resulting cropped image, and pad each side by 'padding' (pulling from the real image pixels when possible, and filling with 'fillValue' where not.) 4. Return the list of cropped images. """ BaseFilter.process(self, image) assert image.mode == "LA" # Pull bbox of the alpha mask if 'tracking' in image.info: bbox = image.info['tracking'] else: bbox = image.split()[1].getbbox() # If alpha channel is completely empty, we will end up # with a bbox of 'None'.
Nothing much we can do if bbox is None: bbox = (0, 0, image.size[0], image.size[1]) print 'WARNING: empty alpha channel' # Ascertain the original raw size of the tracking box width = bbox[2] - bbox[0] height = bbox[3] - bbox[1] newImages = [] for scaleIdx, scale in enumerate(self._scales): # Target dimensions depend on the scale at which we're operating targetDims = (self._targetDims[0] * scale, self._targetDims[1] * scale) scaleFactorX = float(targetDims[0]) / float(width) scaleFactorY = float(targetDims[1]) / float(height) # Determine the scaling factors needed to map the # bounding box to the target dimensions (prior to # padding be accounted for) if self._preservationMode is None: pass elif self._preservationMode == "aspect": scaleFactor = min(scaleFactorX, scaleFactorY) scaleFactorX = scaleFactor scaleFactorY = scaleFactor else: assert self._preservationMode == "size" scaleFactorX = scale scaleFactorY = scale # Now, holding the scaling factor constant, compute the # size of the src box in the original image that will # produce the correctly padded target size targetWidth = int(round(targetDims[0])) + 2*self._padding targetHeight = int(round(targetDims[1])) + 2*self._padding srcWidth = float(targetWidth) / scaleFactorX srcHeight = float(targetHeight) / scaleFactorY # Compute the exact coordinates of the source box if self._fillFromImageWherePossible: origCenterX = float(bbox[0] + bbox[2]) * 0.5 origCenterY = float(bbox[1] + bbox[3]) * 0.5 halfWidth = srcWidth * 0.5 halfHeight = srcHeight * 0.5 srcBox = (int(round(origCenterX - halfWidth)), int(round(origCenterY - halfHeight)), int(round(origCenterX + halfWidth)), int(round(origCenterY + halfHeight))) # take into account clipping off the image boundary clipBox = (max(srcBox[0], 0), max(srcBox[1], 0), min(srcBox[2], image.size[0]), min(srcBox[3], image.size[1])) clipOffset = (clipBox[0] - srcBox[0], clipBox[1] - srcBox[1]) else: # extend the bbox to include padding pixels on all sides paddedBBox = (int(bbox[0] - self._padding/scaleFactorX), int(bbox[1] - self._padding/scaleFactorY), int(bbox[2] + self._padding/scaleFactorX), int(bbox[3] + self._padding/scaleFactorY)) # take into account clipping off the image boundary clipBox = (max(paddedBBox[0], 0), max(paddedBBox[1], 0), min(paddedBBox[2], image.size[0]), min(paddedBBox[3], image.size[1])) # The srcBox is the correct aspect ratio, and either taller or wider than the # bbox, but not both. 
srcBox = (0, 0, srcWidth, srcHeight) clipBoxWidth = clipBox[2] - clipBox[0] clipBoxHeight = clipBox[3] - clipBox[1] clipOffset = (int((srcWidth - clipBoxWidth)/2), int((srcHeight - clipBoxHeight)/2)) # Copy the source rect croppedImage = image.crop(clipBox) croppedImage.load() # New development croppedImage.putalpha(Image.new(mode='L', size=croppedImage.size, color=255)) if self._fillValue is None: [gray, alpha] = image.split() hist = numpy.array(gray.histogram(alpha), dtype='float') mean = (hist * self._histWeights).sum() / hist.sum() if mean < 127.5: fillValue = 255 else: fillValue = 0 else: fillValue = self._fillValue # Paste into a new image newImage = Image.new(mode='LA', size=(srcBox[2]-srcBox[0], srcBox[3]-srcBox[1]), color=fillValue) newImage.paste(croppedImage, clipOffset) # Resize the cropped image to the (padded) target size scaledImage = newImage.resize((targetWidth, targetHeight), self._resizingFilter) # Convert and save the scaled image as the output assert scaledImage.mode == 'LA' newImages += [scaledImage] # Dump debugging images to disk if self._dumpDebugImages: self._handleDebug(scaledImage, scaleIdx) return [newImages] def _handleDebug(self, image, scaleIdx, debugDir="tracking.d"): """ Dump tracking boxes to disk for offline analysis """ if not hasattr(self, "_debugIndex"): self._debugIndex = 0 if not os.path.isdir(debugDir): os.mkdir(debugDir) debugPath = os.path.join(debugDir, "tracking.%06d.%02d.png" % \ (self._debugIndex, scaleIdx)) image.save(debugPath) self._debugIndex += 1 def getOutputCount(self): """ Return the number of images returned by each call to process(). If the filter creates multiple simultaneous outputs, return a tuple: (outputCount, simultaneousOutputCount). """ return 1 #return 1, len(self._scales)
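A standalone illustration of the reflection-padded smoothing that _smooth performs, simplified out of the class; it uses the same construction of the padded signal, window, and output slice, applied to a synthetic noisy signal.

import numpy

x = numpy.sin(numpy.linspace(-2, 2, 50)) + numpy.random.randn(50) * 0.1
window_len = 9
w = numpy.hanning(window_len)
# Reflect the signal at both ends so the convolution has no edge transients.
s = numpy.r_[2 * x[0] - x[window_len:1:-1], x, 2 * x[-1] - x[-1:-window_len:-1]]
y = numpy.convolve(w / w.sum(), s, mode='same')[window_len - 1:-window_len + 1]
assert y.shape == x.shape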
agpl-3.0
yaroslavprogrammer/django
django/template/smartif.py
239
6269
""" Parser and utilities for the smart 'if' tag """ # Using a simple top down parser, as described here: # http://effbot.org/zone/simple-top-down-parsing.htm. # 'led' = left denotation # 'nud' = null denotation # 'bp' = binding power (left = lbp, right = rbp) class TokenBase(object): """ Base class for operators and literals, mainly for debugging and for throwing syntax errors. """ id = None # node/token type name value = None # used by literals first = second = None # used by tree nodes def nud(self, parser): # Null denotation - called in prefix context raise parser.error_class( "Not expecting '%s' in this position in if tag." % self.id ) def led(self, left, parser): # Left denotation - called in infix context raise parser.error_class( "Not expecting '%s' as infix operator in if tag." % self.id ) def display(self): """ Returns what to display in error messages for this node """ return self.id def __repr__(self): out = [str(x) for x in [self.id, self.first, self.second] if x is not None] return "(" + " ".join(out) + ")" def infix(bp, func): """ Creates an infix operator, given a binding power and a function that evaluates the node """ class Operator(TokenBase): lbp = bp def led(self, left, parser): self.first = left self.second = parser.expression(bp) return self def eval(self, context): try: return func(context, self.first, self.second) except Exception: # Templates shouldn't throw exceptions when rendering. We are # most likely to get exceptions for things like {% if foo in bar # %} where 'bar' does not support 'in', so default to False return False return Operator def prefix(bp, func): """ Creates a prefix operator, given a binding power and a function that evaluates the node. """ class Operator(TokenBase): lbp = bp def nud(self, parser): self.first = parser.expression(bp) self.second = None return self def eval(self, context): try: return func(context, self.first) except Exception: return False return Operator # Operator precedence follows Python. # NB - we can get slightly more accurate syntax error messages by not using the # same object for '==' and '='. # We defer variable evaluation to the lambda to ensure that terms are # lazily evaluated using Python's boolean parsing logic. OPERATORS = { 'or': infix(6, lambda context, x, y: x.eval(context) or y.eval(context)), 'and': infix(7, lambda context, x, y: x.eval(context) and y.eval(context)), 'not': prefix(8, lambda context, x: not x.eval(context)), 'in': infix(9, lambda context, x, y: x.eval(context) in y.eval(context)), 'not in': infix(9, lambda context, x, y: x.eval(context) not in y.eval(context)), '=': infix(10, lambda context, x, y: x.eval(context) == y.eval(context)), '==': infix(10, lambda context, x, y: x.eval(context) == y.eval(context)), '!=': infix(10, lambda context, x, y: x.eval(context) != y.eval(context)), '>': infix(10, lambda context, x, y: x.eval(context) > y.eval(context)), '>=': infix(10, lambda context, x, y: x.eval(context) >= y.eval(context)), '<': infix(10, lambda context, x, y: x.eval(context) < y.eval(context)), '<=': infix(10, lambda context, x, y: x.eval(context) <= y.eval(context)), } # Assign 'id' to each: for key, op in OPERATORS.items(): op.id = key class Literal(TokenBase): """ A basic self-resolvable object similar to a Django template variable. """ # IfParser uses Literal in create_var, but TemplateIfParser overrides # create_var so that a proper implementation that actually resolves # variables, filters etc is used. 
id = "literal" lbp = 0 def __init__(self, value): self.value = value def display(self): return repr(self.value) def nud(self, parser): return self def eval(self, context): return self.value def __repr__(self): return "(%s %r)" % (self.id, self.value) class EndToken(TokenBase): lbp = 0 def nud(self, parser): raise parser.error_class("Unexpected end of expression in if tag.") EndToken = EndToken() class IfParser(object): error_class = ValueError def __init__(self, tokens): # pre-pass necessary to turn 'not','in' into single token l = len(tokens) mapped_tokens = [] i = 0 while i < l: token = tokens[i] if token == "not" and i + 1 < l and tokens[i+1] == "in": token = "not in" i += 1 # skip 'in' mapped_tokens.append(self.translate_token(token)) i += 1 self.tokens = mapped_tokens self.pos = 0 self.current_token = self.next_token() def translate_token(self, token): try: op = OPERATORS[token] except (KeyError, TypeError): return self.create_var(token) else: return op() def next_token(self): if self.pos >= len(self.tokens): return EndToken else: retval = self.tokens[self.pos] self.pos += 1 return retval def parse(self): retval = self.expression() # Check that we have exhausted all the tokens if self.current_token is not EndToken: raise self.error_class("Unused '%s' at end of if expression." % self.current_token.display()) return retval def expression(self, rbp=0): t = self.current_token self.current_token = self.next_token() left = t.nud(self) while rbp < self.current_token.lbp: t = self.current_token self.current_token = self.next_token() left = t.led(left, self) return left def create_var(self, value): return Literal(value)
bsd-3-clause
papouso/odoo
openerp/tools/config.py
178
36937
#openerp.loggers.handlers. -*- coding: utf-8 -*- ############################################################################## # # OpenERP, Open Source Management Solution # Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>). # Copyright (C) 2010-2014 OpenERP s.a. (<http://openerp.com>). # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # ############################################################################## import ConfigParser import optparse import os import sys import openerp import openerp.conf import openerp.loglevels as loglevels import logging import openerp.release as release import appdirs class MyOption (optparse.Option, object): """ optparse Option with two additional attributes. The list of command line options (getopt.Option) is used to create the list of the configuration file options. When reading the file, and then reading the command line arguments, we don't want optparse.parse results to override the configuration file values. But if we provide default values to optparse, optparse will return them and we can't know if they were really provided by the user or not. A solution is to not use optparse's default attribute, but use a custom one (that will be copied to create the default values of the configuration file). """ def __init__(self, *opts, **attrs): self.my_default = attrs.pop('my_default', None) super(MyOption, self).__init__(*opts, **attrs) DEFAULT_LOG_HANDLER = ':INFO' def _check_ssl(): try: from OpenSSL import SSL import socket return hasattr(socket, 'ssl') and hasattr(SSL, "Connection") except: return False def _get_default_datadir(): home = os.path.expanduser('~') if os.path.exists(home): func = appdirs.user_data_dir else: if sys.platform in ['win32', 'darwin']: func = appdirs.site_data_dir else: func = lambda **kwarg: "/var/lib/%s" % kwarg['appname'].lower() # No "version" kwarg as session and filestore paths are shared against series return func(appname=release.product_name, appauthor=release.author) def _deduplicate_loggers(loggers): """ Avoid saving multiple logging levels for the same loggers to a save file, that just takes space and the list can potentially grow unbounded if for some odd reason people use :option`odoo.py --save`` all the time. """ # dict(iterable) -> the last item of iterable for any given key wins, # which is what we want and expect. Output order should not matter as # there are no duplicates within the output sequence return ( '{}:{}'.format(logger, level) for logger, level in dict(it.split(':') for it in loggers).iteritems() ) class configmanager(object): def __init__(self, fname=None): """Constructor. :param fname: a shortcut allowing to instantiate :class:`configmanager` from Python code without resorting to environment variable """ # Options not exposed on the command line. Command line options will be added # from optparse's parser. 
self.options = { 'admin_passwd': 'admin', 'csv_internal_sep': ',', 'publisher_warranty_url': 'http://services.openerp.com/publisher-warranty/', 'reportgz': False, 'root_path': None, } # Not exposed in the configuration file. self.blacklist_for_save = set([ 'publisher_warranty_url', 'load_language', 'root_path', 'init', 'save', 'config', 'update', 'stop_after_init' ]) # dictionary mapping option destination (keys in self.options) to MyOptions. self.casts = {} self.misc = {} self.config_file = fname self.has_ssl = _check_ssl() self._LOGLEVELS = dict([ (getattr(loglevels, 'LOG_%s' % x), getattr(logging, x)) for x in ('CRITICAL', 'ERROR', 'WARNING', 'INFO', 'DEBUG', 'NOTSET') ]) version = "%s %s" % (release.description, release.version) self.parser = parser = optparse.OptionParser(version=version, option_class=MyOption) # Server startup config group = optparse.OptionGroup(parser, "Common options") group.add_option("-c", "--config", dest="config", help="specify alternate config file") group.add_option("-s", "--save", action="store_true", dest="save", default=False, help="save configuration to ~/.openerp_serverrc") group.add_option("-i", "--init", dest="init", help="install one or more modules (comma-separated list, use \"all\" for all modules), requires -d") group.add_option("-u", "--update", dest="update", help="update one or more modules (comma-separated list, use \"all\" for all modules). Requires -d.") group.add_option("--without-demo", dest="without_demo", help="disable loading demo data for modules to be installed (comma-separated, use \"all\" for all modules). Requires -d and -i. Default is %default", my_default=False) group.add_option("-P", "--import-partial", dest="import_partial", my_default='', help="Use this for big data importation, if it crashes you will be able to continue at the current state. Provide a filename to store intermediate importation states.") group.add_option("--pidfile", dest="pidfile", help="file where the server pid will be stored") group.add_option("--addons-path", dest="addons_path", help="specify additional addons paths (separated by commas).", action="callback", callback=self._check_addons_path, nargs=1, type="string") group.add_option("--load", dest="server_wide_modules", help="Comma-separated list of server-wide modules default=web") group.add_option("-D", "--data-dir", dest="data_dir", my_default=_get_default_datadir(), help="Directory where to store Odoo data") parser.add_option_group(group) # XML-RPC / HTTP group = optparse.OptionGroup(parser, "XML-RPC Configuration") group.add_option("--xmlrpc-interface", dest="xmlrpc_interface", my_default='', help="Specify the TCP IP address for the XML-RPC protocol. 
The empty string binds to all interfaces.") group.add_option("--xmlrpc-port", dest="xmlrpc_port", my_default=8069, help="specify the TCP port for the XML-RPC protocol", type="int") group.add_option("--no-xmlrpc", dest="xmlrpc", action="store_false", my_default=True, help="disable the XML-RPC protocol") group.add_option("--proxy-mode", dest="proxy_mode", action="store_true", my_default=False, help="Enable correct behavior when behind a reverse proxy") group.add_option("--longpolling-port", dest="longpolling_port", my_default=8072, help="specify the TCP port for longpolling requests", type="int") parser.add_option_group(group) # XML-RPC / HTTPS title = "XML-RPC Secure Configuration" if not self.has_ssl: title += " (disabled as ssl is unavailable)" group = optparse.OptionGroup(parser, title) group.add_option("--xmlrpcs-interface", dest="xmlrpcs_interface", my_default='', help="Specify the TCP IP address for the XML-RPC Secure protocol. The empty string binds to all interfaces.") group.add_option("--xmlrpcs-port", dest="xmlrpcs_port", my_default=8071, help="specify the TCP port for the XML-RPC Secure protocol", type="int") group.add_option("--no-xmlrpcs", dest="xmlrpcs", action="store_false", my_default=True, help="disable the XML-RPC Secure protocol") group.add_option("--cert-file", dest="secure_cert_file", my_default='server.cert', help="specify the certificate file for the SSL connection") group.add_option("--pkey-file", dest="secure_pkey_file", my_default='server.pkey', help="specify the private key file for the SSL connection") parser.add_option_group(group) # WEB group = optparse.OptionGroup(parser, "Web interface Configuration") group.add_option("--db-filter", dest="dbfilter", my_default='.*', help="Filter listed database", metavar="REGEXP") parser.add_option_group(group) # Testing Group group = optparse.OptionGroup(parser, "Testing Configuration") group.add_option("--test-file", dest="test_file", my_default=False, help="Launch a python or YML test file.") group.add_option("--test-report-directory", dest="test_report_directory", my_default=False, help="If set, will save sample of all reports in this directory.") group.add_option("--test-enable", action="store_true", dest="test_enable", my_default=False, help="Enable YAML and unit tests.") group.add_option("--test-commit", action="store_true", dest="test_commit", my_default=False, help="Commit database changes performed by YAML or XML tests.") parser.add_option_group(group) # Logging Group group = optparse.OptionGroup(parser, "Logging Configuration") group.add_option("--logfile", dest="logfile", help="file where the server log will be stored") group.add_option("--logrotate", dest="logrotate", action="store_true", my_default=False, help="enable logfile rotation") group.add_option("--syslog", action="store_true", dest="syslog", my_default=False, help="Send the log to the syslog server") group.add_option('--log-handler', action="append", default=[], my_default=DEFAULT_LOG_HANDLER, metavar="PREFIX:LEVEL", help='setup a handler at LEVEL for a given PREFIX. An empty PREFIX indicates the root logger. This option can be repeated. 
Example: "openerp.orm:DEBUG" or "werkzeug:CRITICAL" (default: ":INFO")') group.add_option('--log-request', action="append_const", dest="log_handler", const="openerp.http.rpc.request:DEBUG", help='shortcut for --log-handler=openerp.http.rpc.request:DEBUG') group.add_option('--log-response', action="append_const", dest="log_handler", const="openerp.http.rpc.response:DEBUG", help='shortcut for --log-handler=openerp.http.rpc.response:DEBUG') group.add_option('--log-web', action="append_const", dest="log_handler", const="openerp.http:DEBUG", help='shortcut for --log-handler=openerp.http:DEBUG') group.add_option('--log-sql', action="append_const", dest="log_handler", const="openerp.sql_db:DEBUG", help='shortcut for --log-handler=openerp.sql_db:DEBUG') group.add_option('--log-db', dest='log_db', help="Logging database", my_default=False) group.add_option('--log-db-level', dest='log_db_level', my_default='warning', help="Logging database level") # For backward-compatibility, map the old log levels to something # quite close. levels = [ 'info', 'debug_rpc', 'warn', 'test', 'critical', 'debug_sql', 'error', 'debug', 'debug_rpc_answer', 'notset' ] group.add_option('--log-level', dest='log_level', type='choice', choices=levels, my_default='info', help='specify the level of the logging. Accepted values: %s.' % (levels,)) parser.add_option_group(group) # SMTP Group group = optparse.OptionGroup(parser, "SMTP Configuration") group.add_option('--email-from', dest='email_from', my_default=False, help='specify the SMTP email address for sending email') group.add_option('--smtp', dest='smtp_server', my_default='localhost', help='specify the SMTP server for sending email') group.add_option('--smtp-port', dest='smtp_port', my_default=25, help='specify the SMTP port', type="int") group.add_option('--smtp-ssl', dest='smtp_ssl', action='store_true', my_default=False, help='if passed, SMTP connections will be encrypted with SSL (STARTTLS)') group.add_option('--smtp-user', dest='smtp_user', my_default=False, help='specify the SMTP username for sending email') group.add_option('--smtp-password', dest='smtp_password', my_default=False, help='specify the SMTP password for sending email') parser.add_option_group(group) group = optparse.OptionGroup(parser, "Database related options") group.add_option("-d", "--database", dest="db_name", my_default=False, help="specify the database name") group.add_option("-r", "--db_user", dest="db_user", my_default=False, help="specify the database user name") group.add_option("-w", "--db_password", dest="db_password", my_default=False, help="specify the database password") group.add_option("--pg_path", dest="pg_path", help="specify the pg executable path") group.add_option("--db_host", dest="db_host", my_default=False, help="specify the database host") group.add_option("--db_port", dest="db_port", my_default=False, help="specify the database port", type="int") group.add_option("--db_maxconn", dest="db_maxconn", type='int', my_default=64, help="specify the the maximum number of physical connections to posgresql") group.add_option("--db-template", dest="db_template", my_default="template1", help="specify a custom database template to create a new database") parser.add_option_group(group) group = optparse.OptionGroup(parser, "Internationalisation options", "Use these options to translate Odoo to another language." "See i18n section of the user manual. Option '-d' is mandatory." 
"Option '-l' is mandatory in case of importation" ) group.add_option('--load-language', dest="load_language", help="specifies the languages for the translations you want to be loaded") group.add_option('-l', "--language", dest="language", help="specify the language of the translation file. Use it with --i18n-export or --i18n-import") group.add_option("--i18n-export", dest="translate_out", help="export all sentences to be translated to a CSV file, a PO file or a TGZ archive and exit") group.add_option("--i18n-import", dest="translate_in", help="import a CSV or a PO file with translations and exit. The '-l' option is required.") group.add_option("--i18n-overwrite", dest="overwrite_existing_translations", action="store_true", my_default=False, help="overwrites existing translation terms on updating a module or importing a CSV or a PO file.") group.add_option("--modules", dest="translate_modules", help="specify modules to export. Use in combination with --i18n-export") parser.add_option_group(group) security = optparse.OptionGroup(parser, 'Security-related options') security.add_option('--no-database-list', action="store_false", dest='list_db', my_default=True, help="disable the ability to return the list of databases") parser.add_option_group(security) # Advanced options group = optparse.OptionGroup(parser, "Advanced options") if os.name == 'posix': group.add_option('--auto-reload', dest='auto_reload', action='store_true', my_default=False, help='enable auto reload') group.add_option('--debug', dest='debug_mode', action='store_true', my_default=False, help='enable debug mode') group.add_option("--stop-after-init", action="store_true", dest="stop_after_init", my_default=False, help="stop the server after its initialization") group.add_option("-t", "--timezone", dest="timezone", my_default=False, help="specify reference timezone for the server (e.g. Europe/Brussels") group.add_option("--osv-memory-count-limit", dest="osv_memory_count_limit", my_default=False, help="Force a limit on the maximum number of records kept in the virtual " "osv_memory tables. The default is False, which means no count-based limit.", type="int") group.add_option("--osv-memory-age-limit", dest="osv_memory_age_limit", my_default=1.0, help="Force a limit on the maximum age of records kept in the virtual " "osv_memory tables. This is a decimal value expressed in hours, " "and the default is 1 hour.", type="float") group.add_option("--max-cron-threads", dest="max_cron_threads", my_default=2, help="Maximum number of threads processing concurrently cron jobs (default 2).", type="int") group.add_option("--unaccent", dest="unaccent", my_default=False, action="store_true", help="Use the unaccent function provided by the database when available.") group.add_option("--geoip-db", dest="geoip_database", my_default='/usr/share/GeoIP/GeoLiteCity.dat', help="Absolute path to the GeoIP database file.") parser.add_option_group(group) if os.name == 'posix': group = optparse.OptionGroup(parser, "Multiprocessing options") # TODO sensible default for the three following limits. 
group.add_option("--workers", dest="workers", my_default=0, help="Specify the number of workers, 0 disable prefork mode.", type="int") group.add_option("--limit-memory-soft", dest="limit_memory_soft", my_default=2048 * 1024 * 1024, help="Maximum allowed virtual memory per worker, when reached the worker be reset after the current request (default 671088640 aka 640MB).", type="int") group.add_option("--limit-memory-hard", dest="limit_memory_hard", my_default=2560 * 1024 * 1024, help="Maximum allowed virtual memory per worker, when reached, any memory allocation will fail (default 805306368 aka 768MB).", type="int") group.add_option("--limit-time-cpu", dest="limit_time_cpu", my_default=60, help="Maximum allowed CPU time per request (default 60).", type="int") group.add_option("--limit-time-real", dest="limit_time_real", my_default=120, help="Maximum allowed Real time per request (default 120).", type="int") group.add_option("--limit-request", dest="limit_request", my_default=8192, help="Maximum number of request to be processed per worker (default 8192).", type="int") parser.add_option_group(group) # Copy all optparse options (i.e. MyOption) into self.options. for group in parser.option_groups: for option in group.option_list: if option.dest not in self.options: self.options[option.dest] = option.my_default self.casts[option.dest] = option # generate default config self._parse_config() def parse_config(self, args=None): """ Parse the configuration file (if any) and the command-line arguments. This method initializes openerp.tools.config and openerp.conf (the former should be removed in the furture) with library-wide configuration values. This method must be called before proper usage of this library can be made. Typical usage of this method: openerp.tools.config.parse_config(sys.argv[1:]) """ self._parse_config(args) openerp.netsvc.init_logger() openerp.modules.module.initialize_sys_path() def _parse_config(self, args=None): if args is None: args = [] opt, args = self.parser.parse_args(args) def die(cond, msg): if cond: self.parser.error(msg) # Ensures no illegitimate argument is silently discarded (avoids insidious "hyphen to dash" problem) die(args, "unrecognized parameters: '%s'" % " ".join(args)) die(bool(opt.syslog) and bool(opt.logfile), "the syslog and logfile options are exclusive") die(opt.translate_in and (not opt.language or not opt.db_name), "the i18n-import option cannot be used without the language (-l) and the database (-d) options") die(opt.overwrite_existing_translations and not (opt.translate_in or opt.update), "the i18n-overwrite option cannot be used without the i18n-import option or without the update option") die(opt.translate_out and (not opt.db_name), "the i18n-export option cannot be used without the database (-d) option") # Check if the config file exists (-c used, but not -s) die(not opt.save and opt.config and not os.access(opt.config, os.R_OK), "The config file '%s' selected with -c/--config doesn't exist or is not readable, "\ "use -s/--save if you want to generate it"% opt.config) # place/search the config file on Win32 near the server installation # (../etc from the server) # if the server is run by an unprivileged user, he has to specify location of a config file where he has the rights to write, # else he won't be able to save the configurations, or even to start the server... 
# TODO use appdirs if os.name == 'nt': rcfilepath = os.path.join(os.path.abspath(os.path.dirname(sys.argv[0])), 'openerp-server.conf') else: rcfilepath = os.path.expanduser('~/.openerp_serverrc') self.rcfile = os.path.abspath( self.config_file or opt.config or os.environ.get('OPENERP_SERVER') or rcfilepath) self.load() # Verify that we want to log or not, if not the output will go to stdout if self.options['logfile'] in ('None', 'False'): self.options['logfile'] = False # the same for the pidfile if self.options['pidfile'] in ('None', 'False'): self.options['pidfile'] = False # if defined dont take the configfile value even if the defined value is None keys = ['xmlrpc_interface', 'xmlrpc_port', 'longpolling_port', 'db_name', 'db_user', 'db_password', 'db_host', 'db_port', 'db_template', 'logfile', 'pidfile', 'smtp_port', 'email_from', 'smtp_server', 'smtp_user', 'smtp_password', 'db_maxconn', 'import_partial', 'addons_path', 'xmlrpc', 'syslog', 'without_demo', 'timezone', 'xmlrpcs_interface', 'xmlrpcs_port', 'xmlrpcs', 'secure_cert_file', 'secure_pkey_file', 'dbfilter', 'log_level', 'log_db', 'log_db_level', 'geoip_database', ] for arg in keys: # Copy the command-line argument (except the special case for log_handler, due to # action=append requiring a real default, so we cannot use the my_default workaround) if getattr(opt, arg): self.options[arg] = getattr(opt, arg) # ... or keep, but cast, the config file value. elif isinstance(self.options[arg], basestring) and self.casts[arg].type in optparse.Option.TYPE_CHECKER: self.options[arg] = optparse.Option.TYPE_CHECKER[self.casts[arg].type](self.casts[arg], arg, self.options[arg]) if isinstance(self.options['log_handler'], basestring): self.options['log_handler'] = self.options['log_handler'].split(',') self.options['log_handler'].extend(opt.log_handler) # if defined but None take the configfile value keys = [ 'language', 'translate_out', 'translate_in', 'overwrite_existing_translations', 'debug_mode', 'smtp_ssl', 'load_language', 'stop_after_init', 'logrotate', 'without_demo', 'xmlrpc', 'syslog', 'list_db', 'xmlrpcs', 'proxy_mode', 'test_file', 'test_enable', 'test_commit', 'test_report_directory', 'osv_memory_count_limit', 'osv_memory_age_limit', 'max_cron_threads', 'unaccent', 'data_dir', ] posix_keys = [ 'auto_reload', 'workers', 'limit_memory_hard', 'limit_memory_soft', 'limit_time_cpu', 'limit_time_real', 'limit_request', ] if os.name == 'posix': keys += posix_keys else: self.options.update(dict.fromkeys(posix_keys, None)) # Copy the command-line arguments... for arg in keys: if getattr(opt, arg) is not None: self.options[arg] = getattr(opt, arg) # ... or keep, but cast, the config file value. 
elif isinstance(self.options[arg], basestring) and self.casts[arg].type in optparse.Option.TYPE_CHECKER: self.options[arg] = optparse.Option.TYPE_CHECKER[self.casts[arg].type](self.casts[arg], arg, self.options[arg]) self.options['root_path'] = os.path.abspath(os.path.expanduser(os.path.expandvars(os.path.dirname(openerp.__file__)))) if not self.options['addons_path'] or self.options['addons_path']=='None': default_addons = [] base_addons = os.path.join(self.options['root_path'], 'addons') if os.path.exists(base_addons): default_addons.append(base_addons) main_addons = os.path.abspath(os.path.join(self.options['root_path'], '../addons')) if os.path.exists(main_addons): default_addons.append(main_addons) self.options['addons_path'] = ','.join(default_addons) else: self.options['addons_path'] = ",".join( os.path.abspath(os.path.expanduser(os.path.expandvars(x))) for x in self.options['addons_path'].split(',')) self.options['init'] = opt.init and dict.fromkeys(opt.init.split(','), 1) or {} self.options["demo"] = not opt.without_demo and self.options['init'] or {} self.options['update'] = opt.update and dict.fromkeys(opt.update.split(','), 1) or {} self.options['translate_modules'] = opt.translate_modules and map(lambda m: m.strip(), opt.translate_modules.split(',')) or ['all'] self.options['translate_modules'].sort() # TODO checking the type of the parameters should be done for every # parameters, not just the timezone. # The call to get_server_timezone() sets the timezone; this should # probably done here. if self.options['timezone']: # Prevent the timezone to be True. (The config file parsing changes # the string 'True' to the boolean value True. It would be probably # be better to remove that conversion.) die(not isinstance(self.options['timezone'], basestring), "Invalid timezone value in configuration or environment: %r.\n" "Please fix this in your configuration." %(self.options['timezone'])) # If an explicit TZ was provided in the config, make sure it is known try: import pytz pytz.timezone(self.options['timezone']) except pytz.UnknownTimeZoneError: die(True, "The specified timezone (%s) is invalid" % self.options['timezone']) except: # If pytz is missing, don't check the provided TZ, it will be ignored anyway. pass if opt.pg_path: self.options['pg_path'] = opt.pg_path if self.options.get('language', False): if len(self.options['language']) > 5: raise Exception('ERROR: The Lang name must take max 5 chars, Eg: -lfr_BE') if not self.options['db_user']: try: import getpass self.options['db_user'] = getpass.getuser() except: self.options['db_user'] = None die(not self.options['db_user'], 'ERROR: No user specified for the connection to the database') if self.options['db_password']: if sys.platform == 'win32' and not self.options['db_host']: self.options['db_host'] = 'localhost' #if self.options['db_host']: # self._generate_pgpassfile() if opt.save: self.save() openerp.conf.addons_paths = self.options['addons_path'].split(',') if opt.server_wide_modules: openerp.conf.server_wide_modules = map(lambda m: m.strip(), opt.server_wide_modules.split(',')) else: openerp.conf.server_wide_modules = ['web','web_kanban'] def _generate_pgpassfile(self): """ Generate the pgpass file with the parameters from the command line (db_host, db_user, db_password) Used because pg_dump and pg_restore can not accept the password on the command line. 
""" is_win32 = sys.platform == 'win32' if is_win32: filename = os.path.join(os.environ['APPDATA'], 'pgpass.conf') else: filename = os.path.join(os.environ['HOME'], '.pgpass') text_to_add = "%(db_host)s:*:*:%(db_user)s:%(db_password)s" % self.options if os.path.exists(filename): content = [x.strip() for x in file(filename, 'r').readlines()] if text_to_add in content: return fp = file(filename, 'a+') fp.write(text_to_add + "\n") fp.close() if is_win32: try: import _winreg except ImportError: _winreg = None x=_winreg.ConnectRegistry(None,_winreg.HKEY_LOCAL_MACHINE) y = _winreg.OpenKey(x, r"SYSTEM\CurrentControlSet\Control\Session Manager\Environment", 0,_winreg.KEY_ALL_ACCESS) _winreg.SetValueEx(y,"PGPASSFILE", 0, _winreg.REG_EXPAND_SZ, filename ) _winreg.CloseKey(y) _winreg.CloseKey(x) else: import stat os.chmod(filename, stat.S_IRUSR + stat.S_IWUSR) def _is_addons_path(self, path): for f in os.listdir(path): modpath = os.path.join(path, f) if os.path.isdir(modpath): def hasfile(filename): return os.path.isfile(os.path.join(modpath, filename)) if hasfile('__init__.py') and (hasfile('__openerp__.py') or hasfile('__terp__.py')): return True return False def _check_addons_path(self, option, opt, value, parser): ad_paths = [] for path in value.split(','): path = path.strip() res = os.path.abspath(os.path.expanduser(path)) if not os.path.isdir(res): raise optparse.OptionValueError("option %s: no such directory: %r" % (opt, path)) if not self._is_addons_path(res): raise optparse.OptionValueError("option %s: The addons-path %r does not seem to a be a valid Addons Directory!" % (opt, path)) ad_paths.append(res) setattr(parser.values, option.dest, ",".join(ad_paths)) def load(self): p = ConfigParser.ConfigParser() try: p.read([self.rcfile]) for (name,value) in p.items('options'): if value=='True' or value=='true': value = True if value=='False' or value=='false': value = False self.options[name] = value #parse the other sections, as well for sec in p.sections(): if sec == 'options': continue if not self.misc.has_key(sec): self.misc[sec]= {} for (name, value) in p.items(sec): if value=='True' or value=='true': value = True if value=='False' or value=='false': value = False self.misc[sec][name] = value except IOError: pass except ConfigParser.NoSectionError: pass def save(self): p = ConfigParser.ConfigParser() loglevelnames = dict(zip(self._LOGLEVELS.values(), self._LOGLEVELS.keys())) p.add_section('options') for opt in sorted(self.options.keys()): if opt in ('version', 'language', 'translate_out', 'translate_in', 'overwrite_existing_translations', 'init', 'update'): continue if opt in self.blacklist_for_save: continue if opt in ('log_level',): p.set('options', opt, loglevelnames.get(self.options[opt], self.options[opt])) elif opt == 'log_handler': p.set('options', opt, ','.join(_deduplicate_loggers(self.options[opt]))) else: p.set('options', opt, self.options[opt]) for sec in sorted(self.misc.keys()): p.add_section(sec) for opt in sorted(self.misc[sec].keys()): p.set(sec,opt,self.misc[sec][opt]) # try to create the directories and write the file try: rc_exists = os.path.exists(self.rcfile) if not rc_exists and not os.path.exists(os.path.dirname(self.rcfile)): os.makedirs(os.path.dirname(self.rcfile)) try: p.write(file(self.rcfile, 'w')) if not rc_exists: os.chmod(self.rcfile, 0600) except IOError: sys.stderr.write("ERROR: couldn't write the config file\n") except OSError: # what to do if impossible? 
sys.stderr.write("ERROR: couldn't create the config directory\n") def get(self, key, default=None): return self.options.get(key, default) def get_misc(self, sect, key, default=None): return self.misc.get(sect,{}).get(key, default) def __setitem__(self, key, value): self.options[key] = value if key in self.options and isinstance(self.options[key], basestring) and \ key in self.casts and self.casts[key].type in optparse.Option.TYPE_CHECKER: self.options[key] = optparse.Option.TYPE_CHECKER[self.casts[key].type](self.casts[key], key, self.options[key]) def __getitem__(self, key): return self.options[key] @property def addons_data_dir(self): d = os.path.join(self['data_dir'], 'addons', release.series) if not os.path.exists(d): os.makedirs(d, 0700) else: assert os.access(d, os.W_OK), \ "%s: directory is not writable" % d return d @property def session_dir(self): d = os.path.join(self['data_dir'], 'sessions') if not os.path.exists(d): os.makedirs(d, 0700) else: assert os.access(d, os.W_OK), \ "%s: directory is not writable" % d return d def filestore(self, dbname): return os.path.join(self['data_dir'], 'filestore', dbname) config = configmanager() # vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
agpl-3.0
ericfc/django
tests/admin_changelist/admin.py
247
3931
from django.contrib import admin
from django.contrib.auth.admin import UserAdmin
from django.contrib.auth.models import User
from django.core.paginator import Paginator

from .models import Child, Event, Parent, Swallow

site = admin.AdminSite(name="admin")
site.register(User, UserAdmin)


class CustomPaginator(Paginator):
    def __init__(self, queryset, page_size, orphans=0, allow_empty_first_page=True):
        super(CustomPaginator, self).__init__(queryset, 5, orphans=2,
                                              allow_empty_first_page=allow_empty_first_page)


class EventAdmin(admin.ModelAdmin):
    list_display = ['event_date_func']

    def event_date_func(self, event):
        return event.date

site.register(Event, EventAdmin)


class ParentAdmin(admin.ModelAdmin):
    list_filter = ['child__name']
    search_fields = ['child__name']


class ChildAdmin(admin.ModelAdmin):
    list_display = ['name', 'parent']
    list_per_page = 10
    list_filter = ['parent', 'age']

    def get_queryset(self, request):
        return super(ChildAdmin, self).get_queryset(request).select_related("parent__name")


class CustomPaginationAdmin(ChildAdmin):
    paginator = CustomPaginator


class FilteredChildAdmin(admin.ModelAdmin):
    list_display = ['name', 'parent']
    list_per_page = 10

    def get_queryset(self, request):
        return super(FilteredChildAdmin, self).get_queryset(request).filter(
            name__contains='filtered')


class BandAdmin(admin.ModelAdmin):
    list_filter = ['genres']


class GroupAdmin(admin.ModelAdmin):
    list_filter = ['members']


class ConcertAdmin(admin.ModelAdmin):
    list_filter = ['group__members']
    search_fields = ['group__members__name']


class QuartetAdmin(admin.ModelAdmin):
    list_filter = ['members']


class ChordsBandAdmin(admin.ModelAdmin):
    list_filter = ['members']


class InvitationAdmin(admin.ModelAdmin):
    list_display = ('band', 'player')
    list_select_related = ('player',)


class DynamicListDisplayChildAdmin(admin.ModelAdmin):
    list_display = ('parent', 'name', 'age')

    def get_list_display(self, request):
        my_list_display = super(DynamicListDisplayChildAdmin, self).get_list_display(request)
        if request.user.username == 'noparents':
            my_list_display = list(my_list_display)
            my_list_display.remove('parent')
        return my_list_display


class DynamicListDisplayLinksChildAdmin(admin.ModelAdmin):
    list_display = ('parent', 'name', 'age')
    list_display_links = ['parent', 'name']

    def get_list_display_links(self, request, list_display):
        return ['age']

site.register(Child, DynamicListDisplayChildAdmin)


class NoListDisplayLinksParentAdmin(admin.ModelAdmin):
    list_display_links = None

site.register(Parent, NoListDisplayLinksParentAdmin)


class SwallowAdmin(admin.ModelAdmin):
    actions = None  # prevent ['action_checkbox'] + list(list_display)
    list_display = ('origin', 'load', 'speed', 'swallowonetoone')

site.register(Swallow, SwallowAdmin)


class DynamicListFilterChildAdmin(admin.ModelAdmin):
    list_filter = ('parent', 'name', 'age')

    def get_list_filter(self, request):
        my_list_filter = super(DynamicListFilterChildAdmin, self).get_list_filter(request)
        if request.user.username == 'noparents':
            my_list_filter = list(my_list_filter)
            my_list_filter.remove('parent')
        return my_list_filter


class DynamicSearchFieldsChildAdmin(admin.ModelAdmin):
    search_fields = ('name',)

    def get_search_fields(self, request):
        search_fields = super(DynamicSearchFieldsChildAdmin, self).get_search_fields(request)
        search_fields += ('age',)
        return search_fields


class EmptyValueChildAdmin(admin.ModelAdmin):
    empty_value_display = '-empty-'
    list_display = ('name', 'age_display', 'age')

    def age_display(self, obj):
        return obj.age
    age_display.empty_value_display = '&dagger;'
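CustomPaginator above silently overrides whatever page size the admin requests. Django's Paginator can be exercised standalone to see the effect; a small sketch (assumes Django is installed, no settings are needed for this):

from django.core.paginator import Paginator

class CustomPaginator(Paginator):
    """Pin page size to 5 with 2 orphans, as in the admin above."""
    def __init__(self, queryset, page_size, orphans=0,
                 allow_empty_first_page=True):
        super(CustomPaginator, self).__init__(
            queryset, 5, orphans=2,
            allow_empty_first_page=allow_empty_first_page)

objects = list(range(23))
print(Paginator(objects, 10).num_pages)        # 3 -- the requested page size
print(CustomPaginator(objects, 10).num_pages)  # 5 -- forced 5/page, 2 orphans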
bsd-3-clause
home-assistant/home-assistant
tests/components/logi_circle/test_config_flow.py
2
7027
"""Tests for Logi Circle config flow.""" import asyncio from unittest.mock import AsyncMock, Mock, patch import pytest from homeassistant import config_entries, data_entry_flow from homeassistant.components.logi_circle import config_flow from homeassistant.components.logi_circle.config_flow import ( DOMAIN, AuthorizationFailed, LogiCircleAuthCallbackView, ) from homeassistant.setup import async_setup_component from tests.common import MockConfigEntry, mock_coro class MockRequest: """Mock request passed to HomeAssistantView.""" def __init__(self, hass, query): """Init request object.""" self.app = {"hass": hass} self.query = query def init_config_flow(hass): """Init a configuration flow.""" config_flow.register_flow_implementation( hass, DOMAIN, client_id="id", client_secret="secret", api_key="123", redirect_uri="http://example.com", sensors=None, ) flow = config_flow.LogiCircleFlowHandler() flow._get_authorization_url = Mock( # pylint: disable=protected-access return_value="http://example.com" ) flow.hass = hass return flow @pytest.fixture def mock_logi_circle(): """Mock logi_circle.""" with patch( "homeassistant.components.logi_circle.config_flow.LogiCircle" ) as logi_circle: LogiCircle = logi_circle() LogiCircle.authorize = AsyncMock(return_value=True) LogiCircle.close = AsyncMock(return_value=True) LogiCircle.account = mock_coro(return_value={"accountId": "testId"}) LogiCircle.authorize_url = "http://authorize.url" yield LogiCircle async def test_step_import( hass, mock_logi_circle # pylint: disable=redefined-outer-name ): """Test that we trigger import when configuring with client.""" flow = init_config_flow(hass) result = await flow.async_step_import() assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "auth" async def test_full_flow_implementation( hass, mock_logi_circle # pylint: disable=redefined-outer-name ): """Test registering an implementation and finishing flow works.""" config_flow.register_flow_implementation( hass, "test-other", client_id=None, client_secret=None, api_key=None, redirect_uri=None, sensors=None, ) flow = init_config_flow(hass) result = await flow.async_step_user() assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "user" result = await flow.async_step_user({"flow_impl": "test-other"}) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "auth" assert result["description_placeholders"] == { "authorization_url": "http://example.com" } result = await flow.async_step_code("123ABC") assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["title"] == "Logi Circle ({})".format("testId") async def test_we_reprompt_user_to_follow_link(hass): """Test we prompt user to follow link if previously prompted.""" flow = init_config_flow(hass) result = await flow.async_step_auth("dummy") assert result["errors"]["base"] == "follow_link" async def test_abort_if_no_implementation_registered(hass): """Test we abort if no implementation is registered.""" flow = config_flow.LogiCircleFlowHandler() flow.hass = hass result = await flow.async_step_user() assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT assert result["reason"] == "missing_configuration" async def test_abort_if_already_setup(hass): """Test we abort if Logi Circle is already setup.""" flow = init_config_flow(hass) MockConfigEntry(domain=config_flow.DOMAIN).add_to_hass(hass) result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": 
config_entries.SOURCE_USER}, ) assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT assert result["reason"] == "already_configured" result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_IMPORT}, ) assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT assert result["reason"] == "already_configured" with pytest.raises(data_entry_flow.AbortFlow): result = await flow.async_step_code() result = await flow.async_step_auth() assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT assert result["reason"] == "external_setup" @pytest.mark.parametrize( "side_effect,error", [ (asyncio.TimeoutError, "authorize_url_timeout"), (AuthorizationFailed, "invalid_auth"), ], ) async def test_abort_if_authorize_fails( hass, mock_logi_circle, side_effect, error ): # pylint: disable=redefined-outer-name """Test we abort if authorizing fails.""" flow = init_config_flow(hass) mock_logi_circle.authorize.side_effect = side_effect result = await flow.async_step_code("123ABC") assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT assert result["reason"] == "external_error" result = await flow.async_step_auth() assert result["errors"]["base"] == error async def test_not_pick_implementation_if_only_one(hass): """Test we bypass picking implementation if we have one flow_imp.""" flow = init_config_flow(hass) result = await flow.async_step_user() assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "auth" async def test_gen_auth_url( hass, mock_logi_circle ): # pylint: disable=redefined-outer-name """Test generating authorize URL from Logi Circle API.""" config_flow.register_flow_implementation( hass, "test-auth-url", client_id="id", client_secret="secret", api_key="123", redirect_uri="http://example.com", sensors=None, ) flow = config_flow.LogiCircleFlowHandler() flow.hass = hass flow.flow_impl = "test-auth-url" await async_setup_component(hass, "http", {}) result = flow._get_authorization_url() # pylint: disable=protected-access assert result == "http://authorize.url" async def test_callback_view_rejects_missing_code(hass): """Test the auth callback view rejects requests with no code.""" view = LogiCircleAuthCallbackView() resp = await view.get(MockRequest(hass, {})) assert resp.status == 400 async def test_callback_view_accepts_code( hass, mock_logi_circle ): # pylint: disable=redefined-outer-name """Test the auth callback view handles requests with auth code.""" init_config_flow(hass) view = LogiCircleAuthCallbackView() resp = await view.get(MockRequest(hass, {"code": "456"})) assert resp.status == 200 await hass.async_block_till_done() mock_logi_circle.authorize.assert_called_with("456")
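The mock_logi_circle fixture relies on one pattern worth isolating: patch the client, then give it AsyncMock methods so the code under test can await them. A stripped-down sketch with a hypothetical fetch_account coroutine (Python 3.8+, no Home Assistant required):

import asyncio
from unittest.mock import AsyncMock

async def fetch_account(client):
    # Code under test: awaits the client the way the config flow does
    await client.authorize("123ABC")
    return await client.account()

client = AsyncMock()
client.account.return_value = {"accountId": "testId"}

result = asyncio.run(fetch_account(client))
assert result == {"accountId": "testId"}
client.authorize.assert_awaited_with("123ABC")
print("ok")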
apache-2.0
tinkhaven-organization/odoo
addons/portal_claim/__openerp__.py
432
1643
# -*- coding: utf-8 -*-
##############################################################################
#
#    OpenERP, Open Source Management Solution
#    Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
#    This program is free software: you can redistribute it and/or modify
#    it under the terms of the GNU Affero General Public License as
#    published by the Free Software Foundation, either version 3 of the
#    License, or (at your option) any later version.
#
#    This program is distributed in the hope that it will be useful,
#    but WITHOUT ANY WARRANTY; without even the implied warranty of
#    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
#    GNU Affero General Public License for more details.
#
#    You should have received a copy of the GNU Affero General Public License
#    along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
    'name': 'Portal Claim',
    'version': '0.1',
    'category': 'Hidden',
    'complexity': 'easy',
    'description': """
This module adds claim menu and features to your portal if claim and portal are installed.
==========================================================================================
    """,
    'author': 'OpenERP SA',
    'depends': ['crm_claim', 'portal'],
    'data': [
        'portal_claim_view.xml',
        'security/ir.model.access.csv',
        'security/portal_security.xml',
    ],
    'installable': True,
    'auto_install': True,
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
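The pairing of 'depends' with 'auto_install': True is what makes this a bridge module: it installs itself automatically once every dependency is present, with no menu entry of its own ('Hidden'). A hypothetical minimal manifest reusing the same pattern; the module name and data file are invented for illustration:

# Hypothetical __openerp__.py for a sale/portal bridge module
{
    'name': 'Portal Sale Bridge',
    'version': '0.1',
    'category': 'Hidden',
    'depends': ['sale', 'portal'],  # bridge: needs both sides installed
    'data': ['security/ir.model.access.csv'],
    'installable': True,
    'auto_install': True,  # installed as soon as all depends are present
}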
agpl-3.0
overcastcloud/trollius
trollius/base_events.py
1
44639
"""Base implementation of event loop. The event loop can be broken up into a multiplexer (the part responsible for notifying us of I/O events) and the event loop proper, which wraps a multiplexer with functionality for scheduling callbacks, immediately or at a given time in the future. Whenever a public API takes a callback, subsequent positional arguments will be passed to the callback if/when it is called. This avoids the proliferation of trivial lambdas implementing closures. Keyword arguments for the callback are not supported; this is a conscious design decision, leaving the door open for keyword arguments to modify the meaning of the API call itself. """ import collections import heapq import inspect import logging import os import socket import subprocess import sys import traceback try: from collections import OrderedDict except ImportError: # Python 2.6: use ordereddict backport from ordereddict import OrderedDict try: from threading import get_ident as _get_thread_ident except ImportError: # Python 2 from threading import _get_ident as _get_thread_ident from . import compat from . import coroutines from . import events from . import futures from . import tasks from .coroutines import coroutine, From, Return from .executor import get_default_executor from .log import logger from .time_monotonic import time_monotonic, time_monotonic_resolution __all__ = ['BaseEventLoop'] # Argument for default thread pool executor creation. _MAX_WORKERS = 5 # Minimum number of _scheduled timer handles before cleanup of # cancelled handles is performed. _MIN_SCHEDULED_TIMER_HANDLES = 100 # Minimum fraction of _scheduled timer handles that are cancelled # before cleanup of cancelled handles is performed. _MIN_CANCELLED_TIMER_HANDLES_FRACTION = 0.5 def _format_handle(handle): cb = handle._callback if inspect.ismethod(cb) and isinstance(cb.__self__, tasks.Task): # format the task return repr(cb.__self__) else: return str(handle) def _format_pipe(fd): if fd == subprocess.PIPE: return '<pipe>' elif fd == subprocess.STDOUT: return '<stdout>' else: return repr(fd) class _StopError(BaseException): """Raised to stop the event loop.""" def _check_resolved_address(sock, address): # Ensure that the address is already resolved to avoid the trap of hanging # the entire event loop when the address requires doing a DNS lookup. family = sock.family if family == socket.AF_INET: host, port = address elif family == socket.AF_INET6: host, port = address[:2] else: return type_mask = 0 if hasattr(socket, 'SOCK_NONBLOCK'): type_mask |= socket.SOCK_NONBLOCK if hasattr(socket, 'SOCK_CLOEXEC'): type_mask |= socket.SOCK_CLOEXEC # Use getaddrinfo(flags=AI_NUMERICHOST) to ensure that the address is # already resolved. try: socket.getaddrinfo(host, port, family, (sock.type & ~type_mask), sock.proto, socket.AI_NUMERICHOST) except socket.gaierror as err: raise ValueError("address must be resolved (IP address), got %r: %s" % (address, err)) def _raise_stop_error(*args): raise _StopError def _run_until_complete_cb(fut): exc = fut._exception if (isinstance(exc, BaseException) and not isinstance(exc, Exception)): # Issue #22429: run_forever() already finished, no need to # stop it. 
return _raise_stop_error() class Server(events.AbstractServer): def __init__(self, loop, sockets): self._loop = loop self.sockets = sockets self._active_count = 0 self._waiters = [] def __repr__(self): return '<%s sockets=%r>' % (self.__class__.__name__, self.sockets) def _attach(self): assert self.sockets is not None self._active_count += 1 def _detach(self): assert self._active_count > 0 self._active_count -= 1 if self._active_count == 0 and self.sockets is None: self._wakeup() def close(self): sockets = self.sockets if sockets is None: return self.sockets = None for sock in sockets: self._loop._stop_serving(sock) if self._active_count == 0: self._wakeup() def _wakeup(self): waiters = self._waiters self._waiters = None for waiter in waiters: if not waiter.done(): waiter.set_result(waiter) @coroutine def wait_closed(self): if self.sockets is None or self._waiters is None: raise Return() waiter = futures.Future(loop=self._loop) self._waiters.append(waiter) yield From(waiter) class BaseEventLoop(events.AbstractEventLoop): def __init__(self): self._timer_cancelled_count = 0 self._closed = False self._ready = collections.deque() self._scheduled = [] self._default_executor = None self._internal_fds = 0 # Identifier of the thread running the event loop, or None if the # event loop is not running self._owner = None self._clock_resolution = time_monotonic_resolution self._exception_handler = None self._debug = bool(os.environ.get('TROLLIUSDEBUG')) # In debug mode, if the execution of a callback or a step of a task # exceed this duration in seconds, the slow callback/task is logged. self.slow_callback_duration = 0.1 self._current_handle = None def __repr__(self): return ('<%s running=%s closed=%s debug=%s>' % (self.__class__.__name__, self.is_running(), self.is_closed(), self.get_debug())) def create_task(self, coro): """Schedule a coroutine object. Return a task object. """ self._check_closed() task = tasks.Task(coro, loop=self) if task._source_traceback: del task._source_traceback[-1] return task def _make_socket_transport(self, sock, protocol, waiter=None, extra=None, server=None): """Create socket transport.""" raise NotImplementedError def _make_ssl_transport(self, rawsock, protocol, sslcontext, waiter=None, server_side=False, server_hostname=None, extra=None, server=None): """Create SSL transport.""" raise NotImplementedError def _make_datagram_transport(self, sock, protocol, address=None, waiter=None, extra=None): """Create datagram transport.""" raise NotImplementedError def _make_read_pipe_transport(self, pipe, protocol, waiter=None, extra=None): """Create read pipe transport.""" raise NotImplementedError def _make_write_pipe_transport(self, pipe, protocol, waiter=None, extra=None): """Create write pipe transport.""" raise NotImplementedError @coroutine def _make_subprocess_transport(self, protocol, args, shell, stdin, stdout, stderr, bufsize, extra=None, **kwargs): """Create subprocess transport.""" raise NotImplementedError def _write_to_self(self): """Write a byte to self-pipe, to wake up the event loop. This may be called from a different thread. The subclass is responsible for implementing the self-pipe. 
""" raise NotImplementedError def _process_events(self, event_list): """Process selector events.""" raise NotImplementedError def _check_closed(self): if self._closed: raise RuntimeError('Event loop is closed') def run_forever(self): """Run until stop() is called.""" self._check_closed() if self.is_running(): raise RuntimeError('Event loop is running.') self._owner = _get_thread_ident() try: while True: try: self._run_once() except _StopError: break finally: self._owner = None def run_until_complete(self, future): """Run until the Future is done. If the argument is a coroutine, it is wrapped in a Task. WARNING: It would be disastrous to call run_until_complete() with the same coroutine twice -- it would wrap it in two different Tasks and that can't be good. Return the Future's result, or raise its exception. """ self._check_closed() new_task = not isinstance(future, futures._FUTURE_CLASSES) future = tasks.async(future, loop=self) if new_task: # An exception is raised if the future didn't complete, so there # is no need to log the "destroy pending task" message future._log_destroy_pending = False future.add_done_callback(_run_until_complete_cb) try: self.run_forever() except: if new_task and future.done() and not future.cancelled(): # The coroutine raised a BaseException. Consume the exception # to not log a warning, the caller doesn't have access to the # local task. future.exception() raise future.remove_done_callback(_run_until_complete_cb) if not future.done(): raise RuntimeError('Event loop stopped before Future completed.') return future.result() def stop(self): """Stop running the event loop. Every callback scheduled before stop() is called will run. Callbacks scheduled after stop() is called will not run. However, those callbacks will run if run_forever is called again later. """ self.call_soon(_raise_stop_error) def close(self): """Close the event loop. This clears the queues and shuts down the executor, but does not wait for the executor to finish. The event loop must not be running. """ if self.is_running(): raise RuntimeError("Cannot close a running event loop") if self._closed: return if self._debug: logger.debug("Close %r", self) self._closed = True self._ready.clear() del self._scheduled[:] executor = self._default_executor if executor is not None: self._default_executor = None executor.shutdown(wait=False) def is_closed(self): """Returns True if the event loop was closed.""" return self._closed def is_running(self): """Returns True if the event loop is running.""" return (self._owner is not None) def time(self): """Return the time according to the event loop's clock. This is a float expressed in seconds since an epoch, but the epoch, precision, accuracy and drift are unspecified and may differ per event loop. """ return time_monotonic() def call_later(self, delay, callback, *args): """Arrange for a callback to be called at a given time. Return a Handle: an opaque object with a cancel() method that can be used to cancel the call. The delay can be an int or float, expressed in seconds. It is always relative to the current time. Each callback will be called exactly once. If two callbacks are scheduled for exactly the same time, it undefined which will be called first. Any positional arguments after the callback will be passed to the callback when it is called. 
""" timer = self.call_at(self.time() + delay, callback, *args) if timer._source_traceback: del timer._source_traceback[-1] return timer def call_at(self, when, callback, *args): """Like call_later(), but uses an absolute time. Absolute time corresponds to the event loop's time() method. """ if (coroutines.iscoroutine(callback) or coroutines.iscoroutinefunction(callback)): raise TypeError("coroutines cannot be used with call_at()") self._check_closed() if self._debug: self._check_thread() timer = events.TimerHandle(when, callback, args, self) if timer._source_traceback: del timer._source_traceback[-1] heapq.heappush(self._scheduled, timer) timer._scheduled = True return timer def call_soon(self, callback, *args): """Arrange for a callback to be called as soon as possible. This operates as a FIFO queue: callbacks are called in the order in which they are registered. Each callback will be called exactly once. Any positional arguments after the callback will be passed to the callback when it is called. """ if self._debug: self._check_thread() handle = self._call_soon(callback, args) if handle._source_traceback: del handle._source_traceback[-1] return handle def _call_soon(self, callback, args): if (coroutines.iscoroutine(callback) or coroutines.iscoroutinefunction(callback)): raise TypeError("coroutines cannot be used with call_soon()") self._check_closed() handle = events.Handle(callback, args, self) if handle._source_traceback: del handle._source_traceback[-1] self._ready.append(handle) return handle def _check_thread(self): """Check that the current thread is the thread running the event loop. Non-thread-safe methods of this class make this assumption and will likely behave incorrectly when the assumption is violated. Should only be called when (self._debug == True). The caller is responsible for checking this condition for performance reasons. 
""" if self._owner is None: return thread_id = _get_thread_ident() if thread_id != self._owner: raise RuntimeError( "Non-thread-safe operation invoked on an event loop other " "than the current one") def call_soon_threadsafe(self, callback, *args): """Like call_soon(), but thread-safe.""" handle = self._call_soon(callback, args) if handle._source_traceback: del handle._source_traceback[-1] self._write_to_self() return handle def run_in_executor(self, executor, callback, *args): if (coroutines.iscoroutine(callback) or coroutines.iscoroutinefunction(callback)): raise TypeError("coroutines cannot be used with run_in_executor()") self._check_closed() if isinstance(callback, events.Handle): assert not args assert not isinstance(callback, events.TimerHandle) if callback._cancelled: f = futures.Future(loop=self) f.set_result(None) return f callback, args = callback._callback, callback._args if executor is None: executor = self._default_executor if executor is None: executor = get_default_executor() self._default_executor = executor return futures.wrap_future(executor.submit(callback, *args), loop=self) def set_default_executor(self, executor): self._default_executor = executor def _getaddrinfo_debug(self, host, port, family, type, proto, flags): msg = ["%s:%r" % (host, port)] if family: msg.append('family=%r' % family) if type: msg.append('type=%r' % type) if proto: msg.append('proto=%r' % proto) if flags: msg.append('flags=%r' % flags) msg = ', '.join(msg) logger.debug('Get address info %s', msg) t0 = self.time() addrinfo = socket.getaddrinfo(host, port, family, type, proto, flags) dt = self.time() - t0 msg = ('Getting address info %s took %.3f ms: %r' % (msg, dt * 1e3, addrinfo)) if dt >= self.slow_callback_duration: logger.info(msg) else: logger.debug(msg) return addrinfo def getaddrinfo(self, host, port, family=0, type=0, proto=0, flags=0): if self._debug: return self.run_in_executor(None, self._getaddrinfo_debug, host, port, family, type, proto, flags) else: return self.run_in_executor(None, socket.getaddrinfo, host, port, family, type, proto, flags) def getnameinfo(self, sockaddr, flags=0): return self.run_in_executor(None, socket.getnameinfo, sockaddr, flags) @coroutine def create_connection(self, protocol_factory, host=None, port=None, ssl=None, family=0, proto=0, flags=0, sock=None, local_addr=None, server_hostname=None): """Connect to a TCP server. Create a streaming transport connection to a given Internet host and port: socket family AF_INET or socket.AF_INET6 depending on host (or family if specified), socket type SOCK_STREAM. protocol_factory must be a callable returning a protocol instance. This method is a coroutine which will try to establish the connection in the background. When successful, the coroutine returns a (transport, protocol) pair. """ if server_hostname is not None and not ssl: raise ValueError('server_hostname is only meaningful with ssl') if server_hostname is None and ssl: # Use host as default for server_hostname. It is an error # if host is empty or not set, e.g. when an # already-connected socket was passed or when only a port # is given. To avoid this error, you can pass # server_hostname='' -- this will bypass the hostname # check. (This also means that if host is a numeric # IP/IPv6 address, we will attempt to verify that exact # address; this will probably fail, but it is possible to # create a certificate for a specific IP address, so we # don't judge it here.) 
if not host: raise ValueError('You must set server_hostname ' 'when using ssl without a host') server_hostname = host if host is not None or port is not None: if sock is not None: raise ValueError( 'host/port and sock can not be specified at the same time') f1 = self.getaddrinfo( host, port, family=family, type=socket.SOCK_STREAM, proto=proto, flags=flags) fs = [f1] if local_addr is not None: f2 = self.getaddrinfo( *local_addr, family=family, type=socket.SOCK_STREAM, proto=proto, flags=flags) fs.append(f2) else: f2 = None yield From(tasks.wait(fs, loop=self)) infos = f1.result() if not infos: raise socket.error('getaddrinfo() returned empty list') if f2 is not None: laddr_infos = f2.result() if not laddr_infos: raise socket.error('getaddrinfo() returned empty list') exceptions = [] for family, type, proto, cname, address in infos: try: sock = socket.socket(family=family, type=type, proto=proto) sock.setblocking(False) if f2 is not None: for _, _, _, _, laddr in laddr_infos: try: sock.bind(laddr) break except socket.error as exc: exc = socket.error( exc.errno, 'error while ' 'attempting to bind on address ' '{0!r}: {1}'.format( laddr, exc.strerror.lower())) exceptions.append(exc) else: sock.close() sock = None continue if self._debug: logger.debug("connect %r to %r", sock, address) yield From(self.sock_connect(sock, address)) except socket.error as exc: if sock is not None: sock.close() exceptions.append(exc) except: if sock is not None: sock.close() raise else: break else: if len(exceptions) == 1: raise exceptions[0] else: # If they all have the same str(), raise one. model = str(exceptions[0]) if all(str(exc) == model for exc in exceptions): raise exceptions[0] # Raise a combined exception so the user can see all # the various error messages. raise socket.error('Multiple exceptions: {0}'.format( ', '.join(str(exc) for exc in exceptions))) elif sock is None: raise ValueError( 'host and port was not specified and no sock specified') sock.setblocking(False) transport, protocol = yield From(self._create_connection_transport( sock, protocol_factory, ssl, server_hostname)) if self._debug: # Get the socket from the transport because SSL transport closes # the old socket and creates a new SSL socket sock = transport.get_extra_info('socket') logger.debug("%r connected to %s:%r: (%r, %r)", sock, host, port, transport, protocol) raise Return(transport, protocol) @coroutine def _create_connection_transport(self, sock, protocol_factory, ssl, server_hostname): protocol = protocol_factory() waiter = futures.Future(loop=self) if ssl: sslcontext = None if isinstance(ssl, bool) else ssl transport = self._make_ssl_transport( sock, protocol, sslcontext, waiter, server_side=False, server_hostname=server_hostname) else: transport = self._make_socket_transport(sock, protocol, waiter) try: yield From(waiter) except: transport.close() raise raise Return(transport, protocol) @coroutine def create_datagram_endpoint(self, protocol_factory, local_addr=None, remote_addr=None, family=0, proto=0, flags=0): """Create datagram connection.""" if not (local_addr or remote_addr): if family == 0: raise ValueError('unexpected address family') addr_pairs_info = (((family, proto), (None, None)),) else: # join address by (family, protocol) addr_infos = OrderedDict() for idx, addr in ((0, local_addr), (1, remote_addr)): if addr is not None: assert isinstance(addr, tuple) and len(addr) == 2, ( '2-tuple is expected') infos = yield From(self.getaddrinfo( *addr, family=family, type=socket.SOCK_DGRAM, proto=proto, flags=flags)) if not 
infos: raise socket.error('getaddrinfo() returned empty list') for fam, _, pro, _, address in infos: key = (fam, pro) if key not in addr_infos: addr_infos[key] = [None, None] addr_infos[key][idx] = address # each addr has to have info for each (family, proto) pair addr_pairs_info = [ (key, addr_pair) for key, addr_pair in addr_infos.items() if not ((local_addr and addr_pair[0] is None) or (remote_addr and addr_pair[1] is None))] if not addr_pairs_info: raise ValueError('can not get address information') exceptions = [] for ((family, proto), (local_address, remote_address)) in addr_pairs_info: sock = None r_addr = None try: sock = socket.socket( family=family, type=socket.SOCK_DGRAM, proto=proto) sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) sock.setblocking(False) if local_addr: sock.bind(local_address) if remote_addr: yield From(self.sock_connect(sock, remote_address)) r_addr = remote_address except socket.error as exc: if sock is not None: sock.close() exceptions.append(exc) except: if sock is not None: sock.close() raise else: break else: raise exceptions[0] protocol = protocol_factory() waiter = futures.Future(loop=self) transport = self._make_datagram_transport(sock, protocol, r_addr, waiter) if self._debug: if local_addr: logger.info("Datagram endpoint local_addr=%r remote_addr=%r " "created: (%r, %r)", local_addr, remote_addr, transport, protocol) else: logger.debug("Datagram endpoint remote_addr=%r created: " "(%r, %r)", remote_addr, transport, protocol) try: yield From(waiter) except: transport.close() raise raise Return(transport, protocol) @coroutine def create_server(self, protocol_factory, host=None, port=None, family=socket.AF_UNSPEC, flags=socket.AI_PASSIVE, sock=None, backlog=100, ssl=None, reuse_address=None): """Create a TCP server bound to host and port. Return a Server object which can be used to stop the service. This method is a coroutine. """ if isinstance(ssl, bool): raise TypeError('ssl argument must be an SSLContext or None') if host is not None or port is not None: if sock is not None: raise ValueError( 'host/port and sock can not be specified at the same time') AF_INET6 = getattr(socket, 'AF_INET6', 0) if reuse_address is None: reuse_address = os.name == 'posix' and sys.platform != 'cygwin' sockets = [] if host == '': host = None infos = yield From(self.getaddrinfo( host, port, family=family, type=socket.SOCK_STREAM, proto=0, flags=flags)) if not infos: raise socket.error('getaddrinfo() returned empty list') completed = False try: for res in infos: af, socktype, proto, canonname, sa = res try: sock = socket.socket(af, socktype, proto) except socket.error: # Assume it's a bad family/type/protocol combination. if self._debug: logger.warning('create_server() failed to create ' 'socket.socket(%r, %r, %r)', af, socktype, proto, exc_info=True) continue sockets.append(sock) if reuse_address: sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, True) # Disable IPv4/IPv6 dual stack support (enabled by # default on Linux) which makes a single socket # listen on both address families. 
if af == AF_INET6 and hasattr(socket, 'IPPROTO_IPV6'): sock.setsockopt(socket.IPPROTO_IPV6, socket.IPV6_V6ONLY, True) try: sock.bind(sa) except socket.error as err: raise socket.error(err.errno, 'error while attempting ' 'to bind on address %r: %s' % (sa, err.strerror.lower())) completed = True finally: if not completed: for sock in sockets: sock.close() else: if sock is None: raise ValueError('Neither host/port nor sock were specified') sockets = [sock] server = Server(self, sockets) for sock in sockets: sock.listen(backlog) sock.setblocking(False) self._start_serving(protocol_factory, sock, ssl, server) if self._debug: logger.info("%r is serving", server) raise Return(server) @coroutine def connect_read_pipe(self, protocol_factory, pipe): protocol = protocol_factory() waiter = futures.Future(loop=self) transport = self._make_read_pipe_transport(pipe, protocol, waiter) try: yield From(waiter) except: transport.close() raise if self._debug: logger.debug('Read pipe %r connected: (%r, %r)', pipe.fileno(), transport, protocol) raise Return(transport, protocol) @coroutine def connect_write_pipe(self, protocol_factory, pipe): protocol = protocol_factory() waiter = futures.Future(loop=self) transport = self._make_write_pipe_transport(pipe, protocol, waiter) try: yield From(waiter) except: transport.close() raise if self._debug: logger.debug('Write pipe %r connected: (%r, %r)', pipe.fileno(), transport, protocol) raise Return(transport, protocol) def _log_subprocess(self, msg, stdin, stdout, stderr): info = [msg] if stdin is not None: info.append('stdin=%s' % _format_pipe(stdin)) if stdout is not None and stderr == subprocess.STDOUT: info.append('stdout=stderr=%s' % _format_pipe(stdout)) else: if stdout is not None: info.append('stdout=%s' % _format_pipe(stdout)) if stderr is not None: info.append('stderr=%s' % _format_pipe(stderr)) logger.debug(' '.join(info)) @coroutine def subprocess_shell(self, protocol_factory, cmd, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE, universal_newlines=False, shell=True, bufsize=0, **kwargs): if not isinstance(cmd, compat.string_types): raise ValueError("cmd must be a string") if universal_newlines: raise ValueError("universal_newlines must be False") if not shell: raise ValueError("shell must be True") if bufsize != 0: raise ValueError("bufsize must be 0") protocol = protocol_factory() if self._debug: # don't log parameters: they may contain sensitive information # (password) and may be too long debug_log = 'run shell command %r' % cmd self._log_subprocess(debug_log, stdin, stdout, stderr) transport = yield From(self._make_subprocess_transport( protocol, cmd, True, stdin, stdout, stderr, bufsize, **kwargs)) if self._debug: logger.info('%s: %r' % (debug_log, transport)) raise Return(transport, protocol) @coroutine def subprocess_exec(self, protocol_factory, program, *args, **kwargs): stdin = kwargs.pop('stdin', subprocess.PIPE) stdout = kwargs.pop('stdout', subprocess.PIPE) stderr = kwargs.pop('stderr', subprocess.PIPE) universal_newlines = kwargs.pop('universal_newlines', False) shell = kwargs.pop('shell', False) bufsize = kwargs.pop('bufsize', 0) if universal_newlines: raise ValueError("universal_newlines must be False") if shell: raise ValueError("shell must be False") if bufsize != 0: raise ValueError("bufsize must be 0") popen_args = (program,) + args for arg in popen_args: if not isinstance(arg, compat.string_types ): raise TypeError("program arguments must be " "a bytes or text string, not %s" % type(arg).__name__) protocol = 
protocol_factory() if self._debug: # don't log parameters: they may contain sensitive information # (password) and may be too long debug_log = 'execute program %r' % program self._log_subprocess(debug_log, stdin, stdout, stderr) transport = yield From(self._make_subprocess_transport( protocol, popen_args, False, stdin, stdout, stderr, bufsize, **kwargs)) if self._debug: logger.info('%s: %r' % (debug_log, transport)) raise Return(transport, protocol) def set_exception_handler(self, handler): """Set handler as the new event loop exception handler. If handler is None, the default exception handler will be set. If handler is a callable object, it should have a signature matching '(loop, context)', where 'loop' will be a reference to the active event loop, 'context' will be a dict object (see `call_exception_handler()` documentation for details about context). """ if handler is not None and not callable(handler): raise TypeError('A callable object or None is expected, ' 'got {0!r}'.format(handler)) self._exception_handler = handler def default_exception_handler(self, context): """Default exception handler. This is called when an exception occurs and no exception handler is set, and can be called by a custom exception handler that wants to defer to the default behavior. The context parameter has the same meaning as in `call_exception_handler()`. """ message = context.get('message') if not message: message = 'Unhandled exception in event loop' exception = context.get('exception') if exception is not None: if hasattr(exception, '__traceback__'): # Python 3 tb = exception.__traceback__ else: # call_exception_handler() is usually called indirectly # from an except block. If it's not the case, the traceback # is undefined... tb = sys.exc_info()[2] exc_info = (type(exception), exception, tb) else: exc_info = False if (self._current_handle is not None and self._current_handle._source_traceback): context['handle_traceback'] = self._current_handle._source_traceback log_lines = [message] for key in sorted(context): if key in ('message', 'exception'): continue value = context[key] if key == 'source_traceback': tb = ''.join(traceback.format_list(value)) value = 'Object created at (most recent call last):\n' value += tb.rstrip() elif key == 'handle_traceback': tb = ''.join(traceback.format_list(value)) value = 'Handle created at (most recent call last):\n' value += tb.rstrip() else: value = repr(value) log_lines.append('{0}: {1}'.format(key, value)) logger.error('\n'.join(log_lines), exc_info=exc_info) def call_exception_handler(self, context): """Call the current event loop's exception handler. The context argument is a dict containing the following keys: - 'message': Error message; - 'exception' (optional): Exception object; - 'future' (optional): Future instance; - 'handle' (optional): Handle instance; - 'protocol' (optional): Protocol instance; - 'transport' (optional): Transport instance; - 'socket' (optional): Socket instance. New keys may be introduced in the future. Note: do not overload this method in an event loop subclass. For custom exception handling, use the `set_exception_handler()` method. """ if self._exception_handler is None: try: self.default_exception_handler(context) except Exception: # Second protection layer for unexpected errors # in the default implementation, as well as for subclassed # event loops with overloaded "default_exception_handler". 
logger.error('Exception in default exception handler', exc_info=True) else: try: self._exception_handler(self, context) except Exception as exc: # Exception in the user set custom exception handler. try: # Let's try default handler. self.default_exception_handler({ 'message': 'Unhandled error in exception handler', 'exception': exc, 'context': context, }) except Exception: # Guard 'default_exception_handler' in case it is # overloaded. logger.error('Exception in default exception handler ' 'while handling an unexpected error ' 'in custom exception handler', exc_info=True) def _add_callback(self, handle): """Add a Handle to _scheduled (TimerHandle) or _ready.""" assert isinstance(handle, events.Handle), 'A Handle is required here' if handle._cancelled: return assert not isinstance(handle, events.TimerHandle) self._ready.append(handle) def _add_callback_signalsafe(self, handle): """Like _add_callback() but called from a signal handler.""" self._add_callback(handle) self._write_to_self() def _timer_handle_cancelled(self, handle): """Notification that a TimerHandle has been cancelled.""" if handle._scheduled: self._timer_cancelled_count += 1 def _run_once(self): """Run one full iteration of the event loop. This calls all currently ready callbacks, polls for I/O, schedules the resulting callbacks, and finally schedules 'call_later' callbacks. """ sched_count = len(self._scheduled) if (sched_count > _MIN_SCHEDULED_TIMER_HANDLES and float(self._timer_cancelled_count) / sched_count > _MIN_CANCELLED_TIMER_HANDLES_FRACTION): # Remove delayed calls that were cancelled if their number # is too high new_scheduled = [] for handle in self._scheduled: if handle._cancelled: handle._scheduled = False else: new_scheduled.append(handle) heapq.heapify(new_scheduled) self._scheduled = new_scheduled self._timer_cancelled_count = 0 else: # Remove delayed calls that were cancelled from head of queue. while self._scheduled and self._scheduled[0]._cancelled: self._timer_cancelled_count -= 1 handle = heapq.heappop(self._scheduled) handle._scheduled = False timeout = None if self._ready: timeout = 0 elif self._scheduled: # Compute the desired timeout. when = self._scheduled[0]._when timeout = max(0, when - self.time()) if self._debug and timeout != 0: t0 = self.time() event_list = self._selector.select(timeout) dt = self.time() - t0 if dt >= 1.0: level = logging.INFO else: level = logging.DEBUG nevent = len(event_list) if timeout is None: logger.log(level, 'poll took %.3f ms: %s events', dt * 1e3, nevent) elif nevent: logger.log(level, 'poll %.3f ms took %.3f ms: %s events', timeout * 1e3, dt * 1e3, nevent) elif dt >= 1.0: logger.log(level, 'poll %.3f ms took %.3f ms: timeout', timeout * 1e3, dt * 1e3) else: event_list = self._selector.select(timeout) self._process_events(event_list) # Handle 'later' callbacks that are ready. end_time = self.time() + self._clock_resolution while self._scheduled: handle = self._scheduled[0] if handle._when >= end_time: break handle = heapq.heappop(self._scheduled) handle._scheduled = False self._ready.append(handle) # This is the only place where callbacks are actually *called*. # All other places just add them to ready. # Note: We run all currently scheduled callbacks, but not any # callbacks scheduled by callbacks run this time around -- # they will be run the next time (after another I/O poll). # Use an idiom that is thread-safe without using locks. 
ntodo = len(self._ready) for i in range(ntodo): handle = self._ready.popleft() if handle._cancelled: continue if self._debug: try: self._current_handle = handle t0 = self.time() handle._run() dt = self.time() - t0 if dt >= self.slow_callback_duration: logger.warning('Executing %s took %.3f seconds', _format_handle(handle), dt) finally: self._current_handle = None else: handle._run() handle = None # Needed to break cycles when an exception occurs. def get_debug(self): return self._debug def set_debug(self, enabled): self._debug = enabled
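# ---------------------------------------------------------------------------
# Editor's addition: a minimal usage sketch for the event loop implemented
# above. It assumes the trollius package (the Python 2 asyncio backport this
# module belongs to) is importable; the coroutine name `greet` is hypothetical.
# ---------------------------------------------------------------------------
if __name__ == '__main__':
    import trollius
    from trollius import From, Return

    @trollius.coroutine
    def greet(name):
        # Suspend without blocking the loop (trollius spelling of `await`).
        yield From(trollius.sleep(0.1))
        # Python 2 spelling of `return value` inside a coroutine.
        raise Return('hello %s' % name)

    loop = trollius.get_event_loop()
    try:
        print(loop.run_until_complete(greet('world')))  # -> hello world
        loop.call_later(0.5, loop.stop)  # schedule stop() half a second out
        loop.run_forever()               # runs until the stop() callback fires
    finally:
        loop.close()                     # the loop must not be running here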
apache-2.0
jeremiahmarks/sl4a
python/src/Lib/lib2to3/fixes/fix_print.py
53
2957
# Copyright 2006 Google, Inc. All Rights Reserved. # Licensed to PSF under a Contributor Agreement. """Fixer for print. Change: 'print' into 'print()' 'print ...' into 'print(...)' 'print ... ,' into 'print(..., end=" ")' 'print >>x, ...' into 'print(..., file=x)' No changes are applied if print_function is imported from __future__ """ # Local imports from .. import patcomp from .. import pytree from ..pgen2 import token from .. import fixer_base from ..fixer_util import Name, Call, Comma, String, is_tuple parend_expr = patcomp.compile_pattern( """atom< '(' [atom|STRING|NAME] ')' >""" ) class FixPrint(fixer_base.ConditionalFix): PATTERN = """ simple_stmt< any* bare='print' any* > | print_stmt """ skip_on = '__future__.print_function' def transform(self, node, results): assert results if self.should_skip(node): return bare_print = results.get("bare") if bare_print: # Special-case print all by itself bare_print.replace(Call(Name("print"), [], prefix=bare_print.get_prefix())) return assert node.children[0] == Name("print") args = node.children[1:] if len(args) == 1 and parend_expr.match(args[0]): # We don't want to keep sticking parens around an # already-parenthesised expression. return sep = end = file = None if args and args[-1] == Comma(): args = args[:-1] end = " " if args and args[0] == pytree.Leaf(token.RIGHTSHIFT, ">>"): assert len(args) >= 2 file = args[1].clone() args = args[3:] # Strip a possible comma after the file expression # Now synthesize a print(args, sep=..., end=..., file=...) node. l_args = [arg.clone() for arg in args] if l_args: l_args[0].set_prefix("") if sep is not None or end is not None or file is not None: if sep is not None: self.add_kwarg(l_args, "sep", String(repr(sep))) if end is not None: self.add_kwarg(l_args, "end", String(repr(end))) if file is not None: self.add_kwarg(l_args, "file", file) n_stmt = Call(Name("print"), l_args) n_stmt.set_prefix(node.get_prefix()) return n_stmt def add_kwarg(self, l_nodes, s_kwd, n_expr): # XXX All this prefix-setting may lose comments (though rarely) n_expr.set_prefix("") n_argument = pytree.Node(self.syms.argument, (Name(s_kwd), pytree.Leaf(token.EQUAL, "="), n_expr)) if l_nodes: l_nodes.append(Comma()) n_argument.set_prefix(" ") l_nodes.append(n_argument)
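# ---------------------------------------------------------------------------
# Editor's addition: a small sketch showing this fixer in action through
# lib2to3's public RefactoringTool API. The sample source strings are
# hypothetical; refactor_string() requires each input to end with a newline.
# ---------------------------------------------------------------------------
if __name__ == '__main__':
    from lib2to3.refactor import RefactoringTool

    tool = RefactoringTool(['lib2to3.fixes.fix_print'])
    for src in ("print\n",
                "print 'hi'\n",
                "print 'hi',\n",
                "print >>sys.stderr, 'oops'\n"):
        # refactor_string() returns a parse tree; str() yields the new source.
        print str(tool.refactor_string(src, '<example>')),
    # Expected output, one transformed statement per input:
    #   print()
    #   print('hi')
    #   print('hi', end=' ')
    #   print('oops', file=sys.stderr)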
apache-2.0
aliyun/oss-ftp
python27/win32/Lib/site-packages/cryptography/hazmat/primitives/hmac.py
61
2353
# This file is dual licensed under the terms of the Apache License, Version # 2.0, and the BSD License. See the LICENSE file in the root of this repository # for complete details. from __future__ import absolute_import, division, print_function from cryptography import utils from cryptography.exceptions import ( AlreadyFinalized, UnsupportedAlgorithm, _Reasons ) from cryptography.hazmat.backends.interfaces import HMACBackend from cryptography.hazmat.primitives import hashes, interfaces @utils.register_interface(interfaces.MACContext) @utils.register_interface(hashes.HashContext) class HMAC(object): def __init__(self, key, algorithm, backend, ctx=None): if not isinstance(backend, HMACBackend): raise UnsupportedAlgorithm( "Backend object does not implement HMACBackend.", _Reasons.BACKEND_MISSING_INTERFACE ) if not isinstance(algorithm, hashes.HashAlgorithm): raise TypeError("Expected instance of hashes.HashAlgorithm.") self._algorithm = algorithm self._backend = backend self._key = key if ctx is None: self._ctx = self._backend.create_hmac_ctx(key, self.algorithm) else: self._ctx = ctx algorithm = utils.read_only_property("_algorithm") def update(self, data): if self._ctx is None: raise AlreadyFinalized("Context was already finalized.") if not isinstance(data, bytes): raise TypeError("data must be bytes.") self._ctx.update(data) def copy(self): if self._ctx is None: raise AlreadyFinalized("Context was already finalized.") return HMAC( self._key, self.algorithm, backend=self._backend, ctx=self._ctx.copy() ) def finalize(self): if self._ctx is None: raise AlreadyFinalized("Context was already finalized.") digest = self._ctx.finalize() self._ctx = None return digest def verify(self, signature): if not isinstance(signature, bytes): raise TypeError("signature must be bytes.") if self._ctx is None: raise AlreadyFinalized("Context was already finalized.") ctx, self._ctx = self._ctx, None ctx.verify(signature)
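# ---------------------------------------------------------------------------
# Editor's addition: a minimal sign/verify sketch for the HMAC class above.
# The key and message below are placeholders; use a randomly generated secret
# key in practice. verify() raises cryptography.exceptions.InvalidSignature
# when the tag does not match.
# ---------------------------------------------------------------------------
if __name__ == '__main__':
    from cryptography.hazmat.backends import default_backend

    key = b"\x00" * 32
    mac = HMAC(key, hashes.SHA256(), backend=default_backend())
    mac.update(b"message to authenticate")
    tag = mac.finalize()  # 32-byte tag for SHA-256

    # Verification recomputes the MAC and compares it in constant time.
    check = HMAC(key, hashes.SHA256(), backend=default_backend())
    check.update(b"message to authenticate")
    check.verify(tag)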
mit
phillxnet/rockstor-core
src/rockstor/storageadmin/south_migrations/0022_auto__add_dvolume__add_unique_dvolume_container_dest_dir__add_containe.py
9
35685
# -*- coding: utf-8 -*- from south.utils import datetime_utils as datetime from south.db import db from south.v2 import SchemaMigration from django.db import models class Migration(SchemaMigration): def forwards(self, orm): # Adding model 'DVolume' db.create_table(u'storageadmin_dvolume', ( (u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)), ('container', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['storageadmin.DContainer'])), ('share', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['storageadmin.Share'], null=True)), ('dest_dir', self.gf('django.db.models.fields.CharField')(max_length=1024)), ('uservol', self.gf('django.db.models.fields.BooleanField')(default=False)), )) db.send_create_signal('storageadmin', ['DVolume']) # Adding unique constraint on 'DVolume', fields ['container', 'dest_dir'] db.create_unique(u'storageadmin_dvolume', ['container_id', 'dest_dir']) # Adding model 'ContainerOption' db.create_table(u'storageadmin_containeroption', ( (u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)), ('container', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['storageadmin.DContainer'])), ('name', self.gf('django.db.models.fields.CharField')(max_length=1024)), ('val', self.gf('django.db.models.fields.CharField')(max_length=1024)), )) db.send_create_signal('storageadmin', ['ContainerOption']) # Adding model 'DImage' db.create_table(u'storageadmin_dimage', ( (u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)), ('name', self.gf('django.db.models.fields.CharField')(max_length=1024)), ('tag', self.gf('django.db.models.fields.CharField')(max_length=1024)), ('repo', self.gf('django.db.models.fields.CharField')(max_length=1024)), )) db.send_create_signal('storageadmin', ['DImage']) # Adding model 'DPort' db.create_table(u'storageadmin_dport', ( (u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)), ('hostp', self.gf('django.db.models.fields.IntegerField')(unique=True)), ('containerp', self.gf('django.db.models.fields.IntegerField')()), ('container', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['storageadmin.DContainer'])), ('protocol', self.gf('django.db.models.fields.CharField')(max_length=32, null=True)), )) db.send_create_signal('storageadmin', ['DPort']) # Adding unique constraint on 'DPort', fields ['container', 'containerp'] db.create_unique(u'storageadmin_dport', ['container_id', 'containerp']) # Adding model 'RockOn' db.create_table(u'storageadmin_rockon', ( (u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)), ('name', self.gf('django.db.models.fields.CharField')(max_length=1024)), ('description', self.gf('django.db.models.fields.CharField')(max_length=2048)), ('version', self.gf('django.db.models.fields.CharField')(max_length=32)), ('state', self.gf('django.db.models.fields.CharField')(max_length=32)), ('status', self.gf('django.db.models.fields.CharField')(max_length=32)), ('link', self.gf('django.db.models.fields.CharField')(max_length=1024, null=True)), ('website', self.gf('django.db.models.fields.CharField')(max_length=2048, null=True)), )) db.send_create_signal('storageadmin', ['RockOn']) # Adding model 'DContainer' db.create_table(u'storageadmin_dcontainer', ( (u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)), ('rockon', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['storageadmin.RockOn'])), ('dimage', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['storageadmin.DImage'])), 
('name', self.gf('django.db.models.fields.CharField')(unique=True, max_length=1024)), ('link', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['storageadmin.DContainer'], null=True)), )) db.send_create_signal('storageadmin', ['DContainer']) # Adding model 'DCustomConfig' db.create_table(u'storageadmin_dcustomconfig', ( (u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)), ('rockon', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['storageadmin.RockOn'])), ('key', self.gf('django.db.models.fields.CharField')(max_length=1024)), ('val', self.gf('django.db.models.fields.CharField')(max_length=1024, null=True)), ('description', self.gf('django.db.models.fields.CharField')(max_length=2048, null=True)), )) db.send_create_signal('storageadmin', ['DCustomConfig']) # Adding unique constraint on 'DCustomConfig', fields ['rockon', 'key'] db.create_unique(u'storageadmin_dcustomconfig', ['rockon_id', 'key']) def backwards(self, orm): # Removing unique constraint on 'DCustomConfig', fields ['rockon', 'key'] db.delete_unique(u'storageadmin_dcustomconfig', ['rockon_id', 'key']) # Removing unique constraint on 'DPort', fields ['container', 'containerp'] db.delete_unique(u'storageadmin_dport', ['container_id', 'containerp']) # Removing unique constraint on 'DVolume', fields ['container', 'dest_dir'] db.delete_unique(u'storageadmin_dvolume', ['container_id', 'dest_dir']) # Deleting model 'DVolume' db.delete_table(u'storageadmin_dvolume') # Deleting model 'ContainerOption' db.delete_table(u'storageadmin_containeroption') # Deleting model 'DImage' db.delete_table(u'storageadmin_dimage') # Deleting model 'DPort' db.delete_table(u'storageadmin_dport') # Deleting model 'RockOn' db.delete_table(u'storageadmin_rockon') # Deleting model 'DContainer' db.delete_table(u'storageadmin_dcontainer') # Deleting model 'DCustomConfig' db.delete_table(u'storageadmin_dcustomconfig') models = { u'auth.group': { 'Meta': {'object_name': 'Group'}, u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}), 'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}) }, u'auth.permission': { 'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'}, 'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}), 'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}), u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '50'}) }, u'auth.user': { 'Meta': {'object_name': 'User'}, 'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), 'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}), 'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}), 'groups': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Group']"}), u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}), 'is_staff': ('django.db.models.fields.BooleanField', [], 
{'default': 'False'}), 'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), 'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}), 'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}), 'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Permission']"}), 'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'}) }, u'contenttypes.contenttype': { 'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"}, 'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}), u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}) }, u'oauth2_provider.application': { 'Meta': {'object_name': 'Application'}, 'authorization_grant_type': ('django.db.models.fields.CharField', [], {'max_length': '32'}), 'client_id': ('django.db.models.fields.CharField', [], {'default': "u'ERc4-YE3.qaZ_rD6xEyZr4s;.e3DqqGGj=7Zv!nx'", 'unique': 'True', 'max_length': '100'}), 'client_secret': ('django.db.models.fields.CharField', [], {'default': "u'HAwZOvbLVvo5c@dloYrw92NwnCh@YS?d.yu@5T5e_JJOW5mQLz2RAXcnJ3:-x;uIrUKykS4k!m-eS7cdmh2.RX:Xn;jK-!3xWz0e3=oqydH0Xm9Q3=GwpyRxJR3@XPIw'", 'max_length': '255', 'blank': 'True'}), 'client_type': ('django.db.models.fields.CharField', [], {'max_length': '32'}), u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}), 'redirect_uris': ('django.db.models.fields.TextField', [], {'blank': 'True'}), 'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']"}) }, 'storageadmin.advancednfsexport': { 'Meta': {'object_name': 'AdvancedNFSExport'}, 'export_str': ('django.db.models.fields.CharField', [], {'max_length': '4096'}), u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}) }, 'storageadmin.apikeys': { 'Meta': {'object_name': 'APIKeys'}, u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'key': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '10'}), 'user': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '8'}) }, 'storageadmin.appliance': { 'Meta': {'object_name': 'Appliance'}, 'client_id': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True'}), 'client_secret': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True'}), 'current_appliance': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'hostname': ('django.db.models.fields.CharField', [], {'default': "'Rockstor'", 'max_length': '128'}), u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'ip': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '4096'}), 'mgmt_port': ('django.db.models.fields.IntegerField', [], {'default': '443'}), 'uuid': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '64'}) }, 'storageadmin.containeroption': { 'Meta': {'object_name': 
'ContainerOption'}, 'container': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['storageadmin.DContainer']"}), u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '1024'}), 'val': ('django.db.models.fields.CharField', [], {'max_length': '1024'}) }, 'storageadmin.dashboardconfig': { 'Meta': {'object_name': 'DashboardConfig'}, u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']", 'unique': 'True'}), 'widgets': ('django.db.models.fields.CharField', [], {'max_length': '4096'}) }, 'storageadmin.dcontainer': { 'Meta': {'object_name': 'DContainer'}, 'dimage': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['storageadmin.DImage']"}), u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'link': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['storageadmin.DContainer']", 'null': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '1024'}), 'rockon': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['storageadmin.RockOn']"}) }, 'storageadmin.dcustomconfig': { 'Meta': {'unique_together': "(('rockon', 'key'),)", 'object_name': 'DCustomConfig'}, 'description': ('django.db.models.fields.CharField', [], {'max_length': '2048', 'null': 'True'}), u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'key': ('django.db.models.fields.CharField', [], {'max_length': '1024'}), 'rockon': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['storageadmin.RockOn']"}), 'val': ('django.db.models.fields.CharField', [], {'max_length': '1024', 'null': 'True'}) }, 'storageadmin.dimage': { 'Meta': {'object_name': 'DImage'}, u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '1024'}), 'repo': ('django.db.models.fields.CharField', [], {'max_length': '1024'}), 'tag': ('django.db.models.fields.CharField', [], {'max_length': '1024'}) }, 'storageadmin.disk': { 'Meta': {'object_name': 'Disk'}, 'btrfs_uuid': ('django.db.models.fields.CharField', [], {'max_length': '1024', 'null': 'True'}), u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'model': ('django.db.models.fields.CharField', [], {'max_length': '1024', 'null': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '10'}), 'offline': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'parted': ('django.db.models.fields.BooleanField', [], {}), 'pool': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['storageadmin.Pool']", 'null': 'True', 'on_delete': 'models.SET_NULL'}), 'serial': ('django.db.models.fields.CharField', [], {'max_length': '1024', 'null': 'True'}), 'size': ('django.db.models.fields.BigIntegerField', [], {'default': '0'}), 'transport': ('django.db.models.fields.CharField', [], {'max_length': '1024', 'null': 'True'}), 'vendor': ('django.db.models.fields.CharField', [], {'max_length': '1024', 'null': 'True'}) }, 'storageadmin.dport': { 'Meta': {'unique_together': "(('container', 'containerp'),)", 'object_name': 'DPort'}, 'container': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['storageadmin.DContainer']"}), 'containerp': ('django.db.models.fields.IntegerField', [], {}), 'hostp': ('django.db.models.fields.IntegerField', [], 
{'unique': 'True'}), u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'protocol': ('django.db.models.fields.CharField', [], {'max_length': '32', 'null': 'True'}) }, 'storageadmin.dvolume': { 'Meta': {'unique_together': "(('container', 'dest_dir'),)", 'object_name': 'DVolume'}, 'container': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['storageadmin.DContainer']"}), 'dest_dir': ('django.db.models.fields.CharField', [], {'max_length': '1024'}), u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'share': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['storageadmin.Share']", 'null': 'True'}), 'uservol': ('django.db.models.fields.BooleanField', [], {'default': 'False'}) }, 'storageadmin.group': { 'Meta': {'object_name': 'Group'}, 'admin': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'gid': ('django.db.models.fields.IntegerField', [], {'unique': 'True'}), 'groupname': ('django.db.models.fields.CharField', [], {'max_length': '1024', 'null': 'True'}), u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}) }, 'storageadmin.installedplugin': { 'Meta': {'object_name': 'InstalledPlugin'}, u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'install_date': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}), 'plugin_meta': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['storageadmin.Plugin']"}) }, 'storageadmin.iscsitarget': { 'Meta': {'object_name': 'IscsiTarget'}, 'dev_name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '128'}), 'dev_size': ('django.db.models.fields.IntegerField', [], {}), u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'share': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['storageadmin.Share']"}), 'tid': ('django.db.models.fields.IntegerField', [], {'unique': 'True'}), 'tname': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '128'}) }, 'storageadmin.netatalkshare': { 'Meta': {'object_name': 'NetatalkShare'}, 'description': ('django.db.models.fields.CharField', [], {'default': "'afp on rockstor'", 'max_length': '1024'}), u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'path': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '4096'}), 'share': ('django.db.models.fields.related.OneToOneField', [], {'related_name': "'netatalkshare'", 'unique': 'True', 'to': "orm['storageadmin.Share']"}), 'time_machine': ('django.db.models.fields.CharField', [], {'default': "'yes'", 'max_length': '3'}) }, 'storageadmin.networkinterface': { 'Meta': {'object_name': 'NetworkInterface'}, 'alias': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True'}), 'boot_proto': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True'}), 'dns_servers': ('django.db.models.fields.CharField', [], {'max_length': '1024', 'null': 'True'}), 'domain': ('django.db.models.fields.CharField', [], {'max_length': '1024', 'null': 'True'}), 'gateway': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True'}), u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'ipaddr': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True'}), 'itype': ('django.db.models.fields.CharField', [], {'default': "'io'", 'max_length': '100'}), 'mac': ('django.db.models.fields.CharField', [], 
{'max_length': '100'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}), 'netmask': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True'}), 'network': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True'}), 'onboot': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True'}) }, 'storageadmin.nfsexport': { 'Meta': {'object_name': 'NFSExport'}, 'export_group': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['storageadmin.NFSExportGroup']"}), u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'mount': ('django.db.models.fields.CharField', [], {'max_length': '4096'}), 'share': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['storageadmin.Share']"}) }, 'storageadmin.nfsexportgroup': { 'Meta': {'object_name': 'NFSExportGroup'}, 'admin_host': ('django.db.models.fields.CharField', [], {'max_length': '1024', 'null': 'True'}), 'editable': ('django.db.models.fields.CharField', [], {'default': "'rw'", 'max_length': '2'}), 'enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True'}), 'host_str': ('django.db.models.fields.CharField', [], {'max_length': '4096'}), u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'mount_security': ('django.db.models.fields.CharField', [], {'default': "'insecure'", 'max_length': '8'}), 'nohide': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'syncable': ('django.db.models.fields.CharField', [], {'default': "'async'", 'max_length': '5'}) }, 'storageadmin.oauthapp': { 'Meta': {'object_name': 'OauthApp'}, 'application': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['oauth2_provider.Application']", 'unique': 'True'}), u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '128'}), 'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['storageadmin.User']"}) }, 'storageadmin.plugin': { 'Meta': {'object_name': 'Plugin'}, 'css_file_name': ('django.db.models.fields.CharField', [], {'max_length': '4096'}), 'description': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '4096'}), 'display_name': ('django.db.models.fields.CharField', [], {'default': "''", 'unique': 'True', 'max_length': '4096'}), u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'js_file_name': ('django.db.models.fields.CharField', [], {'max_length': '4096'}), 'key': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '4096'}), 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '4096'}) }, 'storageadmin.pool': { 'Meta': {'object_name': 'Pool'}, 'compression': ('django.db.models.fields.CharField', [], {'max_length': '256', 'null': 'True'}), u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'mnt_options': ('django.db.models.fields.CharField', [], {'max_length': '4096', 'null': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '4096'}), 'raid': ('django.db.models.fields.CharField', [], {'max_length': '10'}), 'size': ('django.db.models.fields.BigIntegerField', [], {'default': '0'}), 'toc': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}), 'uuid': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True'}) }, 'storageadmin.poolbalance': { 'Meta': 
{'object_name': 'PoolBalance'}, 'end_time': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}), u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'percent_done': ('django.db.models.fields.IntegerField', [], {'default': '0'}), 'pid': ('django.db.models.fields.IntegerField', [], {}), 'pool': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['storageadmin.Pool']"}), 'start_time': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}), 'status': ('django.db.models.fields.CharField', [], {'default': "'started'", 'max_length': '10'}) }, 'storageadmin.poolscrub': { 'Meta': {'object_name': 'PoolScrub'}, 'corrected_errors': ('django.db.models.fields.IntegerField', [], {'default': '0'}), 'csum_discards': ('django.db.models.fields.IntegerField', [], {'default': '0'}), 'csum_errors': ('django.db.models.fields.IntegerField', [], {'default': '0'}), 'data_extents_scrubbed': ('django.db.models.fields.BigIntegerField', [], {'default': '0'}), 'end_time': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}), u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'kb_scrubbed': ('django.db.models.fields.BigIntegerField', [], {'null': 'True'}), 'last_physical': ('django.db.models.fields.BigIntegerField', [], {'default': '0'}), 'malloc_errors': ('django.db.models.fields.IntegerField', [], {'default': '0'}), 'no_csum': ('django.db.models.fields.IntegerField', [], {'default': '0'}), 'pid': ('django.db.models.fields.IntegerField', [], {}), 'pool': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['storageadmin.Pool']"}), 'read_errors': ('django.db.models.fields.IntegerField', [], {'default': '0'}), 'start_time': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}), 'status': ('django.db.models.fields.CharField', [], {'default': "'started'", 'max_length': '10'}), 'super_errors': ('django.db.models.fields.IntegerField', [], {'default': '0'}), 'tree_bytes_scrubbed': ('django.db.models.fields.BigIntegerField', [], {'default': '0'}), 'tree_extents_scrubbed': ('django.db.models.fields.BigIntegerField', [], {'default': '0'}), 'uncorrectable_errors': ('django.db.models.fields.IntegerField', [], {'default': '0'}), 'unverified_errors': ('django.db.models.fields.IntegerField', [], {'default': '0'}), 'verify_errors': ('django.db.models.fields.IntegerField', [], {'default': '0'}) }, 'storageadmin.posixacls': { 'Meta': {'object_name': 'PosixACLs'}, u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'owner': ('django.db.models.fields.CharField', [], {'max_length': '5'}), 'perms': ('django.db.models.fields.CharField', [], {'max_length': '3'}), 'smb_share': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['storageadmin.SambaShare']"}) }, 'storageadmin.rockon': { 'Meta': {'object_name': 'RockOn'}, 'description': ('django.db.models.fields.CharField', [], {'max_length': '2048'}), u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'link': ('django.db.models.fields.CharField', [], {'max_length': '1024', 'null': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '1024'}), 'state': ('django.db.models.fields.CharField', [], {'max_length': '32'}), 'status': ('django.db.models.fields.CharField', [], {'max_length': '32'}), 'version': ('django.db.models.fields.CharField', [], {'max_length': '32'}), 'website': ('django.db.models.fields.CharField', [], {'max_length': '2048', 'null': 'True'}) }, 
'storageadmin.sambacustomconfig': { 'Meta': {'object_name': 'SambaCustomConfig'}, 'custom_config': ('django.db.models.fields.CharField', [], {'max_length': '1024', 'null': 'True'}), u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'smb_share': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['storageadmin.SambaShare']"}) }, 'storageadmin.sambashare': { 'Meta': {'object_name': 'SambaShare'}, 'browsable': ('django.db.models.fields.CharField', [], {'default': "'yes'", 'max_length': '3'}), 'comment': ('django.db.models.fields.CharField', [], {'default': "'foo bar'", 'max_length': '100'}), 'guest_ok': ('django.db.models.fields.CharField', [], {'default': "'no'", 'max_length': '3'}), u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'path': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '4096'}), 'read_only': ('django.db.models.fields.CharField', [], {'default': "'no'", 'max_length': '3'}), 'share': ('django.db.models.fields.related.OneToOneField', [], {'related_name': "'sambashare'", 'unique': 'True', 'to': "orm['storageadmin.Share']"}) }, 'storageadmin.setup': { 'Meta': {'object_name': 'Setup'}, u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'setup_disks': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'setup_network': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'setup_system': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'setup_user': ('django.db.models.fields.BooleanField', [], {'default': 'False'}) }, 'storageadmin.sftp': { 'Meta': {'object_name': 'SFTP'}, 'editable': ('django.db.models.fields.CharField', [], {'default': "'ro'", 'max_length': '2'}), u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'share': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['storageadmin.Share']", 'unique': 'True'}) }, 'storageadmin.share': { 'Meta': {'object_name': 'Share'}, 'compression_algo': ('django.db.models.fields.CharField', [], {'max_length': '1024', 'null': 'True'}), 'group': ('django.db.models.fields.CharField', [], {'default': "'root'", 'max_length': '4096'}), u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '4096'}), 'owner': ('django.db.models.fields.CharField', [], {'default': "'root'", 'max_length': '4096'}), 'perms': ('django.db.models.fields.CharField', [], {'default': "'755'", 'max_length': '9'}), 'pool': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['storageadmin.Pool']"}), 'qgroup': ('django.db.models.fields.CharField', [], {'max_length': '100'}), 'replica': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'size': ('django.db.models.fields.BigIntegerField', [], {'default': '0'}), 'subvol_name': ('django.db.models.fields.CharField', [], {'max_length': '4096'}), 'toc': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}), 'uuid': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True'}) }, 'storageadmin.snapshot': { 'Meta': {'unique_together': "(('share', 'name'),)", 'object_name': 'Snapshot'}, u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '4096'}), 'qgroup': ('django.db.models.fields.CharField', [], {'max_length': '100'}), 'real_name': ('django.db.models.fields.CharField', 
[], {'default': "'unknownsnap'", 'max_length': '4096'}), 'share': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['storageadmin.Share']"}), 'size': ('django.db.models.fields.BigIntegerField', [], {'default': '0'}), 'snap_type': ('django.db.models.fields.CharField', [], {'default': "'admin'", 'max_length': '64'}), 'toc': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}), 'uvisible': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'writable': ('django.db.models.fields.BooleanField', [], {'default': 'False'}) }, 'storageadmin.supportcase': { 'Meta': {'object_name': 'SupportCase'}, 'case_type': ('django.db.models.fields.CharField', [], {'max_length': '6'}), u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'notes': ('django.db.models.fields.TextField', [], {}), 'status': ('django.db.models.fields.CharField', [], {'max_length': '9'}), 'zipped_log': ('django.db.models.fields.CharField', [], {'max_length': '128'}) }, 'storageadmin.user': { 'Meta': {'object_name': 'User'}, 'admin': ('django.db.models.fields.BooleanField', [], {'default': 'True'}), 'email': ('django.db.models.fields.CharField', [], {'max_length': '1024', 'null': 'True'}), 'gid': ('django.db.models.fields.IntegerField', [], {'default': '5000'}), 'group': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['storageadmin.Group']", 'null': 'True'}), 'homedir': ('django.db.models.fields.CharField', [], {'max_length': '1024', 'null': 'True'}), u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'public_key': ('django.db.models.fields.CharField', [], {'max_length': '4096', 'null': 'True'}), 'shell': ('django.db.models.fields.CharField', [], {'max_length': '1024', 'null': 'True'}), 'smb_shares': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "'admin_users'", 'null': 'True', 'to': "orm['storageadmin.SambaShare']"}), 'uid': ('django.db.models.fields.IntegerField', [], {'default': '5000'}), 'user': ('django.db.models.fields.related.OneToOneField', [], {'related_name': "'suser'", 'unique': 'True', 'null': 'True', 'to': u"orm['auth.User']"}), 'username': ('django.db.models.fields.CharField', [], {'default': "''", 'unique': 'True', 'max_length': '4096'}) } } complete_apps = ['storageadmin']
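# ---------------------------------------------------------------------------
# Editor's addition: how a schema migration like this is typically driven
# with South installed. The app label 'storageadmin' comes from complete_apps
# above; the target numbers are this migration and its predecessor:
#
#     python manage.py migrate storageadmin 0022   # apply forwards()
#     python manage.py migrate storageadmin 0021   # roll back via backwards()
# ---------------------------------------------------------------------------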
gpl-3.0
ville-k/tensorflow
tensorflow/python/tools/selective_registration_header_lib.py
56
6103
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
r"""Computes a header file to be used with SELECTIVE_REGISTRATION.

See the executable wrapper, print_selective_registration_header.py, for more
information.
"""

from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

import os
import sys

from google.protobuf import text_format

from tensorflow.core.framework import graph_pb2
from tensorflow.python import pywrap_tensorflow
from tensorflow.python.platform import gfile
from tensorflow.python.platform import tf_logging


def get_ops_and_kernels(proto_fileformat, proto_files, default_ops_str):
  """Gets the ops and kernels needed from the model files."""
  ops = set()

  for proto_file in proto_files:
    tf_logging.info('Loading proto file %s', proto_file)
    # Load GraphDef.
    file_data = gfile.GFile(proto_file, 'rb').read()
    if proto_fileformat == 'rawproto':
      graph_def = graph_pb2.GraphDef.FromString(file_data)
    else:
      assert proto_fileformat == 'textproto'
      graph_def = text_format.Parse(file_data, graph_pb2.GraphDef())

    # Find all ops and kernels used by the graph.
    for node_def in graph_def.node:
      if not node_def.device:
        node_def.device = '/cpu:0'
      kernel_class = pywrap_tensorflow.TryFindKernelClass(
          node_def.SerializeToString())
      if kernel_class:
        op_and_kernel = (str(node_def.op), kernel_class.decode('utf-8'))
        if op_and_kernel not in ops:
          ops.add(op_and_kernel)
      else:
        print(
            'Warning: no kernel found for op %s' % node_def.op, file=sys.stderr)

  # Add default ops.
  if default_ops_str and default_ops_str != 'all':
    for s in default_ops_str.split(','):
      op, kernel = s.split(':')
      op_and_kernel = (op, kernel)
      if op_and_kernel not in ops:
        ops.add(op_and_kernel)

  return list(sorted(ops))


def get_header_from_ops_and_kernels(ops_and_kernels,
                                    include_all_ops_and_kernels):
  """Returns a header for use with tensorflow SELECTIVE_REGISTRATION.

  Args:
    ops_and_kernels: a set of (op_name, kernel_class_name) pairs to include.
    include_all_ops_and_kernels: if True, ops_and_kernels is ignored and all op
      kernels are included.

  Returns:
    the string of the header that should be written as ops_to_register.h.
  """
  ops = set([op for op, _ in ops_and_kernels])
  result_list = []

  def append(s):
    result_list.append(s)

  _, script_name = os.path.split(sys.argv[0])
  append('// This file was autogenerated by %s' % script_name)
  append('#ifndef OPS_TO_REGISTER')
  append('#define OPS_TO_REGISTER')

  if include_all_ops_and_kernels:
    append('#define SHOULD_REGISTER_OP(op) true')
    append('#define SHOULD_REGISTER_OP_KERNEL(clz) true')
    append('#define SHOULD_REGISTER_OP_GRADIENT true')
  else:
    line = '''
    namespace {
      constexpr const char* skip(const char* x) {
        return (*x) ? (*x == ' ' ? skip(x + 1) : x) : x;
      }

      constexpr bool isequal(const char* x, const char* y) {
        return (*skip(x) && *skip(y))
                   ? (*skip(x) == *skip(y) && isequal(skip(x) + 1, skip(y) + 1))
                   : (!*skip(x) && !*skip(y));
      }

      template<int N>
      struct find_in {
        static constexpr bool f(const char* x, const char* const y[N]) {
          return isequal(x, y[0]) || find_in<N - 1>::f(x, y + 1);
        }
      };

      template<>
      struct find_in<0> {
        static constexpr bool f(const char* x, const char* const y[]) {
          return false;
        }
      };
    }  // end namespace
    '''
    line += 'constexpr const char* kNecessaryOpKernelClasses[] = {\n'
    for _, kernel_class in ops_and_kernels:
      line += '"%s",\n' % kernel_class
    line += '};'
    append(line)
    append('#define SHOULD_REGISTER_OP_KERNEL(clz) '
           '(find_in<sizeof(kNecessaryOpKernelClasses) '
           '/ sizeof(*kNecessaryOpKernelClasses)>::f(clz, '
           'kNecessaryOpKernelClasses))')
    append('')

    append('constexpr inline bool ShouldRegisterOp(const char op[]) {')
    append('  return false')
    for op in sorted(ops):
      append('     || isequal(op, "%s")' % op)
    append('  ;')
    append('}')
    append('#define SHOULD_REGISTER_OP(op) ShouldRegisterOp(op)')
    append('')

    append('#define SHOULD_REGISTER_OP_GRADIENT ' + (
        'true' if 'SymbolicGradient' in ops else 'false'))

  append('#endif')
  return '\n'.join(result_list)


def get_header(graphs,
               proto_fileformat='rawproto',
               default_ops='NoOp:NoOp,_Recv:RecvOp,_Send:SendOp'):
  """Computes a header for use with tensorflow SELECTIVE_REGISTRATION.

  Args:
    graphs: a list of paths to GraphDef files to include.
    proto_fileformat: optional format of proto file, either 'textproto' or
      'rawproto' (default).
    default_ops: optional comma-separated string of operator:kernel pairs to
      always include implementation for. Pass 'all' to have all operators and
      kernels included. Default: 'NoOp:NoOp,_Recv:RecvOp,_Send:SendOp'.

  Returns:
    the string of the header that should be written as ops_to_register.h.
  """
  ops_and_kernels = get_ops_and_kernels(proto_fileformat, graphs, default_ops)
  if not ops_and_kernels:
    print('Error reading graph!')
    return 1

  return get_header_from_ops_and_kernels(ops_and_kernels,
                                         default_ops == 'all')
apache-2.0
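The record's docstring points to print_selective_registration_header.py as the official entry point, but the library can also be driven directly. A minimal usage sketch, assuming a TensorFlow build that provides pywrap_tensorflow; the model path is hypothetical:

# Sketch: generate ops_to_register.h from a frozen GraphDef.
from tensorflow.python.tools import selective_registration_header_lib

header = selective_registration_header_lib.get_header(
    graphs=['/tmp/frozen_graph.pb'],   # hypothetical rawproto GraphDef file
    proto_fileformat='rawproto',
    default_ops='NoOp:NoOp,_Recv:RecvOp,_Send:SendOp')

with open('ops_to_register.h', 'w') as f:
    f.write(header)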
18padx08/PPTex
PPTexEnv_x86_64/lib/python2.7/site-packages/scipy/weave/tests/test_build_tools.py
96
2480
from __future__ import absolute_import, print_function

# still needed
# tests for MingW32Compiler
# don't know how to test gcc_exists() and msvc_exists()...

import os
import sys
import tempfile
import warnings

from numpy.testing import TestCase, assert_, run_module_suite

from scipy.weave import build_tools

# filter warnings generated by checking for bad paths
warnings.filterwarnings('ignore', message="specified build_dir",
                        module='scipy.weave')


def is_writable(val):
    return os.access(val, os.W_OK)


class TestConfigureBuildDir(TestCase):
    def test_default(self):
        # default behavior is to return current directory
        d = build_tools.configure_build_dir()
        if is_writable('.'):
            assert_(d == os.path.abspath('.'))
        assert_(is_writable(d))

    def test_curdir(self):
        # make sure it handles relative values.
        d = build_tools.configure_build_dir('.')
        if is_writable('.'):
            assert_(d == os.path.abspath('.'))
        assert_(is_writable(d))

    def test_pardir(self):
        # make sure it handles relative values
        d = build_tools.configure_build_dir('..')
        if is_writable('..'):
            assert_(d == os.path.abspath('..'))
        assert_(is_writable(d))

    def test_bad_path(self):
        # bad path should return same as default (and warn)
        d = build_tools.configure_build_dir('_bad_path_')
        d2 = build_tools.configure_build_dir()
        assert_(d == d2)
        assert_(is_writable(d))


class TestConfigureTempDir(TestConfigureBuildDir):
    def test_default(self):
        # default behavior returns tempdir
        # Note: this'll fail if the temp directory isn't writable.
        d = build_tools.configure_temp_dir()
        assert_(d == tempfile.gettempdir())
        assert_(is_writable(d))


class TestConfigureSysArgv(TestCase):
    def test_simple(self):
        build_dir = 'build_dir'
        temp_dir = 'temp_dir'
        compiler = 'compiler'
        pre_argv = sys.argv[:]
        build_tools.configure_sys_argv(compiler, temp_dir, build_dir)
        argv = sys.argv[:]
        bd = argv[argv.index('--build-lib') + 1]
        assert_(bd == build_dir)
        td = argv[argv.index('--build-temp') + 1]
        assert_(td == temp_dir)
        argv.index('--compiler=' + compiler)
        build_tools.restore_sys_argv()
        assert_(pre_argv == sys.argv[:])


if __name__ == "__main__":
    run_module_suite()
mit
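TestConfigureSysArgv above exercises a save/patch/restore cycle on sys.argv (scipy.weave rewrites argv so distutils picks up its build directories). An illustrative stdlib sketch of that pattern, not scipy.weave's actual implementation:

# Sketch: the sys.argv save/patch/restore pattern the test above expects.
import sys

_saved_argv = None

def patch_sys_argv(compiler, temp_dir, build_dir):
    global _saved_argv
    _saved_argv = sys.argv[:]              # keep a copy to restore later
    sys.argv.extend(['--compiler=' + compiler,
                     '--build-temp', temp_dir,
                     '--build-lib', build_dir])

def restore_sys_argv():
    sys.argv[:] = _saved_argv              # undo the patch in place

patch_sys_argv('mingw32', 'temp_dir', 'build_dir')
assert '--build-lib' in sys.argv
restore_sys_argv()
assert '--build-lib' not in sys.argv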
jotes/moto
tests/test_route53/test_route53.py
5
8783
from __future__ import unicode_literals

import boto
from boto.route53.healthcheck import HealthCheck
from boto.route53.record import ResourceRecordSets

import sure  # noqa

from moto import mock_route53


@mock_route53
def test_hosted_zone():
    conn = boto.connect_route53('the_key', 'the_secret')
    firstzone = conn.create_hosted_zone("testdns.aws.com")
    zones = conn.get_all_hosted_zones()
    len(zones["ListHostedZonesResponse"]["HostedZones"]).should.equal(1)

    conn.create_hosted_zone("testdns1.aws.com")
    zones = conn.get_all_hosted_zones()
    len(zones["ListHostedZonesResponse"]["HostedZones"]).should.equal(2)

    id1 = firstzone["CreateHostedZoneResponse"]["HostedZone"]["Id"].split("/")[-1]
    zone = conn.get_hosted_zone(id1)
    zone["GetHostedZoneResponse"]["HostedZone"]["Name"].should.equal("testdns.aws.com")

    conn.delete_hosted_zone(id1)
    zones = conn.get_all_hosted_zones()
    len(zones["ListHostedZonesResponse"]["HostedZones"]).should.equal(1)

    conn.get_hosted_zone.when.called_with("abcd").should.throw(
        boto.route53.exception.DNSServerError, "404 Not Found")


@mock_route53
def test_rrset():
    conn = boto.connect_route53('the_key', 'the_secret')

    conn.get_all_rrsets.when.called_with("abcd", type="A").should.throw(
        boto.route53.exception.DNSServerError, "404 Not Found")

    zone = conn.create_hosted_zone("testdns.aws.com")
    zoneid = zone["CreateHostedZoneResponse"]["HostedZone"]["Id"].split("/")[-1]

    changes = ResourceRecordSets(conn, zoneid)
    change = changes.add_change("CREATE", "foo.bar.testdns.aws.com", "A")
    change.add_value("1.2.3.4")
    changes.commit()

    rrsets = conn.get_all_rrsets(zoneid, type="A")
    rrsets.should.have.length_of(1)
    rrsets[0].resource_records[0].should.equal('1.2.3.4')

    rrsets = conn.get_all_rrsets(zoneid, type="CNAME")
    rrsets.should.have.length_of(0)

    changes = ResourceRecordSets(conn, zoneid)
    changes.add_change("DELETE", "foo.bar.testdns.aws.com", "A")
    change = changes.add_change("CREATE", "foo.bar.testdns.aws.com", "A")
    change.add_value("5.6.7.8")
    changes.commit()

    rrsets = conn.get_all_rrsets(zoneid, type="A")
    rrsets.should.have.length_of(1)
    rrsets[0].resource_records[0].should.equal('5.6.7.8')

    changes = ResourceRecordSets(conn, zoneid)
    changes.add_change("DELETE", "foo.bar.testdns.aws.com", "A")
    changes.commit()

    rrsets = conn.get_all_rrsets(zoneid)
    rrsets.should.have.length_of(0)

    changes = ResourceRecordSets(conn, zoneid)
    change = changes.add_change("CREATE", "foo.bar.testdns.aws.com", "A")
    change.add_value("1.2.3.4")
    change = changes.add_change("CREATE", "bar.foo.testdns.aws.com", "A")
    change.add_value("5.6.7.8")
    changes.commit()

    rrsets = conn.get_all_rrsets(zoneid, type="A")
    rrsets.should.have.length_of(2)

    rrsets = conn.get_all_rrsets(zoneid, name="foo.bar.testdns.aws.com", type="A")
    rrsets.should.have.length_of(1)
    rrsets[0].resource_records[0].should.equal('1.2.3.4')

    rrsets = conn.get_all_rrsets(zoneid, name="bar.foo.testdns.aws.com", type="A")
    rrsets.should.have.length_of(1)
    rrsets[0].resource_records[0].should.equal('5.6.7.8')

    rrsets = conn.get_all_rrsets(zoneid, name="foo.foo.testdns.aws.com", type="A")
    rrsets.should.have.length_of(0)


@mock_route53
def test_rrset_with_multiple_values():
    conn = boto.connect_route53('the_key', 'the_secret')
    zone = conn.create_hosted_zone("testdns.aws.com")
    zoneid = zone["CreateHostedZoneResponse"]["HostedZone"]["Id"].split("/")[-1]

    changes = ResourceRecordSets(conn, zoneid)
    change = changes.add_change("CREATE", "foo.bar.testdns.aws.com", "A")
    change.add_value("1.2.3.4")
    change.add_value("5.6.7.8")
    changes.commit()

    rrsets = conn.get_all_rrsets(zoneid, type="A")
    rrsets.should.have.length_of(1)
    set(rrsets[0].resource_records).should.equal(set(['1.2.3.4', '5.6.7.8']))


@mock_route53
def test_alias_rrset():
    conn = boto.connect_route53('the_key', 'the_secret')
    zone = conn.create_hosted_zone("testdns.aws.com")
    zoneid = zone["CreateHostedZoneResponse"]["HostedZone"]["Id"].split("/")[-1]

    changes = ResourceRecordSets(conn, zoneid)
    changes.add_change("CREATE", "foo.alias.testdns.aws.com", "A",
                       alias_hosted_zone_id="Z3DG6IL3SJCGPX",
                       alias_dns_name="foo.testdns.aws.com")
    changes.add_change("CREATE", "bar.alias.testdns.aws.com", "CNAME",
                       alias_hosted_zone_id="Z3DG6IL3SJCGPX",
                       alias_dns_name="bar.testdns.aws.com")
    changes.commit()

    rrsets = conn.get_all_rrsets(zoneid, type="A")
    rrsets.should.have.length_of(1)
    rrsets[0].resource_records[0].should.equal('foo.testdns.aws.com')

    rrsets = conn.get_all_rrsets(zoneid, type="CNAME")
    rrsets.should.have.length_of(1)
    rrsets[0].resource_records[0].should.equal('bar.testdns.aws.com')


@mock_route53
def test_create_health_check():
    conn = boto.connect_route53('the_key', 'the_secret')

    check = HealthCheck(
        ip_addr="10.0.0.25",
        port=80,
        hc_type="HTTP",
        resource_path="/",
        fqdn="example.com",
        string_match="a good response",
        request_interval=10,
        failure_threshold=2,
    )
    conn.create_health_check(check)

    checks = conn.get_list_health_checks()['ListHealthChecksResponse']['HealthChecks']
    list(checks).should.have.length_of(1)
    check = checks[0]
    config = check['HealthCheckConfig']
    config['IPAddress'].should.equal("10.0.0.25")
    config['Port'].should.equal("80")
    config['Type'].should.equal("HTTP")
    config['ResourcePath'].should.equal("/")
    config['FullyQualifiedDomainName'].should.equal("example.com")
    config['SearchString'].should.equal("a good response")
    config['RequestInterval'].should.equal("10")
    config['FailureThreshold'].should.equal("2")


@mock_route53
def test_delete_health_check():
    conn = boto.connect_route53('the_key', 'the_secret')

    check = HealthCheck(
        ip_addr="10.0.0.25",
        port=80,
        hc_type="HTTP",
        resource_path="/",
    )
    conn.create_health_check(check)

    checks = conn.get_list_health_checks()['ListHealthChecksResponse']['HealthChecks']
    list(checks).should.have.length_of(1)
    health_check_id = checks[0]['Id']

    conn.delete_health_check(health_check_id)

    checks = conn.get_list_health_checks()['ListHealthChecksResponse']['HealthChecks']
    list(checks).should.have.length_of(0)


@mock_route53
def test_use_health_check_in_resource_record_set():
    conn = boto.connect_route53('the_key', 'the_secret')

    check = HealthCheck(
        ip_addr="10.0.0.25",
        port=80,
        hc_type="HTTP",
        resource_path="/",
    )
    check = conn.create_health_check(check)['CreateHealthCheckResponse']['HealthCheck']
    check_id = check['Id']

    zone = conn.create_hosted_zone("testdns.aws.com")
    zone_id = zone["CreateHostedZoneResponse"]["HostedZone"]["Id"].split("/")[-1]

    changes = ResourceRecordSets(conn, zone_id)
    change = changes.add_change("CREATE", "foo.bar.testdns.aws.com", "A",
                                health_check=check_id)
    change.add_value("1.2.3.4")
    changes.commit()

    record_sets = conn.get_all_rrsets(zone_id)
    record_sets[0].health_check.should.equal(check_id)


@mock_route53
def test_hosted_zone_comment_preserved():
    conn = boto.connect_route53('the_key', 'the_secret')

    firstzone = conn.create_hosted_zone("testdns.aws.com.", comment="test comment")
    zone_id = firstzone["CreateHostedZoneResponse"]["HostedZone"]["Id"].split("/")[-1]

    hosted_zone = conn.get_hosted_zone(zone_id)
    hosted_zone["GetHostedZoneResponse"]["HostedZone"]["Config"]["Comment"].should.equal("test comment")

    hosted_zones = conn.get_all_hosted_zones()
    hosted_zones["ListHostedZonesResponse"]["HostedZones"][0]["Config"]["Comment"].should.equal("test comment")

    zone = conn.get_zone("testdns.aws.com.")
    zone.config["Comment"].should.equal("test comment")


@mock_route53
def test_deleting_weighted_route():
    conn = boto.connect_route53()

    conn.create_hosted_zone("testdns.aws.com.")
    zone = conn.get_zone("testdns.aws.com.")

    zone.add_cname("cname.testdns.aws.com", "example.com",
                   identifier=('success-test-foo', '50'))
    zone.add_cname("cname.testdns.aws.com", "example.com",
                   identifier=('success-test-bar', '50'))

    cnames = zone.get_cname('cname.testdns.aws.com.', all=True)
    cnames.should.have.length_of(2)

    foo_cname = [cname for cname in cnames
                 if cname.identifier == 'success-test-foo'][0]

    zone.delete_record(foo_cname)
    cname = zone.get_cname('cname.testdns.aws.com.', all=True)
    # When get_cname only had one result, it returns just that result
    # instead of a list.
    cname.identifier.should.equal('success-test-bar')
apache-2.0
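The same @mock_route53 decorator used throughout the tests above can wrap any function, so code under test talks to a fully in-memory Route53. A minimal standalone sketch, assuming boto and moto are installed:

# Sketch: moto-backed Route53 call with no AWS account involved.
import boto
from moto import mock_route53

@mock_route53
def count_zones_after_create(name):
    conn = boto.connect_route53('the_key', 'the_secret')
    conn.create_hosted_zone(name)
    zones = conn.get_all_hosted_zones()
    return len(zones["ListHostedZonesResponse"]["HostedZones"])

assert count_zones_after_create("example.aws.com") == 1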
nitzmahone/ansible
lib/ansible/modules/cloud/ovirt/ovirt_disk.py
5
28981
#!/usr/bin/python # -*- coding: utf-8 -*- # # Copyright (c) 2016 Red Hat, Inc. # GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) ANSIBLE_METADATA = {'metadata_version': '1.1', 'status': ['preview'], 'supported_by': 'community'} DOCUMENTATION = ''' --- module: ovirt_disk short_description: "Module to manage Virtual Machine and floating disks in oVirt/RHV" version_added: "2.2" author: "Ondra Machacek (@machacekondra)" description: - "Module to manage Virtual Machine and floating disks in oVirt/RHV." options: id: description: - "ID of the disk to manage. Either C(id) or C(name) is required." name: description: - "Name of the disk to manage. Either C(id) or C(name)/C(alias) is required." aliases: ['alias'] description: description: - "Description of the disk image to manage." version_added: "2.5" vm_name: description: - "Name of the Virtual Machine to manage. Either C(vm_id) or C(vm_name) is required if C(state) is I(attached) or I(detached)." vm_id: description: - "ID of the Virtual Machine to manage. Either C(vm_id) or C(vm_name) is required if C(state) is I(attached) or I(detached)." state: description: - "Should the Virtual Machine disk be present/absent/attached/detached." choices: ['present', 'absent', 'attached', 'detached'] default: 'present' download_image_path: description: - "Path on a file system where disk should be downloaded." - "Note that you must have an valid oVirt/RHV engine CA in your system trust store or you must provide it in C(ca_file) parameter." - "Note that the disk is not downloaded when the file already exists, but you can forcibly download the disk when using C(force) I (true)." version_added: "2.3" upload_image_path: description: - "Path to disk image, which should be uploaded." - "Note that currently we support only compatibility version 0.10 of the qcow disk." - "Note that you must have an valid oVirt/RHV engine CA in your system trust store or you must provide it in C(ca_file) parameter." - "Note that there is no reliable way to achieve idempotency, so if you want to upload the disk even if the disk with C(id) or C(name) exists, then please use C(force) I(true). If you will use C(force) I(false), which is default, then the disk image won't be uploaded." version_added: "2.3" size: description: - "Size of the disk. Size should be specified using IEC standard units. For example 10GiB, 1024MiB, etc." - "Size can be only increased, not decreased." interface: description: - "Driver of the storage interface." - "It's required parameter when creating the new disk." choices: ['virtio', 'ide', 'virtio_scsi'] default: 'virtio' format: description: - Specify format of the disk. - Note that this option isn't idempotent as it's not currently possible to change format of the disk via API. choices: ['raw', 'cow'] sparse: required: False type: bool version_added: "2.5" description: - "I(True) if the disk should be sparse (also known as I(thin provision)). If the parameter is omitted, cow disks will be created as sparse and raw disks as I(preallocated)" - Note that this option isn't idempotent as it's not currently possible to change sparseness of the disk via API. storage_domain: description: - "Storage domain name where disk should be created. By default storage is chosen by oVirt/RHV engine." storage_domains: description: - "Storage domain names where disk should be copied." 
- "C(**IMPORTANT**)" - "There is no reliable way to achieve idempotency, so every time you specify this parameter the disks are copied, so please handle your playbook accordingly to not copy the disks all the time. This is valid only for VM and floating disks, template disks works as expected." version_added: "2.3" force: description: - "Please take a look at C(image_path) documentation to see the correct usage of this parameter." version_added: "2.3" type: bool profile: description: - "Disk profile name to be attached to disk. By default profile is chosen by oVirt/RHV engine." quota_id: description: - "Disk quota ID to be used for disk. By default quota is chosen by oVirt/RHV engine." version_added: "2.5" bootable: description: - "I(True) if the disk should be bootable. By default when disk is created it isn't bootable." type: bool shareable: description: - "I(True) if the disk should be shareable. By default when disk is created it isn't shareable." type: bool logical_unit: description: - "Dictionary which describes LUN to be directly attached to VM:" - "C(address) - Address of the storage server. Used by iSCSI." - "C(port) - Port of the storage server. Used by iSCSI." - "C(target) - iSCSI target." - "C(lun_id) - LUN id." - "C(username) - CHAP Username to be used to access storage server. Used by iSCSI." - "C(password) - CHAP Password of the user to be used to access storage server. Used by iSCSI." - "C(storage_type) - Storage type either I(fcp) or I(iscsi)." sparsify: description: - "I(True) if the disk should be sparsified." - "Sparsification frees space in the disk image that is not used by its filesystem. As a result, the image will occupy less space on the storage." - "Note that this parameter isn't idempotent, as it's not possible to check if the disk should be or should not be sparsified." version_added: "2.4" type: bool openstack_volume_type: description: - "Name of the openstack volume type. This is valid when working with cinder." version_added: "2.4" image_provider: description: - "When C(state) is I(exported) disk is exported to given Glance image provider." - "C(**IMPORTANT**)" - "There is no reliable way to achieve idempotency, so every time you specify this parameter the disk is exported, so please handle your playbook accordingly to not export the disk all the time. This option is valid only for template disks." version_added: "2.4" host: description: - "When the hypervisor name is specified the newly created disk or an existing disk will refresh its information about the underlying storage( Disk size, Serial, Product ID, Vendor ID ...) The specified host will be used for gathering the storage related information. This option is only valid for passthrough disks. This option requires at least the logical_unit.id to be specified" version_added: "2.8" wipe_after_delete: description: - "If the disk's Wipe After Delete is enabled, then the disk is first wiped." 
type: bool version_added: "2.8" extends_documentation_fragment: ovirt ''' EXAMPLES = ''' # Examples don't contain auth parameter for simplicity, # look at ovirt_auth module to see how to reuse authentication: # Create and attach new disk to VM - ovirt_disk: name: myvm_disk vm_name: rhel7 size: 10GiB format: cow interface: virtio storage_domain: data # Attach logical unit to VM rhel7 - ovirt_disk: vm_name: rhel7 logical_unit: target: iqn.2016-08-09.brq.str-01:omachace id: 1IET_000d0001 address: 10.34.63.204 interface: virtio # Detach disk from VM - ovirt_disk: state: detached name: myvm_disk vm_name: rhel7 size: 10GiB format: cow interface: virtio # Change Disk Name - ovirt_disk: id: 00000000-0000-0000-0000-000000000000 storage_domain: data name: "new_disk_name" vm_name: rhel7 # Upload local image to disk and attach it to vm: # Since Ansible 2.3 - ovirt_disk: name: mydisk vm_name: myvm interface: virtio size: 10GiB format: cow image_path: /path/to/mydisk.qcow2 storage_domain: data # Download disk to local file system: # Since Ansible 2.3 - ovirt_disk: id: 7de90f31-222c-436c-a1ca-7e655bd5b60c download_image_path: /home/user/mydisk.qcow2 # Export disk as image to Glance domain # Since Ansible 2.4 - ovirt_disks: id: 7de90f31-222c-436c-a1ca-7e655bd5b60c image_provider: myglance state: exported # Defining a specific quota while creating a disk image: # Since Ansible 2.5 - ovirt_quotas_facts: data_center: Default name: myquota - ovirt_disk: name: mydisk size: 10GiB storage_domain: data description: somedescriptionhere quota_id: "{{ ovirt_quotas[0]['id'] }}" ''' RETURN = ''' id: description: "ID of the managed disk" returned: "On success if disk is found." type: str sample: 7de90f31-222c-436c-a1ca-7e655bd5b60c disk: description: "Dictionary of all the disk attributes. Disk attributes can be found on your oVirt/RHV instance at following url: http://ovirt.github.io/ovirt-engine-api-model/master/#types/disk." returned: "On success if disk is found and C(vm_id) or C(vm_name) wasn't passed." type: dict disk_attachment: description: "Dictionary of all the disk attachment attributes. Disk attachment attributes can be found on your oVirt/RHV instance at following url: http://ovirt.github.io/ovirt-engine-api-model/master/#types/disk_attachment." returned: "On success if disk is found and C(vm_id) or C(vm_name) was passed and VM was found." type: dict ''' import os import time import traceback import ssl from ansible.module_utils.six.moves.http_client import HTTPSConnection, IncompleteRead from ansible.module_utils.six.moves.urllib.parse import urlparse try: import ovirtsdk4.types as otypes except ImportError: pass from ansible.module_utils.basic import AnsibleModule from ansible.module_utils.ovirt import ( BaseModule, check_sdk, check_params, create_connection, convert_to_bytes, equal, follow_link, get_id_by_name, ovirt_full_argument_spec, search_by_name, wait, ) def _search_by_lun(disks_service, lun_id): """ Find disk by LUN ID. """ res = [ disk for disk in disks_service.list(search='disk_type=lun') if ( disk.lun_storage.id == lun_id ) ] return res[0] if res else None def transfer(connection, module, direction, transfer_func): transfers_service = connection.system_service().image_transfers_service() transfer = transfers_service.add( otypes.ImageTransfer( image=otypes.Image( id=module.params['id'], ), direction=direction, ) ) transfer_service = transfers_service.image_transfer_service(transfer.id) try: # After adding a new transfer for the disk, the transfer's status will be INITIALIZING. 
# Wait until the init phase is over. The actual transfer can start when its status is "Transferring". while transfer.phase == otypes.ImageTransferPhase.INITIALIZING: time.sleep(module.params['poll_interval']) transfer = transfer_service.get() proxy_url = urlparse(transfer.proxy_url) context = ssl.create_default_context() auth = module.params['auth'] if auth.get('insecure'): context.check_hostname = False context.verify_mode = ssl.CERT_NONE elif auth.get('ca_file'): context.load_verify_locations(cafile=auth.get('ca_file')) proxy_connection = HTTPSConnection( proxy_url.hostname, proxy_url.port, context=context, ) transfer_func( transfer_service, proxy_connection, proxy_url, transfer.signed_ticket ) return True finally: transfer_service.finalize() while transfer.phase in [ otypes.ImageTransferPhase.TRANSFERRING, otypes.ImageTransferPhase.FINALIZING_SUCCESS, ]: time.sleep(module.params['poll_interval']) transfer = transfer_service.get() if transfer.phase in [ otypes.ImageTransferPhase.UNKNOWN, otypes.ImageTransferPhase.FINISHED_FAILURE, otypes.ImageTransferPhase.FINALIZING_FAILURE, otypes.ImageTransferPhase.CANCELLED, ]: raise Exception( "Error occurred while uploading image. The transfer is in %s" % transfer.phase ) if module.params.get('logical_unit'): disks_service = connection.system_service().disks_service() wait( service=disks_service.service(module.params['id']), condition=lambda d: d.status == otypes.DiskStatus.OK, wait=module.params['wait'], timeout=module.params['timeout'], ) def download_disk_image(connection, module): def _transfer(transfer_service, proxy_connection, proxy_url, transfer_ticket): BUF_SIZE = 128 * 1024 transfer_headers = { 'Authorization': transfer_ticket, } proxy_connection.request( 'GET', proxy_url.path, headers=transfer_headers, ) r = proxy_connection.getresponse() path = module.params["download_image_path"] image_size = int(r.getheader('Content-Length')) with open(path, "wb") as mydisk: pos = 0 while pos < image_size: to_read = min(image_size - pos, BUF_SIZE) chunk = r.read(to_read) if not chunk: raise RuntimeError("Socket disconnected") mydisk.write(chunk) pos += len(chunk) return transfer( connection, module, otypes.ImageTransferDirection.DOWNLOAD, transfer_func=_transfer, ) def upload_disk_image(connection, module): def _transfer(transfer_service, proxy_connection, proxy_url, transfer_ticket): BUF_SIZE = 128 * 1024 path = module.params['upload_image_path'] image_size = os.path.getsize(path) proxy_connection.putrequest("PUT", proxy_url.path) proxy_connection.putheader('Content-Length', "%d" % (image_size,)) proxy_connection.endheaders() with open(path, "rb") as disk: pos = 0 while pos < image_size: to_read = min(image_size - pos, BUF_SIZE) chunk = disk.read(to_read) if not chunk: transfer_service.pause() raise RuntimeError("Unexpected end of file at pos=%d" % pos) proxy_connection.send(chunk) pos += len(chunk) return transfer( connection, module, otypes.ImageTransferDirection.UPLOAD, transfer_func=_transfer, ) class DisksModule(BaseModule): def build_entity(self): logical_unit = self._module.params.get('logical_unit') disk = otypes.Disk( id=self._module.params.get('id'), name=self._module.params.get('name'), description=self._module.params.get('description'), format=otypes.DiskFormat( self._module.params.get('format') ) if self._module.params.get('format') else None, sparse=self._module.params.get( 'sparse' ) if self._module.params.get( 'sparse' ) is not None else self._module.params.get('format') != 'raw', openstack_volume_type=otypes.OpenStackVolumeType( 
name=self.param('openstack_volume_type') ) if self.param('openstack_volume_type') else None, provisioned_size=convert_to_bytes( self._module.params.get('size') ), storage_domains=[ otypes.StorageDomain( name=self._module.params.get('storage_domain'), ), ], quota=otypes.Quota(id=self._module.params.get('quota_id')) if self.param('quota_id') else None, shareable=self._module.params.get('shareable'), wipe_after_delete=self.param('wipe_after_delete'), lun_storage=otypes.HostStorage( type=otypes.StorageType( logical_unit.get('storage_type', 'iscsi') ), logical_units=[ otypes.LogicalUnit( address=logical_unit.get('address'), port=logical_unit.get('port', 3260), target=logical_unit.get('target'), id=logical_unit.get('id'), username=logical_unit.get('username'), password=logical_unit.get('password'), ) ], ) if logical_unit else None, ) if hasattr(disk, 'initial_size'): disk.initial_size = convert_to_bytes( self._module.params.get('size') ) return disk def update_storage_domains(self, disk_id): changed = False disk_service = self._service.service(disk_id) disk = disk_service.get() sds_service = self._connection.system_service().storage_domains_service() # We don't support move&copy for non file based storages: if disk.storage_type != otypes.DiskStorageType.IMAGE: return changed # Initiate move: if self._module.params['storage_domain']: new_disk_storage_id = get_id_by_name(sds_service, self._module.params['storage_domain']) changed = self.action( action='move', entity=disk, action_condition=lambda d: new_disk_storage_id != d.storage_domains[0].id, wait_condition=lambda d: d.status == otypes.DiskStatus.OK, storage_domain=otypes.StorageDomain( id=new_disk_storage_id, ), post_action=lambda _: time.sleep(self._module.params['poll_interval']), )['changed'] if self._module.params['storage_domains']: for sd in self._module.params['storage_domains']: new_disk_storage = search_by_name(sds_service, sd) changed = changed or self.action( action='copy', entity=disk, action_condition=( lambda disk: new_disk_storage.id not in [sd.id for sd in disk.storage_domains] ), wait_condition=lambda disk: disk.status == otypes.DiskStatus.OK, storage_domain=otypes.StorageDomain( id=new_disk_storage.id, ), )['changed'] return changed def _update_check(self, entity): return ( equal(self._module.params.get('name'), entity.name) and equal(self._module.params.get('description'), entity.description) and equal(self.param('quota_id'), getattr(entity.quota, 'id', None)) and equal(convert_to_bytes(self._module.params.get('size')), entity.provisioned_size) and equal(self._module.params.get('shareable'), entity.shareable) and equal(self.param('wipe_after_delete'), entity.wipe_after_delete) ) class DiskAttachmentsModule(DisksModule): def build_entity(self): return otypes.DiskAttachment( disk=super(DiskAttachmentsModule, self).build_entity(), interface=otypes.DiskInterface( self._module.params.get('interface') ) if self._module.params.get('interface') else None, bootable=self._module.params.get('bootable'), active=True, ) def update_check(self, entity): return ( super(DiskAttachmentsModule, self)._update_check(follow_link(self._connection, entity.disk)) and equal(self._module.params.get('interface'), str(entity.interface)) and equal(self._module.params.get('bootable'), entity.bootable) ) def searchable_attributes(module): """ Return all searchable disk attributes passed to module. 
""" attributes = { 'name': module.params.get('name'), 'Storage.name': module.params.get('storage_domain'), 'vm_names': module.params.get('vm_name'), } return dict((k, v) for k, v in attributes.items() if v is not None) def main(): argument_spec = ovirt_full_argument_spec( state=dict( choices=['present', 'absent', 'attached', 'detached', 'exported'], default='present' ), id=dict(default=None), name=dict(default=None, aliases=['alias']), description=dict(default=None), vm_name=dict(default=None), vm_id=dict(default=None), size=dict(default=None), interface=dict(default=None,), storage_domain=dict(default=None), storage_domains=dict(default=None, type='list'), profile=dict(default=None), quota_id=dict(default=None), format=dict(default='cow', choices=['raw', 'cow']), sparse=dict(default=None, type='bool'), bootable=dict(default=None, type='bool'), shareable=dict(default=None, type='bool'), logical_unit=dict(default=None, type='dict'), download_image_path=dict(default=None), upload_image_path=dict(default=None, aliases=['image_path']), force=dict(default=False, type='bool'), sparsify=dict(default=None, type='bool'), openstack_volume_type=dict(default=None), image_provider=dict(default=None), host=dict(default=None), wipe_after_delete=dict(type='bool', default=None), ) module = AnsibleModule( argument_spec=argument_spec, supports_check_mode=True, ) lun = module.params.get('logical_unit') host = module.params['host'] # Fail when host is specified with the LUN id. Lun id is needed to identify # an existing disk if already available inthe environment. if (host and lun is None) or (host and lun.get("id") is None): module.fail_json( msg="Can not use parameter host ({0!s}) without " "specifying the logical_unit id".format(host) ) check_sdk(module) check_params(module) try: disk = None state = module.params['state'] auth = module.params.get('auth') connection = create_connection(auth) disks_service = connection.system_service().disks_service() disks_module = DisksModule( connection=connection, module=module, service=disks_service, ) if lun: disk = _search_by_lun(disks_service, lun.get('id')) ret = None # First take care of creating the VM, if needed: if state in ('present', 'detached', 'attached'): ret = disks_module.create( entity=disk, search_params=searchable_attributes(module), result_state=otypes.DiskStatus.OK if lun is None else None, fail_condition=lambda d: d.status == otypes.DiskStatus.ILLEGAL if lun is None else False, ) is_new_disk = ret['changed'] ret['changed'] = ret['changed'] or disks_module.update_storage_domains(ret['id']) # We need to pass ID to the module, so in case we want detach/attach disk # we have this ID specified to attach/detach method: module.params['id'] = ret['id'] if disk is None else disk.id # Upload disk image in case it's new disk or force parameter is passed: if module.params['upload_image_path'] and (is_new_disk or module.params['force']): uploaded = upload_disk_image(connection, module) ret['changed'] = ret['changed'] or uploaded # Download disk image in case it's file don't exist or force parameter is passed: if ( module.params['download_image_path'] and (not os.path.isfile(module.params['download_image_path']) or module.params['force']) ): downloaded = download_disk_image(connection, module) ret['changed'] = ret['changed'] or downloaded # Disk sparsify, only if disk is of image type: disk = disks_service.disk_service(module.params['id']).get() if disk.storage_type == otypes.DiskStorageType.IMAGE: ret = disks_module.action( action='sparsify', 
action_condition=lambda d: module.params['sparsify'], wait_condition=lambda d: d.status == otypes.DiskStatus.OK, ) # Export disk as image to glance domain elif state == 'exported': disk = disks_module.search_entity() if disk is None: module.fail_json( msg="Can not export given disk '%s', it doesn't exist" % module.params.get('name') or module.params.get('id') ) if disk.storage_type == otypes.DiskStorageType.IMAGE: ret = disks_module.action( action='export', action_condition=lambda d: module.params['image_provider'], wait_condition=lambda d: d.status == otypes.DiskStatus.OK, storage_domain=otypes.StorageDomain(name=module.params['image_provider']), ) elif state == 'absent': ret = disks_module.remove() # If VM was passed attach/detach disks to/from the VM: if module.params.get('vm_id') is not None or module.params.get('vm_name') is not None and state != 'absent': vms_service = connection.system_service().vms_service() # If `vm_id` isn't specified, find VM by name: vm_id = module.params['vm_id'] if vm_id is None: vm_id = getattr(search_by_name(vms_service, module.params['vm_name']), 'id', None) if vm_id is None: module.fail_json( msg="VM don't exists, please create it first." ) disk_attachments_service = vms_service.vm_service(vm_id).disk_attachments_service() disk_attachments_module = DiskAttachmentsModule( connection=connection, module=module, service=disk_attachments_service, changed=ret['changed'] if ret else False, ) if state == 'present' or state == 'attached': ret = disk_attachments_module.create() if lun is None: wait( service=disk_attachments_service.service(ret['id']), condition=lambda d: follow_link(connection, d.disk).status == otypes.DiskStatus.OK, wait=module.params['wait'], timeout=module.params['timeout'], ) elif state == 'detached': ret = disk_attachments_module.remove() # When the host parameter is specified and the disk is not being # removed, refresh the information about the LUN. if state != 'absent' and host: hosts_service = connection.system_service().hosts_service() host_id = get_id_by_name(hosts_service, host) disks_service.disk_service(disk.id).refresh_lun(otypes.Host(id=host_id)) module.exit_json(**ret) except Exception as e: module.fail_json(msg=str(e), exception=traceback.format_exc()) finally: connection.close(logout=auth.get('token') is None) if __name__ == "__main__": main()
gpl-3.0
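The module above streams disk images through the engine's image-transfer proxy in 128 KiB chunks (see upload_disk_image and download_disk_image). A stdlib-only sketch of that chunked-transfer loop, with io.BytesIO standing in for the image file and a callback standing in for the proxy connection (both stand-ins are assumptions, not the module's API):

# Sketch: the 128 KiB chunked-read loop used by upload_disk_image() above.
import io

BUF_SIZE = 128 * 1024

def stream(src, size, send):
    pos = 0
    while pos < size:
        to_read = min(size - pos, BUF_SIZE)
        chunk = src.read(to_read)
        if not chunk:
            raise RuntimeError("Unexpected end of file at pos=%d" % pos)
        send(chunk)
        pos += len(chunk)
    return pos

data = io.BytesIO(b'x' * (300 * 1024))      # 300 KiB fake image
sent = []
assert stream(data, 300 * 1024, sent.append) == 300 * 1024
assert len(sent) == 3                        # 128 + 128 + 44 KiB chunks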
mdesaive/admindb
admindb/admindb/settings.py
1
4878
""" Django settings for admindb project. Generated by 'django-admin startproject' using Django 1.9.4. For more information on this file, see https://docs.djangoproject.com/en/1.9/topics/settings/ For the full list of settings and their values, see https://docs.djangoproject.com/en/1.9/ref/settings/ Author: Melanie Desaive, desaive@gmx.de Copyrigh (c) 2016, Melanie Desaive All rights reserved. Licensed under the GNU General Public License. See: COPYING.txt in project root. This program is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program. If not, see <http://www.gnu.org/licenses/>. """ import os # Database configuration with secret password excluded from GIT and in # seperate file. from .settings_secret import * # Build paths inside the project like this: os.path.join(BASE_DIR, ...) BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) # Quick-start development settings - unsuitable for production # See https://docs.djangoproject.com/en/1.9/howto/deployment/checklist/ # SECURITY WARNING: keep the secret key used in production secret! SECRET_KEY = '=a2)z5+5_jf*9&i14$s5@u-)8j9ipder!k_ebe$=opf7x3n=u@' # SECURITY WARNING: don't run with debug turned on in production! DEBUG = True # Uncommented block due to warning: # # ?: (1_8.W001) The standalone TEMPLATE_* settings were deprecated in Django 1.8 # and the TEMPLATES dictionary takes precedence. You must put the values of the # following settings into your default TEMPLATES dict: # TEMPLATE_CONTEXT_PROCESSORS, TEMPLATE_DEBUG. 
# # TEMPLATE_DEBUG = True # # TEMPLATE_CONTEXT_PROCESSORS = ( # 'django.contrib.auth.context_processors.auth', # 'django.core.context_processors.debug', # 'django.core.context_processors.i18n', # 'django.core.context_processors.request', # 'django.core.context_processors.static', # 'django.contrib.messages.context_processors.messages', # ) ALLOWED_HOSTS = [] # Application definition INSTALLED_APPS = [ 'django.contrib.admin', 'django.contrib.auth', 'django.contrib.contenttypes', 'django.contrib.sessions', 'django.contrib.messages', 'django.contrib.staticfiles', 'itservices', 'itservices.systems', 'itservices.systems.hardware', 'itservices.systems.linuxos', ] MIDDLEWARE_CLASSES = [ 'django.middleware.security.SecurityMiddleware', 'django.contrib.sessions.middleware.SessionMiddleware', 'django.middleware.common.CommonMiddleware', 'django.middleware.csrf.CsrfViewMiddleware', 'django.contrib.auth.middleware.AuthenticationMiddleware', 'django.contrib.auth.middleware.SessionAuthenticationMiddleware', 'django.contrib.messages.middleware.MessageMiddleware', 'django.middleware.clickjacking.XFrameOptionsMiddleware', ] ROOT_URLCONF = 'admindb.urls' TEMPLATES = [ { 'BACKEND': 'django.template.backends.django.DjangoTemplates', 'DIRS': [], 'APP_DIRS': True, 'OPTIONS': { 'context_processors': [ 'django.template.context_processors.debug', 'django.template.context_processors.request', 'django.contrib.auth.context_processors.auth', 'django.contrib.messages.context_processors.messages', 'django.template.context_processors.static', 'django.template.context_processors.i18n', ], }, }, ] WSGI_APPLICATION = 'admindb.wsgi.application' # Password validation # https://docs.djangoproject.com/en/1.9/ref/settings/#auth-password-validators AUTH_PASSWORD_VALIDATORS = [ { 'NAME': 'django.contrib.auth.password_validation.' + 'UserAttributeSimilarityValidator', }, { 'NAME': 'django.contrib.auth.password_validation.' + 'MinimumLengthValidator', }, { 'NAME': 'django.contrib.auth.password_validation.' + 'CommonPasswordValidator', }, { 'NAME': 'django.contrib.auth.password_validation.' + 'NumericPasswordValidator', }, ] # Internationalization # https://docs.djangoproject.com/en/1.9/topics/i18n/ LANGUAGE_CODE = 'en-us' TIME_ZONE = 'UTC' USE_I18N = True USE_L10N = True USE_TZ = True # Static files (CSS, JavaScript, Images) # https://docs.djangoproject.com/en/1.9/howto/static-files/ STATIC_URL = '/static/'
gpl-3.0
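The settings module above pulls credentials from a settings_secret module that is deliberately kept out of version control. A purely hypothetical sketch of what such a file might contain; every name and value here is an assumption, since the real file is not in the repository:

# Hypothetical admindb/settings_secret.py (never committed to git).
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.postgresql_psycopg2',
        'NAME': 'admindb',
        'USER': 'admindb',
        'PASSWORD': 'change-me',   # the secret the pattern exists to hide
        'HOST': 'localhost',
        'PORT': '5432',
    }
}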
tiagocardosos/stoq
stoqlib/l10n/l10n.py
3
1931
# -*- coding: utf-8 -*-
# vi:si:et:sw=4:sts=4:ts=4

##
## Copyright (C) 2012 Async Open Source
##
## This program is free software; you can redistribute it and/or
## modify it under the terms of the GNU Lesser General Public License
## as published by the Free Software Foundation; either version 2
## of the License, or (at your option) any later version.
##
## This program is distributed in the hope that it will be useful,
## but WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
## GNU Lesser General Public License for more details.
##
## You should have received a copy of the GNU Lesser General Public License
## along with this program; if not, write to the Free Software
## Foundation, Inc., or visit: http://www.gnu.org/.
##
##
## Author(s): Stoq Team <stoq-devel@async.com.br>
##
##

from kiwi.python import namedAny

from stoqlib.l10n.generic import generic

# FIXME: When fixing bug 5100, this won't be necessary anymore.
# This maps country lists in stoqlib.lib.countries to ISO 639-1
iso639_list = {
    # FIXME: We should use a combo in parameters instead.
    'brazil': 'br',
    'brasil': 'br',
    'sverige': 'sv',
    'sweden': 'sv',
}


def get_l10n_module(country=None):
    if not country:
        from stoqlib.lib.parameters import sysparam
        country = sysparam.get_string('COUNTRY_SUGGESTED')

    short = iso639_list.get(country.lower(), None)
    if short is None:
        return generic

    path = 'stoqlib.l10n.%s.%s' % (short, short)
    try:
        module = namedAny(path)
    except (ImportError, AttributeError):
        return generic

    return module


def get_l10n_field(field_name, country=None):
    module = get_l10n_module(country)
    field = getattr(module, field_name, None)
    if field is None:
        assert hasattr(generic, field_name)
        field = getattr(generic, field_name)
    return field
gpl-2.0
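get_l10n_module above resolves 'stoqlib.l10n.<code>.<code>' via kiwi's namedAny and falls back to the generic module on failure. A stdlib approximation of that lookup, with importlib standing in for kiwi.python.namedAny and a plain placeholder standing in for stoqlib.l10n.generic (both substitutions are assumptions for the sketch):

# Sketch: country-to-module lookup with a generic fallback.
import importlib

iso639_list = {'brazil': 'br', 'brasil': 'br', 'sverige': 'sv', 'sweden': 'sv'}
generic = object()                    # placeholder for stoqlib.l10n.generic

def get_l10n_module(country):
    short = iso639_list.get(country.lower())
    if short is None:
        return generic
    path = 'stoqlib.l10n.%s' % short
    try:
        return getattr(importlib.import_module(path), short)
    except (ImportError, AttributeError):
        return generic

# Without stoqlib installed, every lookup falls back to the generic module:
assert get_l10n_module('Brazil') is generic
assert get_l10n_module('Atlantis') is generic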
gauribhoite/personfinder
env/google_appengine/lib/django-1.3/django/contrib/contenttypes/generic.py
155
17218
""" Classes allowing "generic" relations through ContentType and object-id fields. """ from django.core.exceptions import ObjectDoesNotExist from django.db import connection from django.db.models import signals from django.db import models, router, DEFAULT_DB_ALIAS from django.db.models.fields.related import RelatedField, Field, ManyToManyRel from django.db.models.loading import get_model from django.forms import ModelForm from django.forms.models import BaseModelFormSet, modelformset_factory, save_instance from django.contrib.admin.options import InlineModelAdmin, flatten_fieldsets from django.utils.encoding import smart_unicode from django.utils.functional import curry from django.contrib.contenttypes.models import ContentType class GenericForeignKey(object): """ Provides a generic relation to any object through content-type/object-id fields. """ def __init__(self, ct_field="content_type", fk_field="object_id"): self.ct_field = ct_field self.fk_field = fk_field def contribute_to_class(self, cls, name): self.name = name self.model = cls self.cache_attr = "_%s_cache" % name cls._meta.add_virtual_field(self) # For some reason I don't totally understand, using weakrefs here doesn't work. signals.pre_init.connect(self.instance_pre_init, sender=cls, weak=False) # Connect myself as the descriptor for this field setattr(cls, name, self) def instance_pre_init(self, signal, sender, args, kwargs, **_kwargs): """ Handles initializing an object with the generic FK instaed of content-type/object-id fields. """ if self.name in kwargs: value = kwargs.pop(self.name) kwargs[self.ct_field] = self.get_content_type(obj=value) kwargs[self.fk_field] = value._get_pk_val() def get_content_type(self, obj=None, id=None, using=None): # Convenience function using get_model avoids a circular import when # using this model ContentType = get_model("contenttypes", "contenttype") if obj: return ContentType.objects.db_manager(obj._state.db).get_for_model(obj) elif id: return ContentType.objects.db_manager(using).get_for_id(id) else: # This should never happen. I love comments like this, don't you? raise Exception("Impossible arguments to GFK.get_content_type!") def __get__(self, instance, instance_type=None): if instance is None: return self try: return getattr(instance, self.cache_attr) except AttributeError: rel_obj = None # Make sure to use ContentType.objects.get_for_id() to ensure that # lookups are cached (see ticket #5570). This takes more code than # the naive ``getattr(instance, self.ct_field)``, but has better # performance when dealing with GFKs in loops and such. f = self.model._meta.get_field(self.ct_field) ct_id = getattr(instance, f.get_attname(), None) if ct_id: ct = self.get_content_type(id=ct_id, using=instance._state.db) try: rel_obj = ct.get_object_for_this_type(pk=getattr(instance, self.fk_field)) except ObjectDoesNotExist: pass setattr(instance, self.cache_attr, rel_obj) return rel_obj def __set__(self, instance, value): if instance is None: raise AttributeError(u"%s must be accessed via instance" % self.related.opts.object_name) ct = None fk = None if value is not None: ct = self.get_content_type(obj=value) fk = value._get_pk_val() setattr(instance, self.ct_field, ct) setattr(instance, self.fk_field, fk) setattr(instance, self.cache_attr, value) class GenericRelation(RelatedField, Field): """Provides an accessor to generic related objects (e.g. 
comments)""" def __init__(self, to, **kwargs): kwargs['verbose_name'] = kwargs.get('verbose_name', None) kwargs['rel'] = GenericRel(to, related_name=kwargs.pop('related_name', None), limit_choices_to=kwargs.pop('limit_choices_to', None), symmetrical=kwargs.pop('symmetrical', True)) # Override content-type/object-id field names on the related class self.object_id_field_name = kwargs.pop("object_id_field", "object_id") self.content_type_field_name = kwargs.pop("content_type_field", "content_type") kwargs['blank'] = True kwargs['editable'] = False kwargs['serialize'] = False Field.__init__(self, **kwargs) def get_choices_default(self): return Field.get_choices(self, include_blank=False) def value_to_string(self, obj): qs = getattr(obj, self.name).all() return smart_unicode([instance._get_pk_val() for instance in qs]) def m2m_db_table(self): return self.rel.to._meta.db_table def m2m_column_name(self): return self.object_id_field_name def m2m_reverse_name(self): return self.rel.to._meta.pk.column def m2m_target_field_name(self): return self.model._meta.pk.name def m2m_reverse_target_field_name(self): return self.rel.to._meta.pk.name def contribute_to_class(self, cls, name): super(GenericRelation, self).contribute_to_class(cls, name) # Save a reference to which model this class is on for future use self.model = cls # Add the descriptor for the m2m relation setattr(cls, self.name, ReverseGenericRelatedObjectsDescriptor(self)) def contribute_to_related_class(self, cls, related): pass def set_attributes_from_rel(self): pass def get_internal_type(self): return "ManyToManyField" def db_type(self, connection): # Since we're simulating a ManyToManyField, in effect, best return the # same db_type as well. return None def extra_filters(self, pieces, pos, negate): """ Return an extra filter to the queryset so that the results are filtered on the appropriate content type. """ if negate: return [] ContentType = get_model("contenttypes", "contenttype") content_type = ContentType.objects.get_for_model(self.model) prefix = "__".join(pieces[:pos + 1]) return [("%s__%s" % (prefix, self.content_type_field_name), content_type)] def bulk_related_objects(self, objs, using=DEFAULT_DB_ALIAS): """ Return all objects related to ``objs`` via this ``GenericRelation``. """ return self.rel.to._base_manager.db_manager(using).filter(**{ "%s__pk" % self.content_type_field_name: ContentType.objects.db_manager(using).get_for_model(self.model).pk, "%s__in" % self.object_id_field_name: [obj.pk for obj in objs] }) class ReverseGenericRelatedObjectsDescriptor(object): """ This class provides the functionality that makes the related-object managers available as attributes on a model class, for fields that have multiple "remote" values and have a GenericRelation defined in their model (rather than having another model pointed *at* them). In the example "article.publications", the publications attribute is a ReverseGenericRelatedObjectsDescriptor instance. """ def __init__(self, field): self.field = field def __get__(self, instance, instance_type=None): if instance is None: return self # This import is done here to avoid circular import importing this module from django.contrib.contenttypes.models import ContentType # Dynamically create a class that subclasses the related model's # default manager. 
rel_model = self.field.rel.to superclass = rel_model._default_manager.__class__ RelatedManager = create_generic_related_manager(superclass) qn = connection.ops.quote_name manager = RelatedManager( model = rel_model, instance = instance, symmetrical = (self.field.rel.symmetrical and instance.__class__ == rel_model), join_table = qn(self.field.m2m_db_table()), source_col_name = qn(self.field.m2m_column_name()), target_col_name = qn(self.field.m2m_reverse_name()), content_type = ContentType.objects.db_manager(instance._state.db).get_for_model(instance), content_type_field_name = self.field.content_type_field_name, object_id_field_name = self.field.object_id_field_name ) return manager def __set__(self, instance, value): if instance is None: raise AttributeError("Manager must be accessed via instance") manager = self.__get__(instance) manager.clear() for obj in value: manager.add(obj) def create_generic_related_manager(superclass): """ Factory function for a manager that subclasses 'superclass' (which is a Manager) and adds behavior for generic related objects. """ class GenericRelatedObjectManager(superclass): def __init__(self, model=None, core_filters=None, instance=None, symmetrical=None, join_table=None, source_col_name=None, target_col_name=None, content_type=None, content_type_field_name=None, object_id_field_name=None): super(GenericRelatedObjectManager, self).__init__() self.core_filters = core_filters or {} self.model = model self.content_type = content_type self.symmetrical = symmetrical self.instance = instance self.join_table = join_table self.join_table = model._meta.db_table self.source_col_name = source_col_name self.target_col_name = target_col_name self.content_type_field_name = content_type_field_name self.object_id_field_name = object_id_field_name self.pk_val = self.instance._get_pk_val() def get_query_set(self): db = self._db or router.db_for_read(self.model, instance=self.instance) query = { '%s__pk' % self.content_type_field_name : self.content_type.id, '%s__exact' % self.object_id_field_name : self.pk_val, } return superclass.get_query_set(self).using(db).filter(**query) def add(self, *objs): for obj in objs: if not isinstance(obj, self.model): raise TypeError("'%s' instance expected" % self.model._meta.object_name) setattr(obj, self.content_type_field_name, self.content_type) setattr(obj, self.object_id_field_name, self.pk_val) obj.save() add.alters_data = True def remove(self, *objs): db = router.db_for_write(self.model, instance=self.instance) for obj in objs: obj.delete(using=db) remove.alters_data = True def clear(self): db = router.db_for_write(self.model, instance=self.instance) for obj in self.all(): obj.delete(using=db) clear.alters_data = True def create(self, **kwargs): kwargs[self.content_type_field_name] = self.content_type kwargs[self.object_id_field_name] = self.pk_val db = router.db_for_write(self.model, instance=self.instance) return super(GenericRelatedObjectManager, self).using(db).create(**kwargs) create.alters_data = True return GenericRelatedObjectManager class GenericRel(ManyToManyRel): def __init__(self, to, related_name=None, limit_choices_to=None, symmetrical=True): self.to = to self.related_name = related_name self.limit_choices_to = limit_choices_to or {} self.symmetrical = symmetrical self.multiple = True self.through = None class BaseGenericInlineFormSet(BaseModelFormSet): """ A formset for generic inline objects to a parent. 
""" def __init__(self, data=None, files=None, instance=None, save_as_new=None, prefix=None, queryset=None): # Avoid a circular import. from django.contrib.contenttypes.models import ContentType opts = self.model._meta self.instance = instance self.rel_name = '-'.join(( opts.app_label, opts.object_name.lower(), self.ct_field.name, self.ct_fk_field.name, )) if self.instance is None or self.instance.pk is None: qs = self.model._default_manager.none() else: if queryset is None: queryset = self.model._default_manager qs = queryset.filter(**{ self.ct_field.name: ContentType.objects.get_for_model(self.instance), self.ct_fk_field.name: self.instance.pk, }) super(BaseGenericInlineFormSet, self).__init__( queryset=qs, data=data, files=files, prefix=prefix ) #@classmethod def get_default_prefix(cls): opts = cls.model._meta return '-'.join((opts.app_label, opts.object_name.lower(), cls.ct_field.name, cls.ct_fk_field.name, )) get_default_prefix = classmethod(get_default_prefix) def save_new(self, form, commit=True): # Avoid a circular import. from django.contrib.contenttypes.models import ContentType kwargs = { self.ct_field.get_attname(): ContentType.objects.get_for_model(self.instance).pk, self.ct_fk_field.get_attname(): self.instance.pk, } new_obj = self.model(**kwargs) return save_instance(form, new_obj, commit=commit) def generic_inlineformset_factory(model, form=ModelForm, formset=BaseGenericInlineFormSet, ct_field="content_type", fk_field="object_id", fields=None, exclude=None, extra=3, can_order=False, can_delete=True, max_num=None, formfield_callback=lambda f: f.formfield()): """ Returns an ``GenericInlineFormSet`` for the given kwargs. You must provide ``ct_field`` and ``object_id`` if they different from the defaults ``content_type`` and ``object_id`` respectively. """ opts = model._meta # Avoid a circular import. 
from django.contrib.contenttypes.models import ContentType # if there is no field called `ct_field` let the exception propagate ct_field = opts.get_field(ct_field) if not isinstance(ct_field, models.ForeignKey) or ct_field.rel.to != ContentType: raise Exception("fk_name '%s' is not a ForeignKey to ContentType" % ct_field) fk_field = opts.get_field(fk_field) # let the exception propagate if exclude is not None: exclude = list(exclude) exclude.extend([ct_field.name, fk_field.name]) else: exclude = [ct_field.name, fk_field.name] FormSet = modelformset_factory(model, form=form, formfield_callback=formfield_callback, formset=formset, extra=extra, can_delete=can_delete, can_order=can_order, fields=fields, exclude=exclude, max_num=max_num) FormSet.ct_field = ct_field FormSet.ct_fk_field = fk_field return FormSet class GenericInlineModelAdmin(InlineModelAdmin): ct_field = "content_type" ct_fk_field = "object_id" formset = BaseGenericInlineFormSet def get_formset(self, request, obj=None): if self.declared_fieldsets: fields = flatten_fieldsets(self.declared_fieldsets) else: fields = None if self.exclude is None: exclude = [] else: exclude = list(self.exclude) exclude.extend(self.get_readonly_fields(request, obj)) exclude = exclude or None defaults = { "ct_field": self.ct_field, "fk_field": self.ct_fk_field, "form": self.form, "formfield_callback": curry(self.formfield_for_dbfield, request=request), "formset": self.formset, "extra": self.extra, "can_delete": self.can_delete, "can_order": False, "fields": fields, "max_num": self.max_num, "exclude": exclude } return generic_inlineformset_factory(self.model, **defaults) class GenericStackedInline(GenericInlineModelAdmin): template = 'admin/edit_inline/stacked.html' class GenericTabularInline(GenericInlineModelAdmin): template = 'admin/edit_inline/tabular.html'
apache-2.0
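The GenericForeignKey defined in the module above is typically paired with two concrete columns on the model that declares it. A usage sketch, assuming a configured Django 1.3 project; the TaggedItem model is illustrative, not part of this file:

# Sketch: the classic generic-relation pattern built on the module above.
from django.db import models
from django.contrib.contenttypes.models import ContentType
from django.contrib.contenttypes import generic

class TaggedItem(models.Model):
    tag = models.SlugField()
    # The two concrete columns backing the generic relation:
    content_type = models.ForeignKey(ContentType)
    object_id = models.PositiveIntegerField()
    # The virtual field that resolves them to a real object:
    content_object = generic.GenericForeignKey('content_type', 'object_id')

# Any model instance can then be tagged:
#     TaggedItem.objects.create(content_object=some_obj, tag='important')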
dmulholland/ivy
ivy/utils.py
1
3670
# ------------------------------------------------------------------------------
# This module contains utility functions used throughout the application.
# ------------------------------------------------------------------------------

import os
import shutil
import unicodedata
import re
import sys

from . import hooks


# Clear the contents of a directory.
def cleardir(dirpath: str):
    if os.path.isdir(dirpath):
        for name in os.listdir(dirpath):
            path = os.path.join(dirpath, name)
            if os.path.isfile(path):
                os.remove(path)
            elif os.path.isdir(path):
                shutil.rmtree(path)


# Copy the contents of 'srcdir' to 'dstdir'. The destination directory will be
# created if it does not already exist. If 'noclobber' is true, existing files
# will not be overwritten.
def copydir(srcdir: str, dstdir: str, noclobber: bool = False):
    if not os.path.exists(srcdir):
        return
    if not os.path.exists(dstdir):
        os.makedirs(dstdir)
    for name in os.listdir(srcdir):
        src = os.path.join(srcdir, name)
        dst = os.path.join(dstdir, name)
        if name in ('__pycache__', '.DS_Store'):
            continue
        if os.path.isfile(src):
            copyfile(src, dst, noclobber)
        elif os.path.isdir(src):
            copydir(src, dst, noclobber)


# Copy the file 'src' as 'dst'. If 'noclobber' is true, an existing 'dst' file
# will not be overwritten. This function attempts to avoid unnecessarily
# overwriting existing files with identical copies. If 'dst' exists and has
# the same size and mtime as 'src', the copy will be aborted.
def copyfile(src: str, dst: str, noclobber: bool = False):
    if os.path.isfile(dst):
        if noclobber:
            return
        if os.path.getmtime(src) == os.path.getmtime(dst):
            if os.path.getsize(src) == os.path.getsize(dst):
                return
    shutil.copy2(src, dst)


# Write a string to a file. Creates parent directories if required.
def writefile(path: str, content: str):
    path = os.path.abspath(path)
    if not os.path.isdir(os.path.dirname(path)):
        os.makedirs(os.path.dirname(path))
    with open(path, 'w', encoding='utf-8') as file:
        file.write(content)


# Default slug-preparation function; returns a slugified version of the
# supplied string. This function is used to sanitize url components, etc.
def slugify(string: str) -> str:
    out = unicodedata.normalize('NFKD', string)
    out = out.encode('ascii', errors='ignore').decode('ascii')
    out = out.lower()
    out = out.replace("'", '')
    out = re.sub(r'[^a-z0-9-]+', '-', out)
    out = re.sub(r'--+', '-', out)
    out = out.strip('-')
    return hooks.filter('slugify', out, string)


# A drop-in replacement for the print function that won't choke when
# attempting to print unicode characters to a non-unicode terminal. Known
# problem characters are replaced with ascii alternatives; any other
# unprintable characters are replaced with a '?'.
def safeprint(*objects, sep=' ', end='\n', file=sys.stdout):
    if file.encoding.lower() == 'utf-8':
        print(*objects, sep=sep, end=end, file=file)
    else:
        strings, enc = [], file.encoding
        for obj in objects:
            string = str(obj).replace('─', '-').replace('·', '|')
            string = string.encode(enc, errors='replace').decode(enc)
            strings.append(string)
        print(*strings, sep=sep, end=end, file=file)


# Print a grey line across the width of the terminal.
def termline():
    cols, _ = shutil.get_terminal_size()
    line = '\u001B[90m' + '─' * cols + '\u001B[0m'
    safeprint(line)
unlicense
emxys1/imx6rex-bombardier-base-linux-3.10.17
tools/perf/scripts/python/Perf-Trace-Util/lib/Perf/Trace/Core.py
11088
3246
# Core.py - Python extension for perf script, core functions
#
# Copyright (C) 2010 by Tom Zanussi <tzanussi@gmail.com>
#
# This software may be distributed under the terms of the GNU General
# Public License ("GPL") version 2 as published by the Free Software
# Foundation.

from collections import defaultdict

def autodict():
    return defaultdict(autodict)

flag_fields = autodict()
symbolic_fields = autodict()

def define_flag_field(event_name, field_name, delim):
    flag_fields[event_name][field_name]['delim'] = delim

def define_flag_value(event_name, field_name, value, field_str):
    flag_fields[event_name][field_name]['values'][value] = field_str

def define_symbolic_field(event_name, field_name):
    # nothing to do, really
    pass

def define_symbolic_value(event_name, field_name, value, field_str):
    symbolic_fields[event_name][field_name]['values'][value] = field_str

def flag_str(event_name, field_name, value):
    string = ""

    if flag_fields[event_name][field_name]:
        print_delim = 0
        keys = flag_fields[event_name][field_name]['values'].keys()
        keys.sort()
        for idx in keys:
            if not value and not idx:
                string += flag_fields[event_name][field_name]['values'][idx]
                break
            if idx and (value & idx) == idx:
                if print_delim and flag_fields[event_name][field_name]['delim']:
                    string += " " + flag_fields[event_name][field_name]['delim'] + " "
                string += flag_fields[event_name][field_name]['values'][idx]
                print_delim = 1
                value &= ~idx

    return string

def symbol_str(event_name, field_name, value):
    string = ""

    if symbolic_fields[event_name][field_name]:
        keys = symbolic_fields[event_name][field_name]['values'].keys()
        keys.sort()
        for idx in keys:
            if not value and not idx:
                string = symbolic_fields[event_name][field_name]['values'][idx]
                break
            if (value == idx):
                string = symbolic_fields[event_name][field_name]['values'][idx]
                break

    return string

trace_flags = { 0x00: "NONE",
                0x01: "IRQS_OFF",
                0x02: "IRQS_NOSUPPORT",
                0x04: "NEED_RESCHED",
                0x08: "HARDIRQ",
                0x10: "SOFTIRQ" }

def trace_flag_str(value):
    string = ""
    print_delim = 0

    keys = trace_flags.keys()

    for idx in keys:
        if not value and not idx:
            string += "NONE"
            break

        if idx and (value & idx) == idx:
            if print_delim:
                string += " | "
            string += trace_flags[idx]
            print_delim = 1
            value &= ~idx

    return string

def taskState(state):
    states = {
        0 : "R",
        1 : "S",
        2 : "D",
        64: "DEAD"
    }

    if state not in states:
        return "Unknown"

    return states[state]

class EventHeaders:
    def __init__(self, common_cpu, common_secs, common_nsecs,
                 common_pid, common_comm):
        self.cpu = common_cpu
        self.secs = common_secs
        self.nsecs = common_nsecs
        self.pid = common_pid
        self.comm = common_comm

    def ts(self):
        return (self.secs * (10 ** 9)) + self.nsecs

    def ts_format(self):
        return "%d.%d" % (self.secs, int(self.nsecs / 1000))
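
# Usage sketch (the event and field names below are invented): after
# registering a delimiter and per-bit names, flag_str() renders a bitmask
# by peeling off each set bit in ascending key order:
#
#   define_flag_field("irq:softirq_entry", "flags", "|")
#   define_flag_value("irq:softirq_entry", "flags", 0x1, "PENDING")
#   define_flag_value("irq:softirq_entry", "flags", 0x2, "ACTIVE")
#   flag_str("irq:softirq_entry", "flags", 0x3)   # -> "PENDING | ACTIVE"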
gpl-2.0
SaikWolf/gnuradio
gr-qtgui/apps/qt_digital.py
58
9849
#!/usr/bin/env python
#
# Copyright 2011 Free Software Foundation, Inc.
#
# This file is part of GNU Radio
#
# GNU Radio is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3, or (at your option)
# any later version.
#
# GNU Radio is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with GNU Radio; see the file COPYING.  If not, write to
# the Free Software Foundation, Inc., 51 Franklin Street,
# Boston, MA 02110-1301, USA.
#

from gnuradio import gr, digital
from gnuradio import blocks
from gnuradio import filter
from gnuradio import channels
from gnuradio import eng_notation
import sys

try:
    from gnuradio import qtgui
    from PyQt4 import QtGui, QtCore
    import sip
except ImportError:
    print "Error: Program requires PyQt4 and gr-qtgui."
    sys.exit(1)

try:
    import scipy
except ImportError:
    print "Error: Program requires scipy (see: www.scipy.org)."
    sys.exit(1)

try:
    from qt_digital_window import Ui_DigitalWindow
except ImportError:
    print "Error: could not find qt_digital_window.py:"
    print "\t\"Please run: pyuic4 qt_digital_window.ui -o qt_digital_window.py\""
    sys.exit(1)


class dialog_box(QtGui.QMainWindow):
    def __init__(self, snkTx, snkRx, fg, parent=None):
        QtGui.QWidget.__init__(self, parent)
        self.gui = Ui_DigitalWindow()
        self.gui.setupUi(self)

        self.fg = fg

        self.set_sample_rate(self.fg.sample_rate())

        self.set_snr(self.fg.snr())
        self.set_frequency(self.fg.frequency_offset())
        self.set_time_offset(self.fg.timing_offset())

        self.set_gain_mu(self.fg.rx_gain_mu())
        self.set_loop_bw(self.fg.loop_bw())

        # Add the qtsnk widgets to the hlayout box
        self.gui.sinkLayout.addWidget(snkTx)
        self.gui.sinkLayout.addWidget(snkRx)

        # Connect up some signals
        self.connect(self.gui.pauseButton, QtCore.SIGNAL("clicked()"),
                     self.pauseFg)
        self.connect(self.gui.sampleRateEdit, QtCore.SIGNAL("editingFinished()"),
                     self.sampleRateEditText)
        self.connect(self.gui.snrEdit, QtCore.SIGNAL("editingFinished()"),
                     self.snrEditText)
        self.connect(self.gui.freqEdit, QtCore.SIGNAL("editingFinished()"),
                     self.freqEditText)
        self.connect(self.gui.timeEdit, QtCore.SIGNAL("editingFinished()"),
                     self.timeEditText)
        self.connect(self.gui.gainMuEdit, QtCore.SIGNAL("editingFinished()"),
                     self.gainMuEditText)
        self.connect(self.gui.alphaEdit, QtCore.SIGNAL("editingFinished()"),
                     self.alphaEditText)

    def pauseFg(self):
        if(self.gui.pauseButton.text() == "Pause"):
            self.fg.stop()
            self.fg.wait()
            self.gui.pauseButton.setText("Unpause")
        else:
            self.fg.start()
            self.gui.pauseButton.setText("Pause")

    # Accessor functions for Gui to manipulate system parameters
    def set_sample_rate(self, sr):
        ssr = eng_notation.num_to_str(sr)
        self.gui.sampleRateEdit.setText(QtCore.QString("%1").arg(ssr))

    def sampleRateEditText(self):
        try:
            rate = self.gui.sampleRateEdit.text().toAscii()
            srate = eng_notation.str_to_num(rate)
            self.fg.set_sample_rate(srate)
        except RuntimeError:
            pass

    # Accessor functions for Gui to manipulate channel model
    def set_snr(self, snr):
        self.gui.snrEdit.setText(QtCore.QString("%1").arg(snr))

    def set_frequency(self, fo):
        self.gui.freqEdit.setText(QtCore.QString("%1").arg(fo))

    def set_time_offset(self, to):
        self.gui.timeEdit.setText(QtCore.QString("%1").arg(to))

    def snrEditText(self):
        try:
            snr = self.gui.snrEdit.text().toDouble()[0]
            self.fg.set_snr(snr)
        except RuntimeError:
            pass

    def freqEditText(self):
        try:
            freq = self.gui.freqEdit.text().toDouble()[0]
            self.fg.set_frequency_offset(freq)
        except RuntimeError:
            pass

    def timeEditText(self):
        try:
            to = self.gui.timeEdit.text().toDouble()[0]
            self.fg.set_timing_offset(to)
        except RuntimeError:
            pass

    # Accessor functions for Gui to manipulate receiver parameters
    def set_gain_mu(self, gain):
        self.gui.gainMuEdit.setText(QtCore.QString("%1").arg(gain))

    def set_loop_bw(self, bw):
        self.gui.alphaEdit.setText(QtCore.QString("%1").arg(bw))

    def alphaEditText(self):
        try:
            bw = self.gui.alphaEdit.text().toDouble()[0]
            self.fg.set_loop_bw(bw)
        except RuntimeError:
            pass

    def gainMuEditText(self):
        try:
            gain = self.gui.gainMuEdit.text().toDouble()[0]
            self.fg.set_rx_gain_mu(gain)
        except RuntimeError:
            pass


class my_top_block(gr.top_block):
    def __init__(self):
        gr.top_block.__init__(self)

        self.qapp = QtGui.QApplication(sys.argv)

        self._sample_rate = 2000e3
        self.sps = 2
        self.excess_bw = 0.35
        self.gray_code = digital.mod_codes.GRAY_CODE

        fftsize = 2048

        self.data = scipy.random.randint(0, 255, 1000)
        self.src = blocks.vector_source_b(self.data.tolist(), True)
        self.mod = digital.dqpsk_mod(self.gray_code,
                                     samples_per_symbol=self.sps,
                                     excess_bw=self.excess_bw,
                                     verbose=False, log=False)

        self.rrctaps = filter.firdes.root_raised_cosine(1, self.sps, 1,
                                                        self.excess_bw, 21)
        self.rx_rrc = filter.fir_filter_ccf(1, self.rrctaps)

        # Set up the carrier & clock recovery parameters
        self.arity = 4
        self.mu = 0.5
        self.gain_mu = 0.05
        self.omega = self.sps
        self.gain_omega = .25 * self.gain_mu * self.gain_mu
        self.omega_rel_lim = 0.05

        self._loop_bw = 2*scipy.pi/100.0
        self.fmin = -1000/self.sample_rate()
        self.fmax = 1000/self.sample_rate()

        self.receiver = digital.mpsk_receiver_cc(self.arity, 0,
                                                 self._loop_bw,
                                                 self.fmin, self.fmax,
                                                 self.mu, self.gain_mu,
                                                 self.omega, self.gain_omega,
                                                 self.omega_rel_lim)

        self.snr_dB = 15
        noise = self.get_noise_voltage(self.snr_dB)
        self.fo = 100/self.sample_rate()
        self.to = 1.0
        self.channel = channels.channel_model(noise, self.fo, self.to)

        self.thr = blocks.throttle(gr.sizeof_char, self._sample_rate)
        self.snk_tx = qtgui.sink_c(fftsize, filter.firdes.WIN_BLACKMAN_hARRIS,
                                   0, self._sample_rate*self.sps,
                                   "Tx", True, True, True, True)
        self.snk_rx = qtgui.sink_c(fftsize, filter.firdes.WIN_BLACKMAN_hARRIS,
                                   0, self._sample_rate,
                                   "Rx", True, True, True, True)

        self.connect(self.src, self.thr, self.mod, self.channel, self.snk_tx)
        self.connect(self.channel, self.rx_rrc, self.receiver, self.snk_rx)

        pyTxQt = self.snk_tx.pyqwidget()
        pyTx = sip.wrapinstance(pyTxQt, QtGui.QWidget)

        pyRxQt = self.snk_rx.pyqwidget()
        pyRx = sip.wrapinstance(pyRxQt, QtGui.QWidget)

        self.main_box = dialog_box(pyTx, pyRx, self)
        self.main_box.show()

    def get_noise_voltage(self, SNR):
        S = 0                            # dBm, assuming signal power normalized
        N = S - SNR                      # dBm
        npwr = pow(10.0, N/10.0)         # ratio
        nv = scipy.sqrt(npwr * self.sps) # convert the noise voltage
        return nv

    # System Parameters
    def sample_rate(self):
        return self._sample_rate

    def set_sample_rate(self, sr):
        self._sample_rate = sr

    # Channel Model Parameters
    def snr(self):
        return self.snr_dB

    def set_snr(self, snr):
        self.snr_dB = snr
        noise = self.get_noise_voltage(self.snr_dB)
        self.channel.set_noise_voltage(noise)

    def frequency_offset(self):
        return self.fo * self.sample_rate()

    def set_frequency_offset(self, fo):
        self.fo = fo / self.sample_rate()
        self.channel.set_frequency_offset(self.fo)

    def timing_offset(self):
        return self.to

    def set_timing_offset(self, to):
        self.to = to
        self.channel.set_timing_offset(self.to)

    # Receiver Parameters
    def rx_gain_mu(self):
        return self.gain_mu

    def rx_gain_omega(self):
        return self.gain_omega

    def set_rx_gain_mu(self, gain):
        self.gain_mu = gain
        self.gain_omega = .25 * self.gain_mu * self.gain_mu
        self.receiver.set_gain_mu(self.gain_mu)
        self.receiver.set_gain_omega(self.gain_omega)

    def set_loop_bw(self, loop_bw):
        self._loop_bw = loop_bw
        self.receiver.set_loop_bw(self._loop_bw)

    def loop_bw(self):
        return self._loop_bw


if __name__ == "__main__":
    tb = my_top_block()
    tb.start()
    tb.qapp.exec_()
    tb.stop()
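
# Worked example of get_noise_voltage() above, using the script's defaults
# snr_dB = 15 and sps = 2 (signal power normalized to 0 dBm, as the code
# assumes):
#
#   N    = 0 - 15              = -15 dB
#   npwr = 10 ** (-15 / 10.0) ~=  0.0316
#   nv   = sqrt(0.0316 * 2)   ~=  0.251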
gpl-3.0
vveerava/Openstack
neutron/tests/unit/vmware/test_nsx_opts.py
9
11831
# Copyright 2013 VMware, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#

import fixtures
import mock
from oslo.config import cfg

from neutron import manager
from neutron.openstack.common import uuidutils
from neutron.plugins.vmware.api_client import client
from neutron.plugins.vmware.api_client import version
from neutron.plugins.vmware.common import config  # noqa
from neutron.plugins.vmware.common import exceptions
from neutron.plugins.vmware.common import sync
from neutron.plugins.vmware import nsx_cluster
from neutron.plugins.vmware.nsxlib import lsn as lsnlib
from neutron.tests import base
from neutron.tests.unit import vmware

BASE_CONF_PATH = vmware.get_fake_conf('neutron.conf.test')
NSX_INI_PATH = vmware.get_fake_conf('nsx.ini.basic.test')
NSX_INI_FULL_PATH = vmware.get_fake_conf('nsx.ini.full.test')
NSX_INI_AGENTLESS_PATH = vmware.get_fake_conf('nsx.ini.agentless.test')
NSX_INI_COMBINED_PATH = vmware.get_fake_conf('nsx.ini.combined.test')
NVP_INI_DEPR_PATH = vmware.get_fake_conf('nvp.ini.full.test')


class NSXClusterTest(base.BaseTestCase):

    cluster_opts = {'default_tz_uuid': uuidutils.generate_uuid(),
                    'default_l2_gw_service_uuid': uuidutils.generate_uuid(),
                    'default_l3_gw_service_uuid': uuidutils.generate_uuid(),
                    'nsx_user': 'foo',
                    'nsx_password': 'bar',
                    'http_timeout': 25,
                    'retries': 7,
                    'redirects': 23,
                    'default_interface_name': 'baz',
                    'nsx_controllers': ['1.1.1.1:443']}

    def test_create_cluster(self):
        cluster = nsx_cluster.NSXCluster(**self.cluster_opts)
        for (k, v) in self.cluster_opts.iteritems():
            self.assertEqual(v, getattr(cluster, k))

    def test_create_cluster_default_port(self):
        opts = self.cluster_opts.copy()
        opts['nsx_controllers'] = ['1.1.1.1']
        cluster = nsx_cluster.NSXCluster(**opts)
        for (k, v) in self.cluster_opts.iteritems():
            self.assertEqual(v, getattr(cluster, k))

    def test_create_cluster_missing_required_attribute_raises(self):
        opts = self.cluster_opts.copy()
        opts.pop('default_tz_uuid')
        self.assertRaises(exceptions.InvalidClusterConfiguration,
                          nsx_cluster.NSXCluster, **opts)


class ConfigurationTest(base.BaseTestCase):

    def setUp(self):
        super(ConfigurationTest, self).setUp()
        self.useFixture(fixtures.MonkeyPatch(
            'neutron.manager.NeutronManager._instance',
            None))
        # Avoid runs of the synchronizer looping call
        patch_sync = mock.patch.object(sync, '_start_loopingcall')
        patch_sync.start()

    def _assert_required_options(self, cluster):
        self.assertEqual(cluster.nsx_controllers, ['fake_1:443', 'fake_2:443'])
        self.assertEqual(cluster.default_tz_uuid, 'fake_tz_uuid')
        self.assertEqual(cluster.nsx_user, 'foo')
        self.assertEqual(cluster.nsx_password, 'bar')

    def _assert_extra_options(self, cluster):
        self.assertEqual(13, cluster.http_timeout)
        self.assertEqual(12, cluster.redirects)
        self.assertEqual(11, cluster.retries)
        self.assertEqual('whatever', cluster.default_l2_gw_service_uuid)
        self.assertEqual('whatever', cluster.default_l3_gw_service_uuid)
        self.assertEqual('whatever', cluster.default_interface_name)

    def test_load_plugin_with_full_options(self):
        self.config_parse(args=['--config-file', BASE_CONF_PATH,
                                '--config-file', NSX_INI_FULL_PATH])
        cfg.CONF.set_override('core_plugin', vmware.PLUGIN_NAME)
        plugin = manager.NeutronManager().get_plugin()
        cluster = plugin.cluster
        self._assert_required_options(cluster)
        self._assert_extra_options(cluster)

    def test_load_plugin_with_required_options_only(self):
        self.config_parse(args=['--config-file', BASE_CONF_PATH,
                                '--config-file', NSX_INI_PATH])
        cfg.CONF.set_override('core_plugin', vmware.PLUGIN_NAME)
        plugin = manager.NeutronManager().get_plugin()
        self._assert_required_options(plugin.cluster)

    def test_defaults(self):
        self.assertEqual(5000, cfg.CONF.NSX.max_lp_per_bridged_ls)
        self.assertEqual(256, cfg.CONF.NSX.max_lp_per_overlay_ls)
        self.assertEqual(10, cfg.CONF.NSX.concurrent_connections)
        self.assertEqual('access_network', cfg.CONF.NSX.metadata_mode)
        self.assertEqual('stt', cfg.CONF.NSX.default_transport_type)
        self.assertEqual('service', cfg.CONF.NSX.replication_mode)

        self.assertIsNone(cfg.CONF.default_tz_uuid)
        self.assertEqual('admin', cfg.CONF.nsx_user)
        self.assertEqual('admin', cfg.CONF.nsx_password)
        self.assertEqual(75, cfg.CONF.http_timeout)
        self.assertEqual(2, cfg.CONF.retries)
        self.assertEqual(2, cfg.CONF.redirects)
        self.assertIsNone(cfg.CONF.nsx_controllers)
        self.assertIsNone(cfg.CONF.default_l3_gw_service_uuid)
        self.assertIsNone(cfg.CONF.default_l2_gw_service_uuid)
        self.assertEqual('breth0', cfg.CONF.default_interface_name)
        self.assertEqual(900, cfg.CONF.conn_idle_timeout)

    def test_load_api_extensions(self):
        self.config_parse(args=['--config-file', BASE_CONF_PATH,
                                '--config-file', NSX_INI_FULL_PATH])
        cfg.CONF.set_override('core_plugin', vmware.PLUGIN_NAME)
        # Load the configuration, and initialize the plugin
        manager.NeutronManager().get_plugin()
        self.assertIn('extensions', cfg.CONF.api_extensions_path)

    def test_agentless_extensions(self):
        self.config_parse(args=['--config-file', BASE_CONF_PATH,
                                '--config-file', NSX_INI_AGENTLESS_PATH])
        cfg.CONF.set_override('core_plugin', vmware.PLUGIN_NAME)
        self.assertEqual(config.AgentModes.AGENTLESS,
                         cfg.CONF.NSX.agent_mode)
        # The version returned from NSX does not really matter here
        with mock.patch.object(client.NsxApiClient,
                               'get_version',
                               return_value=version.Version("9.9")):
            with mock.patch.object(lsnlib,
                                   'service_cluster_exists',
                                   return_value=True):
                plugin = manager.NeutronManager().get_plugin()
                self.assertNotIn('agent',
                                 plugin.supported_extension_aliases)
                self.assertNotIn('dhcp_agent_scheduler',
                                 plugin.supported_extension_aliases)
                self.assertNotIn('lsn', plugin.supported_extension_aliases)

    def test_agentless_extensions_version_fail(self):
        self.config_parse(args=['--config-file', BASE_CONF_PATH,
                                '--config-file', NSX_INI_AGENTLESS_PATH])
        cfg.CONF.set_override('core_plugin', vmware.PLUGIN_NAME)
        self.assertEqual(config.AgentModes.AGENTLESS,
                         cfg.CONF.NSX.agent_mode)
        with mock.patch.object(client.NsxApiClient,
                               'get_version',
                               return_value=version.Version("3.2")):
            self.assertRaises(exceptions.NsxPluginException,
                              manager.NeutronManager)

    def test_agentless_extensions_unmet_deps_fail(self):
        self.config_parse(args=['--config-file', BASE_CONF_PATH,
                                '--config-file', NSX_INI_AGENTLESS_PATH])
        cfg.CONF.set_override('core_plugin', vmware.PLUGIN_NAME)
        self.assertEqual(config.AgentModes.AGENTLESS,
                         cfg.CONF.NSX.agent_mode)
        with mock.patch.object(client.NsxApiClient,
                               'get_version',
                               return_value=version.Version("3.2")):
            with mock.patch.object(lsnlib,
                                   'service_cluster_exists',
                                   return_value=False):
                self.assertRaises(exceptions.NsxPluginException,
                                  manager.NeutronManager)

    def test_agent_extensions(self):
        self.config_parse(args=['--config-file', BASE_CONF_PATH,
                                '--config-file', NSX_INI_FULL_PATH])
        cfg.CONF.set_override('core_plugin', vmware.PLUGIN_NAME)
        self.assertEqual(config.AgentModes.AGENT,
                         cfg.CONF.NSX.agent_mode)
        plugin = manager.NeutronManager().get_plugin()
        self.assertIn('agent',
                      plugin.supported_extension_aliases)
        self.assertIn('dhcp_agent_scheduler',
                      plugin.supported_extension_aliases)

    def test_combined_extensions(self):
        self.config_parse(args=['--config-file', BASE_CONF_PATH,
                                '--config-file', NSX_INI_COMBINED_PATH])
        cfg.CONF.set_override('core_plugin', vmware.PLUGIN_NAME)
        self.assertEqual(config.AgentModes.COMBINED,
                         cfg.CONF.NSX.agent_mode)
        with mock.patch.object(client.NsxApiClient,
                               'get_version',
                               return_value=version.Version("4.2")):
            with mock.patch.object(lsnlib,
                                   'service_cluster_exists',
                                   return_value=True):
                plugin = manager.NeutronManager().get_plugin()
                self.assertIn('agent',
                              plugin.supported_extension_aliases)
                self.assertIn('dhcp_agent_scheduler',
                              plugin.supported_extension_aliases)
                self.assertIn('lsn', plugin.supported_extension_aliases)


class OldNVPConfigurationTest(base.BaseTestCase):

    def setUp(self):
        super(OldNVPConfigurationTest, self).setUp()
        self.useFixture(fixtures.MonkeyPatch(
            'neutron.manager.NeutronManager._instance',
            None))
        # Avoid runs of the synchronizer looping call
        patch_sync = mock.patch.object(sync, '_start_loopingcall')
        patch_sync.start()

    def _assert_required_options(self, cluster):
        self.assertEqual(cluster.nsx_controllers, ['fake_1:443', 'fake_2:443'])
        self.assertEqual(cluster.nsx_user, 'foo')
        self.assertEqual(cluster.nsx_password, 'bar')
        self.assertEqual(cluster.default_tz_uuid, 'fake_tz_uuid')

    def test_load_plugin_with_deprecated_options(self):
        self.config_parse(args=['--config-file', BASE_CONF_PATH,
                                '--config-file', NVP_INI_DEPR_PATH])
        cfg.CONF.set_override('core_plugin', vmware.PLUGIN_NAME)
        plugin = manager.NeutronManager().get_plugin()
        cluster = plugin.cluster
        # Verify old nvp_* params have been fully parsed
        self._assert_required_options(cluster)
        self.assertEqual(3, cluster.http_timeout)
        self.assertEqual(2, cluster.retries)
        self.assertEqual(2, cluster.redirects)
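
# For orientation, _assert_required_options() above implies the test fixtures
# provide values along these lines (a sketch reconstructed from the asserted
# values, not a copy of the actual nsx.ini.*.test fixture files):
#
#   [DEFAULT]
#   nsx_user = foo
#   nsx_password = bar
#   nsx_controllers = fake_1:443, fake_2:443
#   default_tz_uuid = fake_tz_uuid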
apache-2.0
demarle/VTK
Filters/AMR/Testing/Python/TestAMRSliceFilter.py
21
1088
#!/usr/bin/env python
# This tests vtkAMRSliceFilter

import vtk
from vtk.test import Testing
from vtk.util.misc import vtkGetDataRoot
VTK_DATA_ROOT = vtkGetDataRoot()

def NumCells(out):
    n = 0
    for i in range(out.GetNumberOfLevels()):
        for j in range(out.GetNumberOfDataSets(i)):
            m = out.GetDataSet(i, j).GetNumberOfCells()
            #print (i,j,m)
            n = n + m
    return n

class TestAMRSliceFilter(Testing.vtkTest):
    def testMe(self):
        filename = VTK_DATA_ROOT + "/Data/AMR/Enzo/DD0010/moving7_0010.hierarchy"
        datafieldname = "TotalEnergy"

        reader = vtk.vtkAMREnzoReader()
        reader.SetFileName(filename)
        reader.SetMaxLevel(10)
        reader.SetCellArrayStatus(datafieldname, 1)

        filter = vtk.vtkAMRSliceFilter()
        filter.SetInputConnection(reader.GetOutputPort())
        filter.SetNormal(1)
        filter.SetOffSetFromOrigin(0.5)
        filter.SetMaxResolution(10)
        filter.Update()

        out = filter.GetOutputDataObject(0)
        self.assertEqual(NumCells(out), 456)
        out.Audit()

if __name__ == "__main__":
    Testing.main([(TestAMRSliceFilter, 'test')])
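
# NumCells() above is the generic traversal pattern for AMR output: loop over
# levels, then over the datasets at each level. The same shape aggregates any
# per-block quantity, e.g. point counts (a sketch, not part of the test):
#
#   def NumPoints(out):
#       n = 0
#       for i in range(out.GetNumberOfLevels()):
#           for j in range(out.GetNumberOfDataSets(i)):
#               n += out.GetDataSet(i, j).GetNumberOfPoints()
#       return n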
bsd-3-clause