Dataset schema:
- repo_name: string (length 6 to 100)
- path: string (length 4 to 294)
- copies: string (length 1 to 5)
- size: string (length 4 to 6)
- content: string (length 606 to 896k)
- license: string (15 classes)
isohybrid/dotfile
vim/bundle/git:--github.com-klen-python-mode/pylibs/rope/base/history.py
115
8654
from rope.base import exceptions, change, taskhandle class History(object): """A class that holds project history""" def __init__(self, project, maxundos=None): self.project = project self._undo_list = [] self._redo_list = [] self._maxundos = maxundos self._load_history() self.project.data_files.add_write_hook(self.write) self.current_change = None def _load_history(self): if self.save: result = self.project.data_files.read_data( 'history', compress=self.compress, import_=True) if result is not None: to_change = change.DataToChange(self.project) for data in result[0]: self._undo_list.append(to_change(data)) for data in result[1]: self._redo_list.append(to_change(data)) def do(self, changes, task_handle=taskhandle.NullTaskHandle()): """Perform the change and add it to the `self.undo_list` Note that uninteresting changes (changes to ignored files) will not be appended to `self.undo_list`. """ try: self.current_change = changes changes.do(change.create_job_set(task_handle, changes)) finally: self.current_change = None if self._is_change_interesting(changes): self.undo_list.append(changes) self._remove_extra_items() del self.redo_list[:] def _remove_extra_items(self): if len(self.undo_list) > self.max_undos: del self.undo_list[0:len(self.undo_list) - self.max_undos] def _is_change_interesting(self, changes): for resource in changes.get_changed_resources(): if not self.project.is_ignored(resource): return True return False def undo(self, change=None, drop=False, task_handle=taskhandle.NullTaskHandle()): """Redo done changes from the history When `change` is `None`, the last done change will be undone. If change is not `None` it should be an item from `self.undo_list`; this change and all changes that depend on it will be undone. In both cases the list of undone changes will be returned. If `drop` is `True`, the undone change will not be appended to the redo list. """ if not self._undo_list: raise exceptions.HistoryError('Undo list is empty') if change is None: change = self.undo_list[-1] dependencies = self._find_dependencies(self.undo_list, change) self._move_front(self.undo_list, dependencies) self._perform_undos(len(dependencies), task_handle) result = self.redo_list[-len(dependencies):] if drop: del self.redo_list[-len(dependencies):] return result def redo(self, change=None, task_handle=taskhandle.NullTaskHandle()): """Redo undone changes from the history When `change` is `None`, the last undone change will be redone. If change is not `None` it should be an item from `self.redo_list`; this change and all changes that depend on it will be redone. In both cases the list of redone changes will be returned. 
""" if not self.redo_list: raise exceptions.HistoryError('Redo list is empty') if change is None: change = self.redo_list[-1] dependencies = self._find_dependencies(self.redo_list, change) self._move_front(self.redo_list, dependencies) self._perform_redos(len(dependencies), task_handle) return self.undo_list[-len(dependencies):] def _move_front(self, change_list, changes): for change in changes: change_list.remove(change) change_list.append(change) def _find_dependencies(self, change_list, change): index = change_list.index(change) return _FindChangeDependencies(change_list[index:])() def _perform_undos(self, count, task_handle): for i in range(count): self.current_change = self.undo_list[-1] try: job_set = change.create_job_set(task_handle, self.current_change) self.current_change.undo(job_set) finally: self.current_change = None self.redo_list.append(self.undo_list.pop()) def _perform_redos(self, count, task_handle): for i in range(count): self.current_change = self.redo_list[-1] try: job_set = change.create_job_set(task_handle, self.current_change) self.current_change.do(job_set) finally: self.current_change = None self.undo_list.append(self.redo_list.pop()) def contents_before_current_change(self, file): if self.current_change is None: return None result = self._search_for_change_contents([self.current_change], file) if result is not None: return result if file.exists() and not file.is_folder(): return file.read() else: return None def _search_for_change_contents(self, change_list, file): for change_ in reversed(change_list): if isinstance(change_, change.ChangeSet): result = self._search_for_change_contents(change_.changes, file) if result is not None: return result if isinstance(change_, change.ChangeContents) and \ change_.resource == file: return change_.old_contents def write(self): if self.save: data = [] to_data = change.ChangeToData() self._remove_extra_items() data.append([to_data(change_) for change_ in self.undo_list]) data.append([to_data(change_) for change_ in self.redo_list]) self.project.data_files.write_data('history', data, compress=self.compress) def get_file_undo_list(self, resource): result = [] for change in self.undo_list: if resource in change.get_changed_resources(): result.append(change) return result def __str__(self): return 'History holds %s changes in memory' % \ (len(self.undo_list) + len(self.redo_list)) undo_list = property(lambda self: self._undo_list) redo_list = property(lambda self: self._redo_list) @property def tobe_undone(self): """The last done change if available, `None` otherwise""" if self.undo_list: return self.undo_list[-1] @property def tobe_redone(self): """The last undone change if available, `None` otherwise""" if self.redo_list: return self.redo_list[-1] @property def max_undos(self): if self._maxundos is None: return self.project.prefs.get('max_history_items', 100) else: return self._maxundos @property def save(self): return self.project.prefs.get('save_history', False) @property def compress(self): return self.project.prefs.get('compress_history', False) def clear(self): """Forget all undo and redo information""" del self.undo_list[:] del self.redo_list[:] class _FindChangeDependencies(object): def __init__(self, change_list): self.change = change_list[0] self.change_list = change_list self.changed_resources = set(self.change.get_changed_resources()) def __call__(self): result = [self.change] for change in self.change_list[1:]: if self._depends_on(change, result): result.append(change) 
self.changed_resources.update(change.get_changed_resources()) return result def _depends_on(self, changes, result): for resource in changes.get_changed_resources(): if resource is None: continue if resource in self.changed_resources: return True for changed in self.changed_resources: if resource.is_folder() and resource.contains(changed): return True if changed.is_folder() and changed.contains(resource): return True return False
bsd-2-clause
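The History class in the record above keeps two stacks -- done changes and undone changes -- and replays them for undo/redo. A minimal, self-contained sketch of that stack discipline; the SimpleHistory and Change names are illustrative, not part of rope's API:

```python
# Sketch of the undo/redo-stack pattern used by rope's History (hypothetical names).
class Change:
    def __init__(self, do, undo):
        self._do, self._undo = do, undo
    def do(self): self._do()
    def undo(self): self._undo()

class SimpleHistory:
    def __init__(self, max_undos=100):
        self._undo_list, self._redo_list = [], []
        self._max_undos = max_undos
    def do(self, change):
        change.do()
        self._undo_list.append(change)
        del self._undo_list[:-self._max_undos]  # drop the oldest extras
        del self._redo_list[:]                  # a new change invalidates redo
    def undo(self):
        change = self._undo_list.pop()
        change.undo()
        self._redo_list.append(change)
    def redo(self):
        change = self._redo_list.pop()
        change.do()
        self._undo_list.append(change)

state = []
h = SimpleHistory()
h.do(Change(lambda: state.append(1), lambda: state.pop()))
h.undo()
assert state == []
h.redo()
assert state == [1]
```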
pankajp/pyface
pyface/action/action_event.py
2
1201
""" The event passed to an action's 'perform' method. """ # Standard library imports. import time # Enthought library imports. from traits.api import Float, HasTraits, Int class ActionEvent(HasTraits): """ The event passed to an action's 'perform' method. """ #### 'ActionEvent' interface ############################################## # When the action was performed (time.time()). when = Float ########################################################################### # 'object' interface. ########################################################################### def __init__(self, **traits): """ Creates a new action event. Note: Every keyword argument becoames a public attribute of the event. """ # Base-class constructor. super(ActionEvent, self).__init__(**traits) # fixme: We currently allow anything to be tagged onto the event, which # is going to make code very hard to read. self.__dict__.update(traits) # When the action was performed. self.when = time.time() return #### EOF ######################################################################
bsd-3-clause
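The pattern in this record is small but common: keyword arguments become public attributes and the event stamps its creation time. A standalone sketch of the same idea, without the traits machinery (SimpleEvent is a hypothetical name):

```python
import time

class SimpleEvent(object):
    def __init__(self, **kwargs):
        # Every keyword argument becomes a public attribute of the event.
        self.__dict__.update(kwargs)
        # Record when the event was created.
        self.when = time.time()

event = SimpleEvent(widget="save_button", checked=True)
print(event.widget, event.checked, event.when)
```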
iacdingping/WebFundamentals
appengine/main.py
44
2895
#!/usr/bin/env python # # Copyright 2014 Google Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # import webapp2 import json import logging from datetime import datetime, timedelta from urlparse import urljoin import os import re from google.appengine.ext.webapp.template import render class HomePage(webapp2.RequestHandler): def get(self): self.redirect("/web/", permanent=True) class AllPages(webapp2.RequestHandler): def get(self, path): lang = self.request.get("hl", "en") text = None file_path = os.path.join(os.path.dirname(__file__), "build", "_langs", lang, path) if os.path.isfile(file_path): text = render("wrapper.tpl", {"content": file_path, "lang": lang}) file_path = os.path.join(os.path.dirname(__file__), "build", "_langs", lang, path) + ".html" if text is None and os.path.isfile(file_path): text = render("wrapper.tpl", {"content": file_path, "lang": lang}) file_path = os.path.join(os.path.dirname(__file__), "build", "_langs", lang, path, "index.html") if text is None and os.path.isfile(file_path): text = render("wrapper.tpl", {"content": file_path, "lang": lang}) file_path = os.path.join(os.path.dirname(__file__), "build", "_langs", "en", path) if text is None and os.path.isfile(file_path): text = render("wrapper.tpl", {"content": file_path, "lang": "en"}) file_path = os.path.join(os.path.dirname(__file__), "build", "_langs", "en", path) + ".html" if text is None and os.path.isfile(file_path): text = render("wrapper.tpl", {"content": file_path, "lang": "en"}) file_path = os.path.join(os.path.dirname(__file__), "build", "_langs", "en", path, "index.html") if text is None and os.path.isfile(file_path): text = render("wrapper.tpl", {"content": file_path, "lang": "en"}) if text is None: logging.warning("--- Requested file not found") logging.warning(" - lang: " + lang) logging.warning(" - path: " + path) logging.warning(" - file_path: " + file_path) text = "404 - Requested file not found." self.response.set_status(404) self.response.out.write(text) app = webapp2.WSGIApplication([ ('/web', HomePage), ('/web/(.+)/', AllPages), ('/web/(.*)', AllPages) ], debug=True)
apache-2.0
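The AllPages handler above performs the same file lookup six times with different candidates. A compact sketch of that fallback order as a loop -- requested language first, then "en"; bare path, then path + ".html", then path/index.html. resolve_page is a hypothetical helper, not part of the original module:

```python
import os

def resolve_page(root, path, lang):
    """Return (file_path, lang) for the first candidate that exists, else (None, None)."""
    for lang_dir in (lang, "en"):
        base = os.path.join(root, "build", "_langs", lang_dir, path)
        for candidate in (base, base + ".html", os.path.join(base, "index.html")):
            if os.path.isfile(candidate):
                return candidate, lang_dir
    return None, None
```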
consulo/consulo-python
plugin/src/main/dist/helpers/pydev/third_party/pep8/lib2to3/lib2to3/fixes/fix_tuple_params.py
324
5577
"""Fixer for function definitions with tuple parameters. def func(((a, b), c), d): ... -> def func(x, d): ((a, b), c) = x ... It will also support lambdas: lambda (x, y): x + y -> lambda t: t[0] + t[1] # The parens are a syntax error in Python 3 lambda (x): x + y -> lambda x: x + y """ # Author: Collin Winter # Local imports from .. import pytree from ..pgen2 import token from .. import fixer_base from ..fixer_util import Assign, Name, Newline, Number, Subscript, syms def is_docstring(stmt): return isinstance(stmt, pytree.Node) and \ stmt.children[0].type == token.STRING class FixTupleParams(fixer_base.BaseFix): run_order = 4 #use a lower order since lambda is part of other #patterns BM_compatible = True PATTERN = """ funcdef< 'def' any parameters< '(' args=any ')' > ['->' any] ':' suite=any+ > | lambda= lambdef< 'lambda' args=vfpdef< '(' inner=any ')' > ':' body=any > """ def transform(self, node, results): if "lambda" in results: return self.transform_lambda(node, results) new_lines = [] suite = results["suite"] args = results["args"] # This crap is so "def foo(...): x = 5; y = 7" is handled correctly. # TODO(cwinter): suite-cleanup if suite[0].children[1].type == token.INDENT: start = 2 indent = suite[0].children[1].value end = Newline() else: start = 0 indent = u"; " end = pytree.Leaf(token.INDENT, u"") # We need access to self for new_name(), and making this a method # doesn't feel right. Closing over self and new_lines makes the # code below cleaner. def handle_tuple(tuple_arg, add_prefix=False): n = Name(self.new_name()) arg = tuple_arg.clone() arg.prefix = u"" stmt = Assign(arg, n.clone()) if add_prefix: n.prefix = u" " tuple_arg.replace(n) new_lines.append(pytree.Node(syms.simple_stmt, [stmt, end.clone()])) if args.type == syms.tfpdef: handle_tuple(args) elif args.type == syms.typedargslist: for i, arg in enumerate(args.children): if arg.type == syms.tfpdef: # Without add_prefix, the emitted code is correct, # just ugly. handle_tuple(arg, add_prefix=(i > 0)) if not new_lines: return # This isn't strictly necessary, but it plays nicely with other fixers. # TODO(cwinter) get rid of this when children becomes a smart list for line in new_lines: line.parent = suite[0] # TODO(cwinter) suite-cleanup after = start if start == 0: new_lines[0].prefix = u" " elif is_docstring(suite[0].children[start]): new_lines[0].prefix = indent after = start + 1 for line in new_lines: line.parent = suite[0] suite[0].children[after:after] = new_lines for i in range(after+1, after+len(new_lines)+1): suite[0].children[i].prefix = indent suite[0].changed() def transform_lambda(self, node, results): args = results["args"] body = results["body"] inner = simplify_args(results["inner"]) # Replace lambda ((((x)))): x with lambda x: x if inner.type == token.NAME: inner = inner.clone() inner.prefix = u" " args.replace(inner) return params = find_params(args) to_index = map_to_index(params) tup_name = self.new_name(tuple_name(params)) new_param = Name(tup_name, prefix=u" ") args.replace(new_param.clone()) for n in body.post_order(): if n.type == token.NAME and n.value in to_index: subscripts = [c.clone() for c in to_index[n.value]] new = pytree.Node(syms.power, [new_param.clone()] + subscripts) new.prefix = n.prefix n.replace(new) ### Helper functions for transform_lambda() def simplify_args(node): if node.type in (syms.vfplist, token.NAME): return node elif node.type == syms.vfpdef: # These look like vfpdef< '(' x ')' > where x is NAME # or another vfpdef instance (leading to recursion). 
while node.type == syms.vfpdef: node = node.children[1] return node raise RuntimeError("Received unexpected node %s" % node) def find_params(node): if node.type == syms.vfpdef: return find_params(node.children[1]) elif node.type == token.NAME: return node.value return [find_params(c) for c in node.children if c.type != token.COMMA] def map_to_index(param_list, prefix=[], d=None): if d is None: d = {} for i, obj in enumerate(param_list): trailer = [Subscript(Number(unicode(i)))] if isinstance(obj, list): map_to_index(obj, trailer, d=d) else: d[obj] = prefix + trailer return d def tuple_name(param_list): l = [] for obj in param_list: if isinstance(obj, list): l.append(tuple_name(obj)) else: l.append(obj) return u"_".join(l)
apache-2.0
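To see what the fixer in this record does, it can be driven through lib2to3's RefactoringTool. This is a sketch assuming lib2to3 is available (it ships with CPython but is deprecated since Python 3.9); the generated parameter name is derived from the tuple, typically x_y here:

```python
from lib2to3 import refactor

# Load only the tuple-params fixer shown above.
tool = refactor.RefactoringTool(["lib2to3.fixes.fix_tuple_params"])

# refactor_string() requires the source to end with a newline.
src = "lambda (x, y): x + y\n"
print(tool.refactor_string(src, "<example>"))
# roughly: lambda x_y: x_y[0] + x_y[1]
```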
DylanSecreast/uoregon-cis-portfolio
uoregon-cis-471/project-1b/game.py
8
21232
# game.py # ------- # Licensing Information: Please do not distribute or publish solutions to this # project. You are free to use and extend these projects for educational # purposes. The Pacman AI projects were developed at UC Berkeley, primarily by # John DeNero (denero@cs.berkeley.edu) and Dan Klein (klein@cs.berkeley.edu). # For more info, see http://inst.eecs.berkeley.edu/~cs188/sp09/pacman.html from util import * from util import raiseNotDefined import time, os import traceback try: import boinc _BOINC_ENABLED = True except: _BOINC_ENABLED = False ####################### # Parts worth reading # ####################### class Agent: """ An agent must define a getAction method, but may also define the following methods which will be called if they exist: def registerInitialState(self, state): # inspects the starting state """ def __init__(self, index=0): self.index = index def getAction(self, state): """ The Agent will receive a GameState (from either {pacman, capture, sonar}.py) and must return an action from Directions.{North, South, East, West, Stop} """ raiseNotDefined() class Directions: NORTH = 'North' SOUTH = 'South' EAST = 'East' WEST = 'West' STOP = 'Stop' LEFT = {NORTH: WEST, SOUTH: EAST, EAST: NORTH, WEST: SOUTH, STOP: STOP} RIGHT = dict([(y,x) for x, y in LEFT.items()]) REVERSE = {NORTH: SOUTH, SOUTH: NORTH, EAST: WEST, WEST: EAST, STOP: STOP} class Configuration: """ A Configuration holds the (x,y) coordinate of a character, along with its traveling direction. The convention for positions, like a graph, is that (0,0) is the lower left corner, x increases horizontally and y increases vertically. Therefore, north is the direction of increasing y, or (0,1). """ def __init__(self, pos, direction): self.pos = pos self.direction = direction def getPosition(self): return (self.pos) def getDirection(self): return self.direction def isInteger(self): x,y = self.pos return x == int(x) and y == int(y) def __eq__(self, other): if other == None: return False return (self.pos == other.pos and self.direction == other.direction) def __hash__(self): x = hash(self.pos) y = hash(self.direction) return hash(x + 13 * y) def __str__(self): return "(x,y)="+str(self.pos)+", "+str(self.direction) def generateSuccessor(self, vector): """ Generates a new configuration reached by translating the current configuration by the action vector. This is a low-level call and does not attempt to respect the legality of the movement. Actions are movement vectors. """ x, y= self.pos dx, dy = vector direction = Actions.vectorToDirection(vector) if direction == Directions.STOP: direction = self.direction # There is no stop direction return Configuration((x + dx, y+dy), direction) class AgentState: """ AgentStates hold the state of an agent (configuration, speed, scared, etc). 
""" def __init__( self, startConfiguration, isPacman ): self.start = startConfiguration self.configuration = startConfiguration self.isPacman = isPacman self.scaredTimer = 0 def __str__( self ): if self.isPacman: return "Pacman: " + str( self.configuration ) else: return "Ghost: " + str( self.configuration ) def __eq__( self, other ): if other == None: return False return self.configuration == other.configuration and self.scaredTimer == other.scaredTimer def __hash__(self): return hash(hash(self.configuration) + 13 * hash(self.scaredTimer)) def copy( self ): state = AgentState( self.start, self.isPacman ) state.configuration = self.configuration state.scaredTimer = self.scaredTimer return state def getPosition(self): if self.configuration == None: return None return self.configuration.getPosition() def getDirection(self): return self.configuration.getDirection() class Grid: """ A 2-dimensional array of objects backed by a list of lists. Data is accessed via grid[x][y] where (x,y) are positions on a Pacman map with x horizontal, y vertical and the origin (0,0) in the bottom left corner. The __str__ method constructs an output that is oriented like a pacman board. """ def __init__(self, width, height, initialValue=False, bitRepresentation=None): if initialValue not in [False, True]: raise Exception('Grids can only contain booleans') self.CELLS_PER_INT = 30 self.width = width self.height = height self.data = [[initialValue for y in range(height)] for x in range(width)] if bitRepresentation: self._unpackBits(bitRepresentation) def __getitem__(self, i): return self.data[i] def __setitem__(self, key, item): self.data[key] = item def __str__(self): out = [[str(self.data[x][y])[0] for x in range(self.width)] for y in range(self.height)] out.reverse() return '\n'.join([''.join(x) for x in out]) def __eq__(self, other): if other == None: return False return self.data == other.data def __hash__(self): # return hash(str(self)) base = 1 h = 0 for l in self.data: for i in l: if i: h += base base *= 2 return hash(h) def copy(self): g = Grid(self.width, self.height) g.data = [x[:] for x in self.data] return g def deepCopy(self): return self.copy() def shallowCopy(self): g = Grid(self.width, self.height) g.data = self.data return g def count(self, item =True ): return sum([x.count(item) for x in self.data]) def asList(self, key = True): list = [] for x in range(self.width): for y in range(self.height): if self[x][y] == key: list.append( (x,y) ) return list def packBits(self): """ Returns an efficient int list representation (width, height, bitPackedInts...) 
""" bits = [self.width, self.height] currentInt = 0 for i in range(self.height * self.width): bit = self.CELLS_PER_INT - (i % self.CELLS_PER_INT) - 1 x, y = self._cellIndexToPosition(i) if self[x][y]: currentInt += 2 ** bit if (i + 1) % self.CELLS_PER_INT == 0: bits.append(currentInt) currentInt = 0 bits.append(currentInt) return tuple(bits) def _cellIndexToPosition(self, index): x = index / self.height y = index % self.height return x, y def _unpackBits(self, bits): """ Fills in data from a bit-level representation """ cell = 0 for packed in bits: for bit in self._unpackInt(packed, self.CELLS_PER_INT): if cell == self.width * self.height: break x, y = self._cellIndexToPosition(cell) self[x][y] = bit cell += 1 def _unpackInt(self, packed, size): bools = [] if packed < 0: raise ValueError, "must be a positive integer" for i in range(size): n = 2 ** (self.CELLS_PER_INT - i - 1) if packed >= n: bools.append(True) packed -= n else: bools.append(False) return bools def reconstituteGrid(bitRep): if type(bitRep) is not type((1,2)): return bitRep width, height = bitRep[:2] return Grid(width, height, bitRepresentation= bitRep[2:]) #################################### # Parts you shouldn't have to read # #################################### class Actions: """ A collection of static methods for manipulating move actions. """ # Directions _directions = {Directions.NORTH: (0, 1), Directions.SOUTH: (0, -1), Directions.EAST: (1, 0), Directions.WEST: (-1, 0), Directions.STOP: (0, 0)} _directionsAsList = _directions.items() TOLERANCE = .001 def reverseDirection(action): if action == Directions.NORTH: return Directions.SOUTH if action == Directions.SOUTH: return Directions.NORTH if action == Directions.EAST: return Directions.WEST if action == Directions.WEST: return Directions.EAST return action reverseDirection = staticmethod(reverseDirection) def vectorToDirection(vector): dx, dy = vector if dy > 0: return Directions.NORTH if dy < 0: return Directions.SOUTH if dx < 0: return Directions.WEST if dx > 0: return Directions.EAST return Directions.STOP vectorToDirection = staticmethod(vectorToDirection) def directionToVector(direction, speed = 1.0): dx, dy = Actions._directions[direction] return (dx * speed, dy * speed) directionToVector = staticmethod(directionToVector) def getPossibleActions(config, walls): possible = [] x, y = config.pos x_int, y_int = int(x + 0.5), int(y + 0.5) # In between grid points, all agents must continue straight if (abs(x - x_int) + abs(y - y_int) > Actions.TOLERANCE): return [config.getDirection()] for dir, vec in Actions._directionsAsList: dx, dy = vec next_y = y_int + dy next_x = x_int + dx if not walls[next_x][next_y]: possible.append(dir) return possible getPossibleActions = staticmethod(getPossibleActions) def getLegalNeighbors(position, walls): x,y = position x_int, y_int = int(x + 0.5), int(y + 0.5) neighbors = [] for dir, vec in Actions._directionsAsList: dx, dy = vec next_x = x_int + dx if next_x < 0 or next_x == walls.width: continue next_y = y_int + dy if next_y < 0 or next_y == walls.height: continue if not walls[next_x][next_y]: neighbors.append((next_x, next_y)) return neighbors getLegalNeighbors = staticmethod(getLegalNeighbors) def getSuccessor(position, action): dx, dy = Actions.directionToVector(action) x, y = position return (x + dx, y + dy) getSuccessor = staticmethod(getSuccessor) class GameStateData: """ """ def __init__( self, prevState = None ): """ Generates a new data packet by copying information from its predecessor. 
""" if prevState != None: self.food = prevState.food.shallowCopy() self.capsules = prevState.capsules[:] self.agentStates = self.copyAgentStates( prevState.agentStates ) self.layout = prevState.layout self._eaten = prevState._eaten self.score = prevState.score self._foodEaten = None self._capsuleEaten = None self._agentMoved = None self._lose = False self._win = False self.scoreChange = 0 def deepCopy( self ): state = GameStateData( self ) state.food = self.food.deepCopy() state.layout = self.layout.deepCopy() state._agentMoved = self._agentMoved state._foodEaten = self._foodEaten state._capsuleEaten = self._capsuleEaten return state def copyAgentStates( self, agentStates ): copiedStates = [] for agentState in agentStates: copiedStates.append( agentState.copy() ) return copiedStates def __eq__( self, other ): """ Allows two states to be compared. """ if other == None: return False # TODO Check for type of other if not self.agentStates == other.agentStates: return False if not self.food == other.food: return False if not self.capsules == other.capsules: return False if not self.score == other.score: return False return True def __hash__( self ): """ Allows states to be keys of dictionaries. """ for i, state in enumerate( self.agentStates ): try: int(hash(state)) except TypeError, e: print e #hash(state) return int((hash(tuple(self.agentStates)) + 13*hash(self.food) + 113* hash(tuple(self.capsules)) + 7 * hash(self.score)) % 1048575 ) def __str__( self ): width, height = self.layout.width, self.layout.height map = Grid(width, height) if type(self.food) == type((1,2)): self.food = reconstituteGrid(self.food) for x in range(width): for y in range(height): food, walls = self.food, self.layout.walls map[x][y] = self._foodWallStr(food[x][y], walls[x][y]) for agentState in self.agentStates: if agentState == None: continue if agentState.configuration == None: continue x,y = [int( i ) for i in nearestPoint( agentState.configuration.pos )] agent_dir = agentState.configuration.direction if agentState.isPacman: map[x][y] = self._pacStr( agent_dir ) else: map[x][y] = self._ghostStr( agent_dir ) for x, y in self.capsules: map[x][y] = 'o' return str(map) + ("\nScore: %d\n" % self.score) def _foodWallStr( self, hasFood, hasWall ): if hasFood: return '.' elif hasWall: return '%' else: return ' ' def _pacStr( self, dir ): if dir == Directions.NORTH: return 'v' if dir == Directions.SOUTH: return '^' if dir == Directions.WEST: return '>' return '<' def _ghostStr( self, dir ): return 'G' if dir == Directions.NORTH: return 'M' if dir == Directions.SOUTH: return 'W' if dir == Directions.WEST: return '3' return 'E' def initialize( self, layout, numGhostAgents ): """ Creates an initial game state from a layout array (see layout.py). """ self.food = layout.food.copy() self.capsules = layout.capsules[:] self.layout = layout self.score = 0 self.scoreChange = 0 self.agentStates = [] numGhosts = 0 for isPacman, pos in layout.agentPositions: if not isPacman: if numGhosts == numGhostAgents: continue # Max ghosts reached already else: numGhosts += 1 self.agentStates.append( AgentState( Configuration( pos, Directions.STOP), isPacman) ) self._eaten = [False for a in self.agentStates] class Game: """ The Game manages the control flow, soliciting actions from agents. 
""" def __init__( self, agents, display, rules, startingIndex=0, muteAgents=False, catchExceptions=False ): self.agentCrashed = False self.agents = agents self.display = display self.rules = rules self.startingIndex = startingIndex self.gameOver = False self.muteAgents = muteAgents self.catchExceptions = catchExceptions self.moveHistory = [] self.totalAgentTimes = [0 for agent in agents] self.totalAgentTimeWarnings = [0 for agent in agents] self.agentTimeout = False def getProgress(self): if self.gameOver: return 1.0 else: return self.rules.getProgress(self) def _agentCrash( self, agentIndex, quiet=False): "Helper method for handling agent crashes" if not quiet: traceback.print_exc() self.gameOver = True self.agentCrashed = True self.rules.agentCrash(self, agentIndex) OLD_STDOUT = None OLD_STDERR = None def mute(self): if not self.muteAgents: return global OLD_STDOUT, OLD_STDERR import cStringIO OLD_STDOUT = sys.stdout OLD_STDERR = sys.stderr sys.stdout = cStringIO.StringIO() sys.stderr = cStringIO.StringIO() def unmute(self): if not self.muteAgents: return global OLD_STDOUT, OLD_STDERR sys.stdout.close() sys.stderr.close() # Revert stdout/stderr to originals sys.stdout = OLD_STDOUT sys.stderr = OLD_STDERR def run( self ): """ Main control loop for game play. """ self.display.initialize(self.state.data) self.numMoves = 0 ###self.display.initialize(self.state.makeObservation(1).data) # inform learning agents of the game start for i in range(len(self.agents)): agent = self.agents[i] if not agent: # this is a null agent, meaning it failed to load # the other team wins self._agentCrash(i, quiet=True) return if ("registerInitialState" in dir(agent)): self.mute() if self.catchExceptions: try: timed_func = TimeoutFunction(agent.registerInitialState, int(self.rules.getMaxStartupTime(i))) try: start_time = time.time() timed_func(self.state.deepCopy()) time_taken = time.time() - start_time self.totalAgentTimes[i] += time_taken except TimeoutFunctionException: print "Agent %d ran out of time on startup!" % i self.unmute() self.agentTimeout = True self._agentCrash(i, quiet=True) return except Exception,data: self.unmute() self._agentCrash(i, quiet=True) return else: agent.registerInitialState(self.state.deepCopy()) ## TODO: could this exceed the total time self.unmute() agentIndex = self.startingIndex numAgents = len( self.agents ) while not self.gameOver: # Fetch the next agent agent = self.agents[agentIndex] move_time = 0 skip_action = False # Generate an observation of the state if 'observationFunction' in dir( agent ): self.mute() if self.catchExceptions: try: timed_func = TimeoutFunction(agent.observationFunction, int(self.rules.getMoveTimeout(agentIndex))) try: start_time = time.time() observation = timed_func(self.state.deepCopy()) except TimeoutFunctionException: skip_action = True move_time += time.time() - start_time self.unmute() except Exception,data: self.unmute() self._agentCrash(agentIndex, quiet=True) return else: observation = agent.observationFunction(self.state.deepCopy()) self.unmute() else: observation = self.state.deepCopy() # Solicit an action action = None self.mute() if self.catchExceptions: try: timed_func = TimeoutFunction(agent.getAction, int(self.rules.getMoveTimeout(agentIndex)) - int(move_time)) try: start_time = time.time() if skip_action: raise TimeoutFunctionException() action = timed_func( observation ) except TimeoutFunctionException: print "Agent %d timed out on a single move!" 
% agentIndex self.agentTimeout = True self.unmute() self._agentCrash(agentIndex, quiet=True) return move_time += time.time() - start_time if move_time > self.rules.getMoveWarningTime(agentIndex): self.totalAgentTimeWarnings[agentIndex] += 1 print "Agent %d took too long to make a move! This is warning %d" % (agentIndex, self.totalAgentTimeWarnings[agentIndex]) if self.totalAgentTimeWarnings[agentIndex] > self.rules.getMaxTimeWarnings(agentIndex): print "Agent %d exceeded the maximum number of warnings: %d" % (agentIndex, self.totalAgentTimeWarnings[agentIndex]) self.agentTimeout = True self.unmute() self._agentCrash(agentIndex, quiet=True) self.totalAgentTimes[agentIndex] += move_time #print "Agent: %d, time: %f, total: %f" % (agentIndex, move_time, self.totalAgentTimes[agentIndex]) if self.totalAgentTimes[agentIndex] > self.rules.getMaxTotalTime(agentIndex): print "Agent %d ran out of time! (time: %1.2f)" % (agentIndex, self.totalAgentTimes[agentIndex]) self.agentTimeout = True self.unmute() self._agentCrash(agentIndex, quiet=True) return self.unmute() except Exception,data: self.unmute() self._agentCrash(agentIndex) return else: action = agent.getAction(observation) self.unmute() # Execute the action self.moveHistory.append( (agentIndex, action) ) if self.catchExceptions: try: self.state = self.state.generateSuccessor( agentIndex, action ) except Exception,data: self._agentCrash(agentIndex) return else: self.state = self.state.generateSuccessor( agentIndex, action ) # Change the display self.display.update( self.state.data ) ###idx = agentIndex - agentIndex % 2 + 1 ###self.display.update( self.state.makeObservation(idx).data ) # Allow for game specific conditions (winning, losing, etc.) self.rules.process(self.state, self) # Track progress if agentIndex == numAgents + 1: self.numMoves += 1 # Next agent agentIndex = ( agentIndex + 1 ) % numAgents if _BOINC_ENABLED: boinc.set_fraction_done(self.getProgress()) # inform a learning agent of the game result for agent in self.agents: if "final" in dir( agent ) : try: self.mute() agent.final( self.state ) self.unmute() except Exception,data: if not self.catchExceptions: raise self.unmute() print "Exception",data self._agentCrash(agent.index) return self.display.finish()
gpl-3.0
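Grid.packBits and Grid._unpackBits above compress a boolean grid into integers, CELLS_PER_INT bits per int. The same round trip on a flat list of booleans, as a standalone sketch with hypothetical helper names:

```python
CELLS_PER_INT = 30

def pack_bits(cells):
    packed, current = [], 0
    for i, cell in enumerate(cells):
        # Highest-order bit first within each packed int, as in Grid.packBits.
        bit = CELLS_PER_INT - (i % CELLS_PER_INT) - 1
        if cell:
            current += 2 ** bit
        if (i + 1) % CELLS_PER_INT == 0:
            packed.append(current)
            current = 0
    packed.append(current)  # trailing (possibly zero) partial int, matching the original
    return packed

def unpack_bits(packed, n):
    cells = []
    for value in packed:
        for i in range(CELLS_PER_INT):
            if len(cells) == n:
                break
            cells.append(bool(value & (1 << (CELLS_PER_INT - i - 1))))
    return cells

cells = [i % 3 == 0 for i in range(64)]
assert unpack_bits(pack_bits(cells), 64) == cells
```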
aioue/ansible
lib/ansible/modules/network/f5/bigip_snmp_trap.py
41
14161
#!/usr/bin/python # # Copyright 2017 F5 Networks Inc. # # This file is part of Ansible # # Ansible is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Ansible is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Ansible. If not, see <http://www.gnu.org/licenses/>. ANSIBLE_METADATA = { 'status': ['preview'], 'supported_by': 'community', 'metadata_version': '1.0' } DOCUMENTATION = ''' module: bigip_snmp_trap short_description: Manipulate SNMP trap information on a BIG-IP. description: - Manipulate SNMP trap information on a BIG-IP. version_added: 2.4 options: name: description: - Name of the SNMP configuration endpoint. required: True snmp_version: description: - Specifies to which Simple Network Management Protocol (SNMP) version the trap destination applies. choices: - 1 - 2c community: description: - Specifies the community name for the trap destination. destination: description: - Specifies the address for the trap destination. This can be either an IP address or a hostname. port: description: - Specifies the port for the trap destination. network: description: - Specifies the name of the trap network. This option is not supported in versions of BIG-IP < 12.1.0. If used on versions < 12.1.0, it will simply be ignored. choices: - other - management - default state: description: - When C(present), ensures that the cloud connector exists. When C(absent), ensures that the cloud connector does not exist. default: present choices: - present - absent notes: - Requires the f5-sdk Python package on the host. This is as easy as pip install f5-sdk. - This module only supports version v1 and v2c of SNMP. - The C(network) option is not supported on versions of BIG-IP < 12.1.0 because the platform did not support that option until 12.1.0. If used on versions < 12.1.0, it will simply be ignored. extends_documentation_fragment: f5 requirements: - f5-sdk >= 2.2.0 author: - Tim Rupp (@caphrim007) ''' EXAMPLES = ''' - name: Create snmp v1 trap bigip_snmp_trap: community: "general" destination: "1.2.3.4" name: "my-trap1" network: "management" port: "9000" snmp_version: "1" server: "lb.mydomain.com" user: "admin" password: "secret" delegate_to: localhost - name: Create snmp v2 trap bigip_snmp_trap: community: "general" destination: "5.6.7.8" name: "my-trap2" network: "default" port: "7000" snmp_version: "2c" server: "lb.mydomain.com" user: "admin" password: "secret" delegate_to: localhost ''' RETURN = ''' snmp_version: description: The new C(snmp_version) configured on the remote device. returned: changed and success type: string sample: "2c" community: description: The new C(community) name for the trap destination. returned: changed and success type: list sample: "secret" destination: description: The new address for the trap destination in either IP or hostname form. returned: changed and success type: string sample: "1.2.3.4" port: description: The new C(port) of the trap destination. returned: changed and success type: string sample: "900" network: description: The new name of the network the SNMP trap is on. 
returned: changed and success type: string sample: "management" ''' from distutils.version import LooseVersion from ansible.module_utils.f5_utils import ( AnsibleF5Client, AnsibleF5Parameters, HAS_F5SDK, F5ModuleError, iControlUnexpectedHTTPError ) class Parameters(AnsibleF5Parameters): api_map = { 'version': 'snmp_version', 'community': 'community', 'host': 'destination' } @property def snmp_version(self): if self._values['snmp_version'] is None: return None return str(self._values['snmp_version']) @property def port(self): if self._values['port'] is None: return None return int(self._values['port']) def to_return(self): result = {} for returnable in self.returnables: result[returnable] = getattr(self, returnable) result = self._filter_params(result) return result def api_params(self): result = {} for api_attribute in self.api_attributes: if self.api_map is not None and api_attribute in self.api_map: result[api_attribute] = getattr(self, self.api_map[api_attribute]) else: result[api_attribute] = getattr(self, api_attribute) result = self._filter_params(result) return result class NetworkedParameters(Parameters): updatables = [ 'snmp_version', 'community', 'destination', 'port', 'network' ] returnables = [ 'snmp_version', 'community', 'destination', 'port', 'network' ] api_attributes = [ 'version', 'community', 'host', 'port', 'network' ] @property def network(self): if self._values['network'] is None: return None network = str(self._values['network']) if network == 'management': return 'mgmt' elif network == 'default': return '' else: return network class NonNetworkedParameters(Parameters): updatables = [ 'snmp_version', 'community', 'destination', 'port' ] returnables = [ 'snmp_version', 'community', 'destination', 'port' ] api_attributes = [ 'version', 'community', 'host', 'port' ] @property def network(self): return None class ModuleManager(object): def __init__(self, client): self.client = client def exec_module(self): if self.is_version_non_networked(): manager = NonNetworkedManager(self.client) else: manager = NetworkedManager(self.client) return manager.exec_module() def is_version_non_networked(self): """Checks to see if the TMOS version is less than 13 Anything less than BIG-IP 13.x does not support users on different partitions. 
:return: Bool """ version = self.client.api.tmos_version if LooseVersion(version) < LooseVersion('12.1.0'): return True else: return False class BaseManager(object): def __init__(self, client): self.client = client self.have = None def exec_module(self): changed = False result = dict() state = self.want.state try: if state == "present": changed = self.present() elif state == "absent": changed = self.absent() except iControlUnexpectedHTTPError as e: raise F5ModuleError(str(e)) changes = self.changes.to_return() result.update(**changes) result.update(dict(changed=changed)) return result def exists(self): result = self.client.api.tm.sys.snmp.traps_s.trap.exists( name=self.want.name, partition=self.want.partition ) return result def present(self): if self.exists(): return self.update() else: return self.create() def create(self): self._set_changed_options() if self.client.check_mode: return True if all(getattr(self.want, v) is None for v in self.required_resources): raise F5ModuleError( "You must specify at least one of " ', '.join(self.required_resources) ) self.create_on_device() return True def should_update(self): result = self._update_changed_options() if result: return True return False def update(self): self.have = self.read_current_from_device() if not self.should_update(): return False if self.client.check_mode: return True self.update_on_device() return True def update_on_device(self): params = self.want.api_params() result = self.client.api.tm.sys.snmp.traps_s.trap.load( name=self.want.name, partition=self.want.partition ) result.modify(**params) def create_on_device(self): params = self.want.api_params() self.client.api.tm.sys.snmp.traps_s.trap.create( name=self.want.name, partition=self.want.partition, **params ) def absent(self): if self.exists(): return self.remove() return False def remove(self): if self.client.check_mode: return True self.remove_from_device() if self.exists(): raise F5ModuleError("Failed to delete the snmp trap") return True def remove_from_device(self): result = self.client.api.tm.sys.snmp.traps_s.trap.load( name=self.want.name, partition=self.want.partition ) if result: result.delete() class NetworkedManager(BaseManager): def __init__(self, client): super(NetworkedManager, self).__init__(client) self.required_resources = [ 'version', 'community', 'destination', 'port', 'network' ] self.want = NetworkedParameters(self.client.module.params) self.changes = NetworkedParameters() def _set_changed_options(self): changed = {} for key in NetworkedParameters.returnables: if getattr(self.want, key) is not None: changed[key] = getattr(self.want, key) if changed: self.changes = NetworkedParameters(changed) def _update_changed_options(self): changed = {} for key in NetworkedParameters.updatables: if getattr(self.want, key) is not None: attr1 = getattr(self.want, key) attr2 = getattr(self.have, key) if attr1 != attr2: changed[key] = attr1 if changed: self.changes = NetworkedParameters(changed) return True return False def read_current_from_device(self): resource = self.client.api.tm.sys.snmp.traps_s.trap.load( name=self.want.name, partition=self.want.partition ) result = resource.attrs self._ensure_network(result) return NetworkedParameters(result) def _ensure_network(self, result): # BIG-IP's value for "default" is that the key does not # exist. 
This conflicts with our purpose of having a key # not exist (which we equate to "i dont want to change that" # therefore, if we load the information from BIG-IP and # find that there is no 'network' key, that is BIG-IP's # way of saying that the network value is "default" if 'network' not in result: result['network'] = 'default' class NonNetworkedManager(BaseManager): def __init__(self, client): super(NonNetworkedManager, self).__init__(client) self.required_resources = [ 'version', 'community', 'destination', 'port' ] self.want = NonNetworkedParameters(self.client.module.params) self.changes = NonNetworkedParameters() def _set_changed_options(self): changed = {} for key in NonNetworkedParameters.returnables: if getattr(self.want, key) is not None: changed[key] = getattr(self.want, key) if changed: self.changes = NonNetworkedParameters(changed) def _update_changed_options(self): changed = {} for key in NonNetworkedParameters.updatables: if getattr(self.want, key) is not None: attr1 = getattr(self.want, key) attr2 = getattr(self.have, key) if attr1 != attr2: changed[key] = attr1 if changed: self.changes = NonNetworkedParameters(changed) return True return False def read_current_from_device(self): resource = self.client.api.tm.sys.snmp.traps_s.trap.load( name=self.want.name, partition=self.want.partition ) result = resource.attrs return NonNetworkedParameters(result) class ArgumentSpec(object): def __init__(self): self.supports_check_mode = True self.argument_spec = dict( name=dict( required=True ), snmp_version=dict( choices=['1', '2c'] ), community=dict(), destination=dict(), port=dict(), network=dict( choices=['other', 'management', 'default'] ), state=dict( default='present', choices=['absent', 'present'] ) ) self.f5_product_name = 'bigip' def main(): if not HAS_F5SDK: raise F5ModuleError("The python f5-sdk module is required") spec = ArgumentSpec() client = AnsibleF5Client( argument_spec=spec.argument_spec, supports_check_mode=spec.supports_check_mode, f5_product_name=spec.f5_product_name ) mm = ModuleManager(client) results = mm.exec_module() client.module.exit_json(**results) if __name__ == '__main__': main()
gpl-3.0
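ModuleManager.is_version_non_networked above gates on a LooseVersion comparison; note its docstring mentions "less than 13" and partitioned users, but the code actually tests for BIG-IP versions below 12.1.0, where the network option is unsupported. The check in isolation (supports_network_option is a hypothetical wrapper):

```python
from distutils.version import LooseVersion

def supports_network_option(tmos_version):
    # BIG-IP < 12.1.0 has no 'network' field on SNMP traps.
    return LooseVersion(tmos_version) >= LooseVersion("12.1.0")

assert supports_network_option("12.1.0")
assert not supports_network_option("11.6.1")
```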
suto/infernal-twin
build/reportlab/src/reportlab/pdfbase/_fontdata_enc_macroman.py
56
2934
MacRomanEncoding = ( None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, 'space', 'exclam', 'quotedbl', 'numbersign', 'dollar', 'percent', 'ampersand', 'quotesingle', 'parenleft', 'parenright', 'asterisk', 'plus', 'comma', 'hyphen', 'period', 'slash', 'zero', 'one', 'two', 'three', 'four', 'five', 'six', 'seven', 'eight', 'nine', 'colon', 'semicolon', 'less', 'equal', 'greater', 'question', 'at', 'A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I', 'J', 'K', 'L', 'M', 'N', 'O', 'P', 'Q', 'R', 'S', 'T', 'U', 'V', 'W', 'X', 'Y', 'Z', 'bracketleft', 'backslash', 'bracketright', 'asciicircum', 'underscore', 'grave', 'a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j', 'k', 'l', 'm', 'n', 'o', 'p', 'q', 'r', 's', 't', 'u', 'v', 'w', 'x', 'y', 'z', 'braceleft', 'bar', 'braceright', 'asciitilde', None, 'Adieresis', 'Aring', 'Ccedilla', 'Eacute', 'Ntilde', 'Odieresis', 'Udieresis', 'aacute', 'agrave', 'acircumflex', 'adieresis', 'atilde', 'aring', 'ccedilla', 'eacute', 'egrave', 'ecircumflex', 'edieresis', 'iacute', 'igrave', 'icircumflex', 'idieresis', 'ntilde', 'oacute', 'ograve', 'ocircumflex', 'odieresis', 'otilde', 'uacute', 'ugrave', 'ucircumflex', 'udieresis', 'dagger', 'degree', 'cent', 'sterling', 'section', 'bullet', 'paragraph', 'germandbls', 'registered', 'copyright', 'trademark', 'acute', 'dieresis', None, 'AE', 'Oslash', None, 'plusminus', None, None, 'yen', 'mu', None, None, None, None, None, 'ordfeminine', 'ordmasculine', None, 'ae', 'oslash', 'questiondown', 'exclamdown', 'logicalnot', None, 'florin', None, None, 'guillemotleft', 'guillemotright', 'ellipsis', 'space', 'Agrave', 'Atilde', 'Otilde', 'OE', 'oe', 'endash', 'emdash', 'quotedblleft', 'quotedblright', 'quoteleft', 'quoteright', 'divide', None, 'ydieresis', 'Ydieresis', 'fraction', 'currency', 'guilsinglleft', 'guilsinglright', 'fi', 'fl', 'daggerdbl', 'periodcentered', 'quotesinglbase', 'quotedblbase', 'perthousand', 'Acircumflex', 'Ecircumflex', 'Aacute', 'Edieresis', 'Egrave', 'Iacute', 'Icircumflex', 'Idieresis', 'Igrave', 'Oacute', 'Ocircumflex', None, 'Ograve', 'Uacute', 'Ucircumflex', 'Ugrave', 'dotlessi', 'circumflex', 'tilde', 'macron', 'breve', 'dotaccent', 'ring', 'cedilla', 'hungarumlaut', 'ogonek', 'caron')
gpl-3.0
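The tuple above maps each MacRoman byte value (its index) to a PostScript glyph name, with None for unassigned codes. Looking up glyphs for MacRoman-encoded text, assuming reportlab is installed so the module at the path in this record is importable:

```python
from reportlab.pdfbase._fontdata_enc_macroman import MacRomanEncoding

# Encode text as MacRoman, then map each byte to its glyph name.
for byte in "café".encode("mac_roman"):
    print(byte, MacRomanEncoding[byte])
# 99 c, 97 a, 102 f, 142 eacute
```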
1986ks/chainer
tests/cupy_tests/math_tests/test_explog.py
12
1221
import unittest import numpy from cupy import testing @testing.gpu class TestExplog(unittest.TestCase): _multiprocess_can_split_ = True @testing.for_all_dtypes() @testing.numpy_cupy_allclose(atol=1e-5) def check_unary(self, name, xp, dtype): numpy.seterr(divide='ignore') a = testing.shaped_arange((2, 3), xp, dtype) return getattr(xp, name)(a) @testing.for_all_dtypes() @testing.numpy_cupy_allclose(atol=1e-5) def check_binary(self, name, xp, dtype): a = testing.shaped_arange((2, 3), xp, dtype) b = testing.shaped_reverse_arange((2, 3), xp, dtype) return getattr(xp, name)(a, b) def test_exp(self): self.check_unary('exp') def test_expm1(self): self.check_unary('expm1') def test_exp2(self): self.check_unary('exp2') def test_log(self): self.check_unary('log') def test_log10(self): self.check_unary('log10') def test_log2(self): self.check_unary('log2') def test_log1p(self): self.check_unary('log1p') def test_logaddexp(self): self.check_binary('logaddexp') def test_logaddexp2(self): self.check_binary('logaddexp2')
mit
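The testing decorators above run each check once with xp=numpy and once with xp=cupy, then assert the results agree within atol. The same comparison written out by hand, assuming cupy and a GPU are available:

```python
import numpy
import cupy

a_np = numpy.arange(1, 7, dtype=numpy.float32).reshape(2, 3)
a_cp = cupy.arange(1, 7, dtype=cupy.float32).reshape(2, 3)

# Bring the GPU result back to the host and compare, as the decorator does.
numpy.testing.assert_allclose(
    numpy.log(a_np), cupy.asnumpy(cupy.log(a_cp)), atol=1e-5)
```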
DSMan195276/protura-binutils
gdb/testsuite/gdb.python/py-events.py
46
4480
# Copyright (C) 2010-2015 Free Software Foundation, Inc. # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # This file is part of the GDB testsuite. It tests python pretty # printers. import gdb def signal_stop_handler (event): if (isinstance (event, gdb.StopEvent)): print ("event type: stop") if (isinstance (event, gdb.SignalEvent)): print ("stop reason: signal") print ("stop signal: %s" % (event.stop_signal)) if ( event.inferior_thread is not None) : print ("thread num: %s" % (event.inferior_thread.num)) def breakpoint_stop_handler (event): if (isinstance (event, gdb.StopEvent)): print ("event type: stop") if (isinstance (event, gdb.BreakpointEvent)): print ("stop reason: breakpoint") print ("first breakpoint number: %s" % (event.breakpoint.number)) for bp in event.breakpoints: print ("breakpoint number: %s" % (bp.number)) if ( event.inferior_thread is not None) : print ("thread num: %s" % (event.inferior_thread.num)) else: print ("all threads stopped") def exit_handler (event): assert (isinstance (event, gdb.ExitedEvent)) print ("event type: exit") print ("exit code: %d" % (event.exit_code)) print ("exit inf: %d" % (event.inferior.num)) print ("dir ok: %s" % str('exit_code' in dir(event))) def continue_handler (event): assert (isinstance (event, gdb.ContinueEvent)) print ("event type: continue") if ( event.inferior_thread is not None) : print ("thread num: %s" % (event.inferior_thread.num)) def new_objfile_handler (event): assert (isinstance (event, gdb.NewObjFileEvent)) print ("event type: new_objfile") print ("new objfile name: %s" % (event.new_objfile.filename)) def clear_objfiles_handler (event): assert (isinstance (event, gdb.ClearObjFilesEvent)) print ("event type: clear_objfiles") print ("progspace: %s" % (event.progspace.filename)) def inferior_call_handler (event): if (isinstance (event, gdb.InferiorCallPreEvent)): print ("event type: pre-call") elif (isinstance (event, gdb.InferiorCallPostEvent)): print ("event type: post-call") else: assert False print ("ptid: %s" % (event.ptid,)) print ("address: 0x%x" % (event.address)) def register_changed_handler (event): assert (isinstance (event, gdb.RegisterChangedEvent)) print ("event type: register-changed") assert (isinstance (event.frame, gdb.Frame)) print ("frame: %s" % (event.frame)) print ("num: %s" % (event.regnum)) def memory_changed_handler (event): assert (isinstance (event, gdb.MemoryChangedEvent)) print ("event type: memory-changed") print ("address: %s" % (event.address)) print ("length: %s" % (event.length)) class test_events (gdb.Command): """Test events.""" def __init__ (self): gdb.Command.__init__ (self, "test-events", gdb.COMMAND_STACK) def invoke (self, arg, from_tty): gdb.events.stop.connect (signal_stop_handler) gdb.events.stop.connect (breakpoint_stop_handler) gdb.events.exited.connect (exit_handler) gdb.events.cont.connect (continue_handler) gdb.events.inferior_call.connect (inferior_call_handler) gdb.events.memory_changed.connect 
(memory_changed_handler) gdb.events.register_changed.connect (register_changed_handler) print ("Event testers registered.") test_events () class test_newobj_events (gdb.Command): """NewObj events.""" def __init__ (self): gdb.Command.__init__ (self, "test-objfile-events", gdb.COMMAND_STACK) def invoke (self, arg, from_tty): gdb.events.new_objfile.connect (new_objfile_handler) gdb.events.clear_objfiles.connect (clear_objfiles_handler) print ("Object file events registered.") test_newobj_events ()
gpl-2.0
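The commands in this record register many handlers at once; connecting a single stop handler from a gdb Python session is enough to see the event API in action. This must run inside gdb (e.g. via the source command), since the gdb module only exists there:

```python
import gdb

def on_stop(event):
    # BreakpointEvent is a subclass of StopEvent.
    if isinstance(event, gdb.BreakpointEvent):
        print("hit breakpoint %s" % event.breakpoint.number)

gdb.events.stop.connect(on_stop)
```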
afb/0install
zeroinstall/injector/handler.py
1
9160
""" Integrates download callbacks with an external mainloop. While things are being downloaded, Zero Install returns control to your program. Your mainloop is responsible for monitoring the state of the downloads and notifying Zero Install when they are complete. To do this, you supply a L{Handler} to the L{policy}. """ # Copyright (C) 2009, Thomas Leonard # See the README file for details, or visit http://0install.net. from __future__ import print_function from zeroinstall import _, logger import sys if sys.version_info[0] < 3: import __builtin__ as builtins else: import builtins from zeroinstall import SafeException from zeroinstall import support from zeroinstall.support import tasks from zeroinstall.injector import download class NoTrustedKeys(SafeException): """Thrown by L{Handler.confirm_import_feed} on failure.""" pass class Handler(object): """ A Handler is used to interact with the user (e.g. to confirm keys, display download progress, etc). @ivar monitored_downloads: set of downloads in progress @type monitored_downloads: {L{download.Download}} @ivar n_completed_downloads: number of downloads which have finished for GUIs, etc (can be reset as desired). @type n_completed_downloads: int @ivar total_bytes_downloaded: informational counter for GUIs, etc (can be reset as desired). Updated when download finishes. @type total_bytes_downloaded: int @ivar dry_run: don't write or execute any files, just print notes about what we would have done to stdout @type dry_run: bool """ __slots__ = ['monitored_downloads', 'dry_run', 'total_bytes_downloaded', 'n_completed_downloads'] def __init__(self, mainloop = None, dry_run = False): """@type dry_run: bool""" self.monitored_downloads = set() self.dry_run = dry_run self.n_completed_downloads = 0 self.total_bytes_downloaded = 0 def monitor_download(self, dl): """Called when a new L{download} is started. This is mainly used by the GUI to display the progress bar. @type dl: L{zeroinstall.injector.download.Download}""" self.monitored_downloads.add(dl) self.downloads_changed() @tasks.async def download_done_stats(): yield dl.downloaded # NB: we don't check for exceptions here; someone else should be doing that try: self.n_completed_downloads += 1 self.total_bytes_downloaded += dl.get_bytes_downloaded_so_far() self.monitored_downloads.remove(dl) self.downloads_changed() except Exception as ex: self.report_error(ex) download_done_stats() def impl_added_to_store(self, impl): """Called by the L{fetch.Fetcher} when adding an implementation. The GUI uses this to update its display. @param impl: the implementation which has been added @type impl: L{model.Implementation}""" pass def downloads_changed(self): """This is just for the GUI to override to update its display.""" pass @tasks.async def confirm_import_feed(self, pending, valid_sigs, retval): """Sub-classes should override this method to interact with the user about new feeds. If multiple feeds need confirmation, L{trust.TrustMgr.confirm_keys} will only invoke one instance of this method at a time. 
@param pending: the new feed to be imported @type pending: L{PendingFeed} @param valid_sigs: maps signatures to a list of fetchers collecting information about the key @type valid_sigs: {L{gpg.ValidSig} : L{fetch.KeyInfoFetcher}} @since: 0.42""" from zeroinstall.injector import trust assert valid_sigs domain = trust.domain_from_url(pending.url) # Ask on stderr, because we may be writing XML to stdout print(_("Feed: %s") % pending.url, file=sys.stderr) print(_("The feed is correctly signed with the following keys:"), file=sys.stderr) for x in valid_sigs: print("-", x, file=sys.stderr) def text(parent): text = "" for node in parent.childNodes: if node.nodeType == node.TEXT_NODE: text = text + node.data return text shown = set() key_info_fetchers = valid_sigs.values() while key_info_fetchers: old_kfs = key_info_fetchers key_info_fetchers = [] for kf in old_kfs: infos = set(kf.info) - shown if infos: if len(valid_sigs) > 1: print("%s: " % kf.fingerprint) for key_info in infos: print("-", text(key_info), file=sys.stderr) shown.add(key_info) if kf.blocker: key_info_fetchers.append(kf) if key_info_fetchers: for kf in key_info_fetchers: print(kf.status, file=sys.stderr) stdin = tasks.InputBlocker(0, 'console') blockers = [kf.blocker for kf in key_info_fetchers] + [stdin] yield blockers for b in blockers: try: tasks.check(b) except Exception as ex: logger.warning(_("Failed to get key info: %s"), ex) if stdin.happened: print(_("Skipping remaining key lookups due to input from user"), file=sys.stderr) break if not shown: print(_("Warning: Nothing known about this key!"), file=sys.stderr) if len(valid_sigs) == 1: print(_("Do you want to trust this key to sign feeds from '%s'?") % domain, file=sys.stderr) else: print(_("Do you want to trust all of these keys to sign feeds from '%s'?") % domain, file=sys.stderr) while True: print(_("Trust [Y/N] "), end=' ', file=sys.stderr) sys.stderr.flush() i = support.raw_input() if not i: continue if i in 'Nn': raise NoTrustedKeys(_('Not signed with a trusted key')) if i in 'Yy': break trust.trust_db._dry_run = self.dry_run retval.extend([key.fingerprint for key in valid_sigs]) @tasks.async def confirm_install(self, msg): """We need to check something with the user before continuing with the install. @raise download.DownloadAborted: if the user cancels""" yield print(msg, file=sys.stderr) while True: sys.stderr.write(_("Install [Y/N] ")) sys.stderr.flush() i = support.raw_input() if not i: continue if i in 'Nn': raise download.DownloadAborted() if i in 'Yy': break def report_error(self, exception, tb = None): """Report an exception to the user. @param exception: the exception to report @type exception: L{SafeException} @param tb: optional traceback @since: 0.25""" import logging logger.warning("%s", str(exception) or type(exception), exc_info = (exception, exception, tb) if logger.isEnabledFor(logging.INFO) else None) class ConsoleHandler(Handler): """A Handler that displays progress on stderr (a tty). (we use stderr because we use stdout to talk to the OCaml process) @since: 0.44""" last_msg_len = None update = None disable_progress = 0 screen_width = None # While we are displaying progress, we override builtins.print to clear the display first. 
original_print = None def downloads_changed(self): if self.monitored_downloads and self.update is None: if self.screen_width is None: try: import curses curses.setupterm() self.screen_width = curses.tigetnum('cols') or 80 except Exception as ex: logger.info("Failed to initialise curses library: %s", ex) self.screen_width = 80 self.show_progress() self.original_print = print builtins.print = self.print self.update = tasks.get_loop().call_repeatedly(0.2, self.show_progress) elif len(self.monitored_downloads) == 0: if self.update: self.update.cancel() self.update = None builtins.print = self.original_print self.original_print = None self.clear_display() def show_progress(self): if not self.monitored_downloads: return urls = [(dl.url, dl) for dl in self.monitored_downloads] if self.disable_progress: return screen_width = self.screen_width - 2 item_width = max(16, screen_width // len(self.monitored_downloads)) url_width = item_width - 7 msg = "" for url, dl in sorted(urls): so_far = dl.get_bytes_downloaded_so_far() if url.endswith('/latest.xml'): url = url[:-10] # remove latest.xml from mirror URLs leaf = url.rsplit('/', 1)[-1] if len(leaf) >= url_width: display = leaf[:url_width] else: display = url[-url_width:] if dl.expected_size: msg += "[%s %d%%] " % (display, int(so_far * 100 / dl.expected_size)) else: msg += "[%s] " % (display) msg = msg[:screen_width] if self.last_msg_len is None: sys.stderr.write(msg) else: sys.stderr.write(chr(13) + msg) if len(msg) < self.last_msg_len: sys.stderr.write(" " * (self.last_msg_len - len(msg))) self.last_msg_len = len(msg) sys.stderr.flush() return def clear_display(self): if self.last_msg_len != None: sys.stderr.write(chr(13) + " " * self.last_msg_len + chr(13)) sys.stderr.flush() self.last_msg_len = None def report_error(self, exception, tb = None): self.clear_display() Handler.report_error(self, exception, tb) def confirm_import_feed(self, pending, valid_sigs, retval): self.clear_display() self.disable_progress += 1 blocker = Handler.confirm_import_feed(self, pending, valid_sigs, retval) @tasks.async def enable(): yield blocker self.disable_progress -= 1 self.show_progress() enable() return blocker def print(self, *args, **kwargs): self.clear_display() self.original_print(*args, **kwargs)
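As a quick illustration of the hook points above, a client can subclass Handler and override downloads_changed(), which the docstrings describe as the override point for GUIs. A minimal sketch, assuming this file is importable as zeroinstall.injector.handler as in the upstream project (the LoggingHandler name and output format are invented):

from zeroinstall.injector.handler import Handler

class LoggingHandler(Handler):
    """Illustrative subclass: downloads_changed() is the documented override hook."""
    def downloads_changed(self):
        # Called by monitor_download()/download_done_stats() whenever a
        # download is added to or removed from self.monitored_downloads.
        print("%d download(s) in flight, %d completed so far" % (
            len(self.monitored_downloads), self.n_completed_downloads))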
lgpl-2.1
OpenDroneMap/WebODM
app/api/common.py
1
1763
from django.core.exceptions import ObjectDoesNotExist, SuspiciousFileOperation
from rest_framework import exceptions
import os

from app import models


def get_and_check_project(request, project_pk, perms=('view_project',)):
    """
    Django comes with a standard `model level` permission system. You can
    check whether users are logged-in and have privileges to act on things
    model wise (can a user add a project? can a user view projects?).

    Django-guardian adds a `row level` permission system. Now not only can you
    decide whether a user can add a project or view projects, you can specify
    exactly which projects a user does or does not have access to.

    This is the reason for the following function: tasks are part of a
    project, and it would add a tremendous headache (and redundancy) to
    specify row level permissions for each task. Instead, we check the row
    level permissions of the project to which a task belongs.

    Perhaps this could be added as a django-rest filter?

    Retrieves a project and raises an exception if the current user
    has no access to it.
    """
    try:
        project = models.Project.objects.get(pk=project_pk, deleting=False)
        for perm in perms:
            if not request.user.has_perm(perm, project):
                raise ObjectDoesNotExist()
    except ObjectDoesNotExist:
        raise exceptions.NotFound()
    return project


def path_traversal_check(unsafe_path, known_safe_path):
    known_safe_path = os.path.abspath(known_safe_path)
    unsafe_path = os.path.abspath(unsafe_path)

    if (os.path.commonprefix([known_safe_path, unsafe_path]) != known_safe_path):
        raise SuspiciousFileOperation("{} is not safe".format(unsafe_path))

    # Passes the check
    return unsafe_path
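A hypothetical call site for the two helpers above (the TaskList view and the /data/project paths are invented for illustration):

from rest_framework.views import APIView
from rest_framework.response import Response

class TaskList(APIView):
    def get(self, request, project_pk=None):
        # Raises rest_framework.exceptions.NotFound unless the requesting user
        # holds the 'view_project' row-level permission on this project.
        project = get_and_check_project(request, project_pk)
        return Response({"project": project.id})

# path_traversal_check returns paths under the safe root and raises otherwise:
path_traversal_check("/data/project/assets/img.tif", "/data/project")  # returned unchanged
path_traversal_check("/data/project/../etc/passwd", "/data/project")   # raises SuspiciousFileOperation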
mpl-2.0
slank/ansible
lib/ansible/module_utils/api.py
106
3560
#
# (c) 2015 Brian Coca, <bcoca@ansible.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
"""
This module adds shared support for generic api modules

In order to use this module, include it as part of a custom
module as shown below.

** Note: The order of the import statements does matter. **

from ansible.module_utils.basic import *
from ansible.module_utils.api import *

The 'api' module provides the following common argument specs:

    * rate limit spec
        - rate: number of requests per time unit (int)
        - rate_limit: time window in which the limit is applied in seconds

    * retry spec
        - retries: number of attempts
        - retry_pause: delay between attempts in seconds
"""
import time


def rate_limit_argument_spec(spec=None):
    """Creates an argument spec for working with rate limiting"""
    arg_spec = (dict(
        rate=dict(type='int'),
        rate_limit=dict(type='int'),
    ))
    if spec:
        arg_spec.update(spec)
    return arg_spec


def retry_argument_spec(spec=None):
    """Creates an argument spec for working with retrying"""
    arg_spec = (dict(
        retries=dict(type='int'),
        retry_pause=dict(type='float', default=1),
    ))
    if spec:
        arg_spec.update(spec)
    return arg_spec


def basic_auth_argument_spec(spec=None):
    arg_spec = (dict(
        api_username=dict(type='str', required=False),
        api_password=dict(type='str', required=False, no_log=True),
        api_url=dict(type='str', required=False),
        validate_certs=dict(type='bool', default=True)
    ))
    if spec:
        arg_spec.update(spec)
    return arg_spec


def rate_limit(rate=None, rate_limit=None):
    """rate limiting decorator"""
    minrate = None
    if rate is not None and rate_limit is not None:
        minrate = float(rate_limit) / float(rate)

    def wrapper(f):
        last = [0.0]

        def ratelimited(*args, **kwargs):
            if minrate is not None:
                # wall-clock time; time.clock() measured CPU time and was
                # removed in Python 3.8, which made the limiter miscount
                elapsed = time.time() - last[0]
                left = minrate - elapsed
                if left > 0:
                    time.sleep(left)
                last[0] = time.time()
            ret = f(*args, **kwargs)
            return ret
        return ratelimited
    return wrapper


def retry(retries=None, retry_pause=1):
    """Retry decorator"""
    def wrapper(f):

        def retried(*args, **kwargs):
            # the counter lives inside the wrapped call so that each
            # invocation gets its own budget (a closure-level counter
            # would raise UnboundLocalError on the += below)
            retry_count = 0
            if retries is not None:
                ret = None
                while True:
                    retry_count += 1
                    if retry_count >= retries:
                        raise Exception("Retry limit exceeded: %d" % retries)
                    try:
                        ret = f(*args, **kwargs)
                    except Exception:
                        pass
                    if ret:
                        break
                    time.sleep(retry_pause)
                return ret
        return retried
    return wrapper
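A sketch of how the argument specs and decorators compose; fetch_page and its parameter are invented, and in a real module the decorator values would come from module.params:

from ansible.module_utils.api import (
    basic_auth_argument_spec, rate_limit_argument_spec, retry_argument_spec,
    rate_limit, retry)

# Build one spec dict carrying auth, rate-limit and retry options.
argument_spec = basic_auth_argument_spec(rate_limit_argument_spec(retry_argument_spec()))

@rate_limit(rate=10, rate_limit=60)  # at most 10 calls per 60-second window
@retry(retries=3, retry_pause=2.0)   # up to 3 attempts, pausing 2s between them
def fetch_page(url):
    return url  # placeholder for a real HTTP request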
gpl-3.0
yanheven/nova
nova/virt/libvirt/remotefs.py
34
2255
# Copyright 2014 Cloudbase Solutions Srl
# All Rights Reserved.
#
#    Licensed under the Apache License, Version 2.0 (the "License"); you may
#    not use this file except in compliance with the License. You may obtain
#    a copy of the License at
#
#         http://www.apache.org/licenses/LICENSE-2.0
#
#    Unless required by applicable law or agreed to in writing, software
#    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
#    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
#    License for the specific language governing permissions and limitations
#    under the License.

from oslo_concurrency import processutils
from oslo_log import log as logging

from nova.i18n import _LE, _LW
from nova import utils

LOG = logging.getLogger(__name__)


def mount_share(mount_path, export_path, export_type, options=None):
    """Mount a remote export to mount_path.

    :param mount_path: place where the remote export will be mounted
    :param export_path: path of the export to be mounted
    :param export_type: remote export type (e.g. cifs, nfs, etc.)
    :param options: A list containing mount options
    """
    utils.execute('mkdir', '-p', mount_path)

    mount_cmd = ['mount', '-t', export_type]
    if options is not None:
        mount_cmd.extend(options)
    mount_cmd.extend([export_path, mount_path])

    try:
        utils.execute(*mount_cmd, run_as_root=True)
    except processutils.ProcessExecutionError as exc:
        if 'Device or resource busy' in exc.message:
            LOG.warning(_LW("%s is already mounted"), export_path)
        else:
            raise


def unmount_share(mount_path, export_path):
    """Unmount a remote share.

    :param mount_path: remote export mount point
    :param export_path: path of the remote export to be unmounted
    """
    try:
        utils.execute('umount', mount_path, run_as_root=True,
                      attempts=3, delay_on_retry=True)
    except processutils.ProcessExecutionError as exc:
        if 'target is busy' in exc.message:
            LOG.debug("The share %s is still in use.", export_path)
        else:
            LOG.exception(_LE("Couldn't unmount the share %s"), export_path)
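Hypothetical call sites for the two helpers (host, export and mount point are invented; both shell out through nova.utils.execute, so they need root privileges and an oslo-configured environment):

# Mount an NFS export read-write, then unmount it again.
mount_share('/mnt/instances', '192.168.0.10:/srv/nova', 'nfs', options=['-o', 'rw'])
unmount_share('/mnt/instances', '192.168.0.10:/srv/nova')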
apache-2.0
ic-labs/django-icekit
icekit_events/page_types/eventlistingfordate/migrations/0004_auto_20161115_1118.py
2
1204
# -*- coding: utf-8 -*-
from __future__ import unicode_literals

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('icekit_plugins_image', '0001_initial'),
        ('eventlistingfordate', '0003_auto_20161019_1906'),
    ]

    operations = [
        migrations.AddField(
            model_name='eventlistingpage',
            name='boosted_search_terms',
            field=models.TextField(blank=True, help_text='Words (space-separated) added here are boosted in relevance for search results increasing the chance of this appearing higher in the search results.'),
        ),
        migrations.AddField(
            model_name='eventlistingpage',
            name='hero_image',
            field=models.ForeignKey(related_name='+', blank=True, null=True, help_text=b'The hero image for this content.', to='icekit_plugins_image.Image'),
        ),
        migrations.AddField(
            model_name='eventlistingpage',
            name='list_image',
            field=models.ImageField(blank=True, help_text=b"image to use in listings. Default image is used if this isn't given", upload_to=b'icekit/listable/list_image/'),
        ),
    ]
mit
MostafaGazar/tensorflow
tensorflow/contrib/learn/python/learn/dataframe/dataframe.py
85
4704
# Copyright 2016 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== """A DataFrame is a container for ingesting and preprocessing data.""" from __future__ import absolute_import from __future__ import division from __future__ import print_function import collections from .series import Series from .transform import Transform class DataFrame(object): """A DataFrame is a container for ingesting and preprocessing data.""" def __init__(self): self._columns = {} def columns(self): """Set of the column names.""" return frozenset(self._columns.keys()) def __len__(self): """The number of columns in the DataFrame.""" return len(self._columns) def assign(self, **kwargs): """Adds columns to DataFrame. Args: **kwargs: assignments of the form key=value where key is a string and value is an `inflow.Series`, a `pandas.Series` or a numpy array. Raises: TypeError: keys are not strings. TypeError: values are not `inflow.Series`, `pandas.Series` or `numpy.ndarray`. TODO(jamieas): pandas assign method returns a new DataFrame. Consider switching to this behavior, changing the name or adding in_place as an argument. """ for k, v in kwargs.items(): if not isinstance(k, str): raise TypeError("The only supported type for keys is string; got %s" % type(k)) if v is None: del self._columns[k] elif isinstance(v, Series): self._columns[k] = v elif isinstance(v, Transform) and v.input_valency() == 0: self._columns[k] = v() else: raise TypeError( "Column in assignment must be an inflow.Series, inflow.Transform," " or None; got type '%s'." % type(v).__name__) def select_columns(self, keys): """Returns a new DataFrame with a subset of columns. Args: keys: A list of strings. Each should be the name of a column in the DataFrame. Returns: A new DataFrame containing only the specified columns. """ result = type(self)() for key in keys: result[key] = self._columns[key] return result def exclude_columns(self, exclude_keys): """Returns a new DataFrame with all columns not excluded via exclude_keys. Args: exclude_keys: A list of strings. Each should be the name of a column in the DataFrame. These columns will be excluded from the result. Returns: A new DataFrame containing all columns except those specified. """ result = type(self)() for key, value in self._columns.items(): if key not in exclude_keys: result[key] = value return result def __getitem__(self, key): """Indexing functionality for DataFrames. Args: key: a string or an iterable of strings. Returns: A Series or list of Series corresponding to the given keys. """ if isinstance(key, str): return self._columns[key] elif isinstance(key, collections.Iterable): for i in key: if not isinstance(i, str): raise TypeError("Expected a String; entry %s has type %s." % (i, type(i).__name__)) return [self.__getitem__(i) for i in key] raise TypeError( "Invalid index: %s of type %s. Only strings or lists of strings are " "supported." 
% (key, type(key))) def __setitem__(self, key, value): if isinstance(key, str): key = [key] if isinstance(value, Series): value = [value] self.assign(**dict(zip(key, value))) def __delitem__(self, key): if isinstance(key, str): key = [key] value = [None for _ in key] self.assign(**dict(zip(key, value))) def build(self, **kwargs): # We do not allow passing a cache here, because that would encourage # working around the rule that DataFrames cannot be expected to be # synced with each other (e.g., they shuffle independently). cache = {} tensors = {name: c.build(cache, **kwargs) for name, c in self._columns.items()} return tensors
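A small sketch of the dict-style API above. ConstantSeries is a stand-in written only for illustration (real columns are produced by transforms), and the imports assume a TensorFlow 1.x tree where these contrib modules still exist:

from tensorflow.contrib.learn.python.learn.dataframe.series import Series
from tensorflow.contrib.learn.python.learn.dataframe.dataframe import DataFrame

class ConstantSeries(Series):
    """Toy column whose build() yields a constant; just exercises the API."""
    def __init__(self, value):
        self._value = value

    def build(self, cache, **kwargs):
        return self._value

df = DataFrame()
df.assign(answer=ConstantSeries(42), extra=ConstantSeries(7))
print(df.columns())                   # frozenset({'answer', 'extra'})
sub = df.exclude_columns(['extra'])   # new DataFrame without 'extra'
print(sub.build())                    # {'answer': 42}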
apache-2.0
willusher/ansible-modules-core
files/replace.py
6
5594
#!/usr/bin/python # -*- coding: utf-8 -*- # (c) 2013, Evan Kaufman <evan@digitalflophouse.com # # This file is part of Ansible # # Ansible is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Ansible is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Ansible. If not, see <http://www.gnu.org/licenses/>. import re import os import tempfile DOCUMENTATION = """ --- module: replace author: "Evan Kaufman (@EvanK)" extends_documentation_fragment: - files - validate short_description: Replace all instances of a particular string in a file using a back-referenced regular expression. description: - This module will replace all instances of a pattern within a file. - It is up to the user to maintain idempotence by ensuring that the same pattern would never match any replacements made. version_added: "1.6" options: dest: required: true aliases: [ name, destfile ] description: - The file to modify. regexp: required: true description: - The regular expression to look for in the contents of the file. Uses Python regular expressions; see U(http://docs.python.org/2/library/re.html). Uses multiline mode, which means C(^) and C($) match the beginning and end respectively of I(each line) of the file. replace: required: false description: - The string to replace regexp matches. May contain backreferences that will get expanded with the regexp capture groups if the regexp matches. If not set, matches are removed entirely. backup: required: false default: "no" choices: [ "yes", "no" ] description: - Create a backup file including the timestamp information so you can get the original file back if you somehow clobbered it incorrectly. others: description: - All arguments accepted by the M(file) module also work here. required: false follow: required: false default: "no" choices: [ "yes", "no" ] version_added: "1.9" description: - 'This flag indicates that filesystem links, if they exist, should be followed.' 
""" EXAMPLES = r""" - replace: dest=/etc/hosts regexp='(\s+)old\.host\.name(\s+.*)?$' replace='\1new.host.name\2' backup=yes - replace: dest=/home/jdoe/.ssh/known_hosts regexp='^old\.host\.name[^\n]*\n' owner=jdoe group=jdoe mode=644 - replace: dest=/etc/apache/ports regexp='^(NameVirtualHost|Listen)\s+80\s*$' replace='\1 127.0.0.1:8080' validate='/usr/sbin/apache2ctl -f %s -t' """ def write_changes(module,contents,dest): tmpfd, tmpfile = tempfile.mkstemp() f = os.fdopen(tmpfd,'wb') f.write(contents) f.close() validate = module.params.get('validate', None) valid = not validate if validate: if "%s" not in validate: module.fail_json(msg="validate must contain %%s: %s" % (validate)) (rc, out, err) = module.run_command(validate % tmpfile) valid = rc == 0 if rc != 0: module.fail_json(msg='failed to validate: ' 'rc:%s error:%s' % (rc,err)) if valid: module.atomic_move(tmpfile, dest, unsafe_writes=module.params['unsafe_writes']) def check_file_attrs(module, changed, message): file_args = module.load_file_common_arguments(module.params) if module.set_file_attributes_if_different(file_args, False): if changed: message += " and " changed = True message += "ownership, perms or SE linux context changed" return message, changed def main(): module = AnsibleModule( argument_spec=dict( dest=dict(required=True, aliases=['name', 'destfile']), regexp=dict(required=True), replace=dict(default='', type='str'), backup=dict(default=False, type='bool'), validate=dict(default=None, type='str'), ), add_file_common_args=True, supports_check_mode=True ) params = module.params dest = os.path.expanduser(params['dest']) if os.path.isdir(dest): module.fail_json(rc=256, msg='Destination %s is a directory !' % dest) if not os.path.exists(dest): module.fail_json(rc=257, msg='Destination %s does not exist !' % dest) else: f = open(dest, 'rb') contents = f.read() f.close() mre = re.compile(params['regexp'], re.MULTILINE) result = re.subn(mre, params['replace'], contents, 0) if result[1] > 0 and contents != result[0]: msg = '%s replacements made' % result[1] changed = True else: msg = '' changed = False if changed and not module.check_mode: if params['backup'] and os.path.exists(dest): module.backup_local(dest) if params['follow'] and os.path.islink(dest): dest = os.path.realpath(dest) write_changes(module, result[0], dest) msg, changed = check_file_attrs(module, changed, msg) module.exit_json(changed=changed, msg=msg) # this is magic, see lib/ansible/module_common.py from ansible.module_utils.basic import * if __name__ == '__main__': main()
gpl-3.0
oberlin/django
django/templatetags/i18n.py
219
19311
from __future__ import unicode_literals import sys from django.conf import settings from django.template import Library, Node, TemplateSyntaxError, Variable from django.template.base import TOKEN_TEXT, TOKEN_VAR, render_value_in_context from django.template.defaulttags import token_kwargs from django.utils import six, translation from django.utils.safestring import SafeData, mark_safe register = Library() class GetAvailableLanguagesNode(Node): def __init__(self, variable): self.variable = variable def render(self, context): context[self.variable] = [(k, translation.ugettext(v)) for k, v in settings.LANGUAGES] return '' class GetLanguageInfoNode(Node): def __init__(self, lang_code, variable): self.lang_code = lang_code self.variable = variable def render(self, context): lang_code = self.lang_code.resolve(context) context[self.variable] = translation.get_language_info(lang_code) return '' class GetLanguageInfoListNode(Node): def __init__(self, languages, variable): self.languages = languages self.variable = variable def get_language_info(self, language): # ``language`` is either a language code string or a sequence # with the language code as its first item if len(language[0]) > 1: return translation.get_language_info(language[0]) else: return translation.get_language_info(str(language)) def render(self, context): langs = self.languages.resolve(context) context[self.variable] = [self.get_language_info(lang) for lang in langs] return '' class GetCurrentLanguageNode(Node): def __init__(self, variable): self.variable = variable def render(self, context): context[self.variable] = translation.get_language() return '' class GetCurrentLanguageBidiNode(Node): def __init__(self, variable): self.variable = variable def render(self, context): context[self.variable] = translation.get_language_bidi() return '' class TranslateNode(Node): def __init__(self, filter_expression, noop, asvar=None, message_context=None): self.noop = noop self.asvar = asvar self.message_context = message_context self.filter_expression = filter_expression if isinstance(self.filter_expression.var, six.string_types): self.filter_expression.var = Variable("'%s'" % self.filter_expression.var) def render(self, context): self.filter_expression.var.translate = not self.noop if self.message_context: self.filter_expression.var.message_context = ( self.message_context.resolve(context)) output = self.filter_expression.resolve(context) value = render_value_in_context(output, context) # Restore percent signs. Percent signs in template text are doubled # so they are not interpreted as string format flags. 
is_safe = isinstance(value, SafeData) value = value.replace('%%', '%') value = mark_safe(value) if is_safe else value if self.asvar: context[self.asvar] = value return '' else: return value class BlockTranslateNode(Node): def __init__(self, extra_context, singular, plural=None, countervar=None, counter=None, message_context=None, trimmed=False, asvar=None): self.extra_context = extra_context self.singular = singular self.plural = plural self.countervar = countervar self.counter = counter self.message_context = message_context self.trimmed = trimmed self.asvar = asvar def render_token_list(self, tokens): result = [] vars = [] for token in tokens: if token.token_type == TOKEN_TEXT: result.append(token.contents.replace('%', '%%')) elif token.token_type == TOKEN_VAR: result.append('%%(%s)s' % token.contents) vars.append(token.contents) msg = ''.join(result) if self.trimmed: msg = translation.trim_whitespace(msg) return msg, vars def render(self, context, nested=False): if self.message_context: message_context = self.message_context.resolve(context) else: message_context = None tmp_context = {} for var, val in self.extra_context.items(): tmp_context[var] = val.resolve(context) # Update() works like a push(), so corresponding context.pop() is at # the end of function context.update(tmp_context) singular, vars = self.render_token_list(self.singular) if self.plural and self.countervar and self.counter: count = self.counter.resolve(context) context[self.countervar] = count plural, plural_vars = self.render_token_list(self.plural) if message_context: result = translation.npgettext(message_context, singular, plural, count) else: result = translation.ungettext(singular, plural, count) vars.extend(plural_vars) else: if message_context: result = translation.pgettext(message_context, singular) else: result = translation.ugettext(singular) default_value = context.template.engine.string_if_invalid def render_value(key): if key in context: val = context[key] else: val = default_value % key if '%s' in default_value else default_value return render_value_in_context(val, context) data = {v: render_value(v) for v in vars} context.pop() try: result = result % data except (KeyError, ValueError): if nested: # Either string is malformed, or it's a bug raise TemplateSyntaxError("'blocktrans' is unable to format " "string returned by gettext: %r using %r" % (result, data)) with translation.override(None): result = self.render(context, nested=True) if self.asvar: context[self.asvar] = result return '' else: return result class LanguageNode(Node): def __init__(self, nodelist, language): self.nodelist = nodelist self.language = language def render(self, context): with translation.override(self.language.resolve(context)): output = self.nodelist.render(context) return output @register.tag("get_available_languages") def do_get_available_languages(parser, token): """ This will store a list of available languages in the context. Usage:: {% get_available_languages as languages %} {% for language in languages %} ... {% endfor %} This will just pull the LANGUAGES setting from your setting file (or the default settings) and put it into the named variable. 
""" # token.split_contents() isn't useful here because this tag doesn't accept variable as arguments args = token.contents.split() if len(args) != 3 or args[1] != 'as': raise TemplateSyntaxError("'get_available_languages' requires 'as variable' (got %r)" % args) return GetAvailableLanguagesNode(args[2]) @register.tag("get_language_info") def do_get_language_info(parser, token): """ This will store the language information dictionary for the given language code in a context variable. Usage:: {% get_language_info for LANGUAGE_CODE as l %} {{ l.code }} {{ l.name }} {{ l.name_translated }} {{ l.name_local }} {{ l.bidi|yesno:"bi-directional,uni-directional" }} """ args = token.split_contents() if len(args) != 5 or args[1] != 'for' or args[3] != 'as': raise TemplateSyntaxError("'%s' requires 'for string as variable' (got %r)" % (args[0], args[1:])) return GetLanguageInfoNode(parser.compile_filter(args[2]), args[4]) @register.tag("get_language_info_list") def do_get_language_info_list(parser, token): """ This will store a list of language information dictionaries for the given language codes in a context variable. The language codes can be specified either as a list of strings or a settings.LANGUAGES style list (or any sequence of sequences whose first items are language codes). Usage:: {% get_language_info_list for LANGUAGES as langs %} {% for l in langs %} {{ l.code }} {{ l.name }} {{ l.name_translated }} {{ l.name_local }} {{ l.bidi|yesno:"bi-directional,uni-directional" }} {% endfor %} """ args = token.split_contents() if len(args) != 5 or args[1] != 'for' or args[3] != 'as': raise TemplateSyntaxError("'%s' requires 'for sequence as variable' (got %r)" % (args[0], args[1:])) return GetLanguageInfoListNode(parser.compile_filter(args[2]), args[4]) @register.filter def language_name(lang_code): return translation.get_language_info(lang_code)['name'] @register.filter def language_name_translated(lang_code): english_name = translation.get_language_info(lang_code)['name'] return translation.ugettext(english_name) @register.filter def language_name_local(lang_code): return translation.get_language_info(lang_code)['name_local'] @register.filter def language_bidi(lang_code): return translation.get_language_info(lang_code)['bidi'] @register.tag("get_current_language") def do_get_current_language(parser, token): """ This will store the current language in the context. Usage:: {% get_current_language as language %} This will fetch the currently active language and put it's value into the ``language`` context variable. """ # token.split_contents() isn't useful here because this tag doesn't accept variable as arguments args = token.contents.split() if len(args) != 3 or args[1] != 'as': raise TemplateSyntaxError("'get_current_language' requires 'as variable' (got %r)" % args) return GetCurrentLanguageNode(args[2]) @register.tag("get_current_language_bidi") def do_get_current_language_bidi(parser, token): """ This will store the current language layout in the context. Usage:: {% get_current_language_bidi as bidi %} This will fetch the currently active language's layout and put it's value into the ``bidi`` context variable. 
True indicates right-to-left layout, otherwise left-to-right """ # token.split_contents() isn't useful here because this tag doesn't accept variable as arguments args = token.contents.split() if len(args) != 3 or args[1] != 'as': raise TemplateSyntaxError("'get_current_language_bidi' requires 'as variable' (got %r)" % args) return GetCurrentLanguageBidiNode(args[2]) @register.tag("trans") def do_translate(parser, token): """ This will mark a string for translation and will translate the string for the current language. Usage:: {% trans "this is a test" %} This will mark the string for translation so it will be pulled out by mark-messages.py into the .po files and will run the string through the translation engine. There is a second form:: {% trans "this is a test" noop %} This will only mark for translation, but will return the string unchanged. Use it when you need to store values into forms that should be translated later on. You can use variables instead of constant strings to translate stuff you marked somewhere else:: {% trans variable %} This will just try to translate the contents of the variable ``variable``. Make sure that the string in there is something that is in the .po file. It is possible to store the translated string into a variable:: {% trans "this is a test" as var %} {{ var }} Contextual translations are also supported:: {% trans "this is a test" context "greeting" %} This is equivalent to calling pgettext instead of (u)gettext. """ bits = token.split_contents() if len(bits) < 2: raise TemplateSyntaxError("'%s' takes at least one argument" % bits[0]) message_string = parser.compile_filter(bits[1]) remaining = bits[2:] noop = False asvar = None message_context = None seen = set() invalid_context = {'as', 'noop'} while remaining: option = remaining.pop(0) if option in seen: raise TemplateSyntaxError( "The '%s' option was specified more than once." % option, ) elif option == 'noop': noop = True elif option == 'context': try: value = remaining.pop(0) except IndexError: msg = "No argument provided to the '%s' tag for the context option." % bits[0] six.reraise(TemplateSyntaxError, TemplateSyntaxError(msg), sys.exc_info()[2]) if value in invalid_context: raise TemplateSyntaxError( "Invalid argument '%s' provided to the '%s' tag for the context option" % (value, bits[0]), ) message_context = parser.compile_filter(value) elif option == 'as': try: value = remaining.pop(0) except IndexError: msg = "No argument provided to the '%s' tag for the as option." % bits[0] six.reraise(TemplateSyntaxError, TemplateSyntaxError(msg), sys.exc_info()[2]) asvar = value else: raise TemplateSyntaxError( "Unknown argument for '%s' tag: '%s'. The only options " "available are 'noop', 'context' \"xxx\", and 'as VAR'." % ( bits[0], option, ) ) seen.add(option) return TranslateNode(message_string, noop, asvar, message_context) @register.tag("blocktrans") def do_block_translate(parser, token): """ This will translate a block of text with parameters. Usage:: {% blocktrans with bar=foo|filter boo=baz|filter %} This is {{ bar }} and {{ boo }}. {% endblocktrans %} Additionally, this supports pluralization:: {% blocktrans count count=var|length %} There is {{ count }} object. {% plural %} There are {{ count }} objects. {% endblocktrans %} This is much like ngettext, only in template syntax. 
The "var as value" legacy format is still supported:: {% blocktrans with foo|filter as bar and baz|filter as boo %} {% blocktrans count var|length as count %} The translated string can be stored in a variable using `asvar`:: {% blocktrans with bar=foo|filter boo=baz|filter asvar var %} This is {{ bar }} and {{ boo }}. {% endblocktrans %} {{ var }} Contextual translations are supported:: {% blocktrans with bar=foo|filter context "greeting" %} This is {{ bar }}. {% endblocktrans %} This is equivalent to calling pgettext/npgettext instead of (u)gettext/(u)ngettext. """ bits = token.split_contents() options = {} remaining_bits = bits[1:] asvar = None while remaining_bits: option = remaining_bits.pop(0) if option in options: raise TemplateSyntaxError('The %r option was specified more ' 'than once.' % option) if option == 'with': value = token_kwargs(remaining_bits, parser, support_legacy=True) if not value: raise TemplateSyntaxError('"with" in %r tag needs at least ' 'one keyword argument.' % bits[0]) elif option == 'count': value = token_kwargs(remaining_bits, parser, support_legacy=True) if len(value) != 1: raise TemplateSyntaxError('"count" in %r tag expected exactly ' 'one keyword argument.' % bits[0]) elif option == "context": try: value = remaining_bits.pop(0) value = parser.compile_filter(value) except Exception: msg = ( '"context" in %r tag expected ' 'exactly one argument.') % bits[0] six.reraise(TemplateSyntaxError, TemplateSyntaxError(msg), sys.exc_info()[2]) elif option == "trimmed": value = True elif option == "asvar": try: value = remaining_bits.pop(0) except IndexError: msg = "No argument provided to the '%s' tag for the asvar option." % bits[0] six.reraise(TemplateSyntaxError, TemplateSyntaxError(msg), sys.exc_info()[2]) asvar = value else: raise TemplateSyntaxError('Unknown argument for %r tag: %r.' % (bits[0], option)) options[option] = value if 'count' in options: countervar, counter = list(options['count'].items())[0] else: countervar, counter = None, None if 'context' in options: message_context = options['context'] else: message_context = None extra_context = options.get('with', {}) trimmed = options.get("trimmed", False) singular = [] plural = [] while parser.tokens: token = parser.next_token() if token.token_type in (TOKEN_VAR, TOKEN_TEXT): singular.append(token) else: break if countervar and counter: if token.contents.strip() != 'plural': raise TemplateSyntaxError("'blocktrans' doesn't allow other block tags inside it") while parser.tokens: token = parser.next_token() if token.token_type in (TOKEN_VAR, TOKEN_TEXT): plural.append(token) else: break if token.contents.strip() != 'endblocktrans': raise TemplateSyntaxError("'blocktrans' doesn't allow other block tags (seen %r) inside it" % token.contents) return BlockTranslateNode(extra_context, singular, plural, countervar, counter, message_context, trimmed=trimmed, asvar=asvar) @register.tag def language(parser, token): """ This will enable the given language just for this block. Usage:: {% language "de" %} This is {{ bar }} and {{ boo }}. {% endlanguage %} """ bits = token.split_contents() if len(bits) != 2: raise TemplateSyntaxError("'%s' takes one argument (language)" % bits[0]) language = parser.compile_filter(bits[1]) nodelist = parser.parse(('endlanguage',)) parser.delete_first_token() return LanguageNode(nodelist, language)
bsd-3-clause
michaelgugino/turbo-lister
flask/testsuite/blueprints.py
563
28089
# -*- coding: utf-8 -*- """ flask.testsuite.blueprints ~~~~~~~~~~~~~~~~~~~~~~~~~~ Blueprints (and currently modules) :copyright: (c) 2011 by Armin Ronacher. :license: BSD, see LICENSE for more details. """ import flask import unittest import warnings from flask.testsuite import FlaskTestCase, emits_module_deprecation_warning from flask._compat import text_type from werkzeug.exceptions import NotFound from werkzeug.http import parse_cache_control_header from jinja2 import TemplateNotFound # import moduleapp here because it uses deprecated features and we don't # want to see the warnings warnings.simplefilter('ignore', DeprecationWarning) from moduleapp import app as moduleapp warnings.simplefilter('default', DeprecationWarning) class ModuleTestCase(FlaskTestCase): @emits_module_deprecation_warning def test_basic_module(self): app = flask.Flask(__name__) admin = flask.Module(__name__, 'admin', url_prefix='/admin') @admin.route('/') def admin_index(): return 'admin index' @admin.route('/login') def admin_login(): return 'admin login' @admin.route('/logout') def admin_logout(): return 'admin logout' @app.route('/') def index(): return 'the index' app.register_module(admin) c = app.test_client() self.assert_equal(c.get('/').data, b'the index') self.assert_equal(c.get('/admin/').data, b'admin index') self.assert_equal(c.get('/admin/login').data, b'admin login') self.assert_equal(c.get('/admin/logout').data, b'admin logout') @emits_module_deprecation_warning def test_default_endpoint_name(self): app = flask.Flask(__name__) mod = flask.Module(__name__, 'frontend') def index(): return 'Awesome' mod.add_url_rule('/', view_func=index) app.register_module(mod) rv = app.test_client().get('/') self.assert_equal(rv.data, b'Awesome') with app.test_request_context(): self.assert_equal(flask.url_for('frontend.index'), '/') @emits_module_deprecation_warning def test_request_processing(self): catched = [] app = flask.Flask(__name__) admin = flask.Module(__name__, 'admin', url_prefix='/admin') @admin.before_request def before_admin_request(): catched.append('before-admin') @admin.after_request def after_admin_request(response): catched.append('after-admin') return response @admin.route('/') def admin_index(): return 'the admin' @app.before_request def before_request(): catched.append('before-app') @app.after_request def after_request(response): catched.append('after-app') return response @app.route('/') def index(): return 'the index' app.register_module(admin) c = app.test_client() self.assert_equal(c.get('/').data, b'the index') self.assert_equal(catched, ['before-app', 'after-app']) del catched[:] self.assert_equal(c.get('/admin/').data, b'the admin') self.assert_equal(catched, ['before-app', 'before-admin', 'after-admin', 'after-app']) @emits_module_deprecation_warning def test_context_processors(self): app = flask.Flask(__name__) admin = flask.Module(__name__, 'admin', url_prefix='/admin') @app.context_processor def inject_all_regular(): return {'a': 1} @admin.context_processor def inject_admin(): return {'b': 2} @admin.app_context_processor def inject_all_module(): return {'c': 3} @app.route('/') def index(): return flask.render_template_string('{{ a }}{{ b }}{{ c }}') @admin.route('/') def admin_index(): return flask.render_template_string('{{ a }}{{ b }}{{ c }}') app.register_module(admin) c = app.test_client() self.assert_equal(c.get('/').data, b'13') self.assert_equal(c.get('/admin/').data, b'123') @emits_module_deprecation_warning def test_late_binding(self): app = flask.Flask(__name__) admin = 
flask.Module(__name__, 'admin') @admin.route('/') def index(): return '42' app.register_module(admin, url_prefix='/admin') self.assert_equal(app.test_client().get('/admin/').data, b'42') @emits_module_deprecation_warning def test_error_handling(self): app = flask.Flask(__name__) admin = flask.Module(__name__, 'admin') @admin.app_errorhandler(404) def not_found(e): return 'not found', 404 @admin.app_errorhandler(500) def internal_server_error(e): return 'internal server error', 500 @admin.route('/') def index(): flask.abort(404) @admin.route('/error') def error(): 1 // 0 app.register_module(admin) c = app.test_client() rv = c.get('/') self.assert_equal(rv.status_code, 404) self.assert_equal(rv.data, b'not found') rv = c.get('/error') self.assert_equal(rv.status_code, 500) self.assert_equal(b'internal server error', rv.data) def test_templates_and_static(self): app = moduleapp app.testing = True c = app.test_client() rv = c.get('/') self.assert_equal(rv.data, b'Hello from the Frontend') rv = c.get('/admin/') self.assert_equal(rv.data, b'Hello from the Admin') rv = c.get('/admin/index2') self.assert_equal(rv.data, b'Hello from the Admin') rv = c.get('/admin/static/test.txt') self.assert_equal(rv.data.strip(), b'Admin File') rv.close() rv = c.get('/admin/static/css/test.css') self.assert_equal(rv.data.strip(), b'/* nested file */') rv.close() with app.test_request_context(): self.assert_equal(flask.url_for('admin.static', filename='test.txt'), '/admin/static/test.txt') with app.test_request_context(): try: flask.render_template('missing.html') except TemplateNotFound as e: self.assert_equal(e.name, 'missing.html') else: self.assert_true(0, 'expected exception') with flask.Flask(__name__).test_request_context(): self.assert_equal(flask.render_template('nested/nested.txt'), 'I\'m nested') def test_safe_access(self): app = moduleapp with app.test_request_context(): f = app.view_functions['admin.static'] try: f('/etc/passwd') except NotFound: pass else: self.assert_true(0, 'expected exception') try: f('../__init__.py') except NotFound: pass else: self.assert_true(0, 'expected exception') # testcase for a security issue that may exist on windows systems import os import ntpath old_path = os.path os.path = ntpath try: try: f('..\\__init__.py') except NotFound: pass else: self.assert_true(0, 'expected exception') finally: os.path = old_path @emits_module_deprecation_warning def test_endpoint_decorator(self): from werkzeug.routing import Submount, Rule from flask import Module app = flask.Flask(__name__) app.testing = True app.url_map.add(Submount('/foo', [ Rule('/bar', endpoint='bar'), Rule('/', endpoint='index') ])) module = Module(__name__, __name__) @module.endpoint('bar') def bar(): return 'bar' @module.endpoint('index') def index(): return 'index' app.register_module(module) c = app.test_client() self.assert_equal(c.get('/foo/').data, b'index') self.assert_equal(c.get('/foo/bar').data, b'bar') class BlueprintTestCase(FlaskTestCase): def test_blueprint_specific_error_handling(self): frontend = flask.Blueprint('frontend', __name__) backend = flask.Blueprint('backend', __name__) sideend = flask.Blueprint('sideend', __name__) @frontend.errorhandler(403) def frontend_forbidden(e): return 'frontend says no', 403 @frontend.route('/frontend-no') def frontend_no(): flask.abort(403) @backend.errorhandler(403) def backend_forbidden(e): return 'backend says no', 403 @backend.route('/backend-no') def backend_no(): flask.abort(403) @sideend.route('/what-is-a-sideend') def sideend_no(): flask.abort(403) app = 
flask.Flask(__name__) app.register_blueprint(frontend) app.register_blueprint(backend) app.register_blueprint(sideend) @app.errorhandler(403) def app_forbidden(e): return 'application itself says no', 403 c = app.test_client() self.assert_equal(c.get('/frontend-no').data, b'frontend says no') self.assert_equal(c.get('/backend-no').data, b'backend says no') self.assert_equal(c.get('/what-is-a-sideend').data, b'application itself says no') def test_blueprint_url_definitions(self): bp = flask.Blueprint('test', __name__) @bp.route('/foo', defaults={'baz': 42}) def foo(bar, baz): return '%s/%d' % (bar, baz) @bp.route('/bar') def bar(bar): return text_type(bar) app = flask.Flask(__name__) app.register_blueprint(bp, url_prefix='/1', url_defaults={'bar': 23}) app.register_blueprint(bp, url_prefix='/2', url_defaults={'bar': 19}) c = app.test_client() self.assert_equal(c.get('/1/foo').data, b'23/42') self.assert_equal(c.get('/2/foo').data, b'19/42') self.assert_equal(c.get('/1/bar').data, b'23') self.assert_equal(c.get('/2/bar').data, b'19') def test_blueprint_url_processors(self): bp = flask.Blueprint('frontend', __name__, url_prefix='/<lang_code>') @bp.url_defaults def add_language_code(endpoint, values): values.setdefault('lang_code', flask.g.lang_code) @bp.url_value_preprocessor def pull_lang_code(endpoint, values): flask.g.lang_code = values.pop('lang_code') @bp.route('/') def index(): return flask.url_for('.about') @bp.route('/about') def about(): return flask.url_for('.index') app = flask.Flask(__name__) app.register_blueprint(bp) c = app.test_client() self.assert_equal(c.get('/de/').data, b'/de/about') self.assert_equal(c.get('/de/about').data, b'/de/') def test_templates_and_static(self): from blueprintapp import app c = app.test_client() rv = c.get('/') self.assert_equal(rv.data, b'Hello from the Frontend') rv = c.get('/admin/') self.assert_equal(rv.data, b'Hello from the Admin') rv = c.get('/admin/index2') self.assert_equal(rv.data, b'Hello from the Admin') rv = c.get('/admin/static/test.txt') self.assert_equal(rv.data.strip(), b'Admin File') rv.close() rv = c.get('/admin/static/css/test.css') self.assert_equal(rv.data.strip(), b'/* nested file */') rv.close() # try/finally, in case other tests use this app for Blueprint tests. max_age_default = app.config['SEND_FILE_MAX_AGE_DEFAULT'] try: expected_max_age = 3600 if app.config['SEND_FILE_MAX_AGE_DEFAULT'] == expected_max_age: expected_max_age = 7200 app.config['SEND_FILE_MAX_AGE_DEFAULT'] = expected_max_age rv = c.get('/admin/static/css/test.css') cc = parse_cache_control_header(rv.headers['Cache-Control']) self.assert_equal(cc.max_age, expected_max_age) rv.close() finally: app.config['SEND_FILE_MAX_AGE_DEFAULT'] = max_age_default with app.test_request_context(): self.assert_equal(flask.url_for('admin.static', filename='test.txt'), '/admin/static/test.txt') with app.test_request_context(): try: flask.render_template('missing.html') except TemplateNotFound as e: self.assert_equal(e.name, 'missing.html') else: self.assert_true(0, 'expected exception') with flask.Flask(__name__).test_request_context(): self.assert_equal(flask.render_template('nested/nested.txt'), 'I\'m nested') def test_default_static_cache_timeout(self): app = flask.Flask(__name__) class MyBlueprint(flask.Blueprint): def get_send_file_max_age(self, filename): return 100 blueprint = MyBlueprint('blueprint', __name__, static_folder='static') app.register_blueprint(blueprint) # try/finally, in case other tests use this app for Blueprint tests. 
max_age_default = app.config['SEND_FILE_MAX_AGE_DEFAULT'] try: with app.test_request_context(): unexpected_max_age = 3600 if app.config['SEND_FILE_MAX_AGE_DEFAULT'] == unexpected_max_age: unexpected_max_age = 7200 app.config['SEND_FILE_MAX_AGE_DEFAULT'] = unexpected_max_age rv = blueprint.send_static_file('index.html') cc = parse_cache_control_header(rv.headers['Cache-Control']) self.assert_equal(cc.max_age, 100) rv.close() finally: app.config['SEND_FILE_MAX_AGE_DEFAULT'] = max_age_default def test_templates_list(self): from blueprintapp import app templates = sorted(app.jinja_env.list_templates()) self.assert_equal(templates, ['admin/index.html', 'frontend/index.html']) def test_dotted_names(self): frontend = flask.Blueprint('myapp.frontend', __name__) backend = flask.Blueprint('myapp.backend', __name__) @frontend.route('/fe') def frontend_index(): return flask.url_for('myapp.backend.backend_index') @frontend.route('/fe2') def frontend_page2(): return flask.url_for('.frontend_index') @backend.route('/be') def backend_index(): return flask.url_for('myapp.frontend.frontend_index') app = flask.Flask(__name__) app.register_blueprint(frontend) app.register_blueprint(backend) c = app.test_client() self.assert_equal(c.get('/fe').data.strip(), b'/be') self.assert_equal(c.get('/fe2').data.strip(), b'/fe') self.assert_equal(c.get('/be').data.strip(), b'/fe') def test_dotted_names_from_app(self): app = flask.Flask(__name__) app.testing = True test = flask.Blueprint('test', __name__) @app.route('/') def app_index(): return flask.url_for('test.index') @test.route('/test/') def index(): return flask.url_for('app_index') app.register_blueprint(test) with app.test_client() as c: rv = c.get('/') self.assert_equal(rv.data, b'/test/') def test_empty_url_defaults(self): bp = flask.Blueprint('bp', __name__) @bp.route('/', defaults={'page': 1}) @bp.route('/page/<int:page>') def something(page): return str(page) app = flask.Flask(__name__) app.register_blueprint(bp) c = app.test_client() self.assert_equal(c.get('/').data, b'1') self.assert_equal(c.get('/page/2').data, b'2') def test_route_decorator_custom_endpoint(self): bp = flask.Blueprint('bp', __name__) @bp.route('/foo') def foo(): return flask.request.endpoint @bp.route('/bar', endpoint='bar') def foo_bar(): return flask.request.endpoint @bp.route('/bar/123', endpoint='123') def foo_bar_foo(): return flask.request.endpoint @bp.route('/bar/foo') def bar_foo(): return flask.request.endpoint app = flask.Flask(__name__) app.register_blueprint(bp, url_prefix='/py') @app.route('/') def index(): return flask.request.endpoint c = app.test_client() self.assertEqual(c.get('/').data, b'index') self.assertEqual(c.get('/py/foo').data, b'bp.foo') self.assertEqual(c.get('/py/bar').data, b'bp.bar') self.assertEqual(c.get('/py/bar/123').data, b'bp.123') self.assertEqual(c.get('/py/bar/foo').data, b'bp.bar_foo') def test_route_decorator_custom_endpoint_with_dots(self): bp = flask.Blueprint('bp', __name__) @bp.route('/foo') def foo(): return flask.request.endpoint try: @bp.route('/bar', endpoint='bar.bar') def foo_bar(): return flask.request.endpoint except AssertionError: pass else: raise AssertionError('expected AssertionError not raised') try: @bp.route('/bar/123', endpoint='bar.123') def foo_bar_foo(): return flask.request.endpoint except AssertionError: pass else: raise AssertionError('expected AssertionError not raised') def foo_foo_foo(): pass self.assertRaises( AssertionError, lambda: bp.add_url_rule( '/bar/123', endpoint='bar.123', view_func=foo_foo_foo ) ) 
self.assertRaises( AssertionError, bp.route('/bar/123', endpoint='bar.123'), lambda: None ) app = flask.Flask(__name__) app.register_blueprint(bp, url_prefix='/py') c = app.test_client() self.assertEqual(c.get('/py/foo').data, b'bp.foo') # The rules didn't actually make it through rv = c.get('/py/bar') assert rv.status_code == 404 rv = c.get('/py/bar/123') assert rv.status_code == 404 def test_template_filter(self): bp = flask.Blueprint('bp', __name__) @bp.app_template_filter() def my_reverse(s): return s[::-1] app = flask.Flask(__name__) app.register_blueprint(bp, url_prefix='/py') self.assert_in('my_reverse', app.jinja_env.filters.keys()) self.assert_equal(app.jinja_env.filters['my_reverse'], my_reverse) self.assert_equal(app.jinja_env.filters['my_reverse']('abcd'), 'dcba') def test_add_template_filter(self): bp = flask.Blueprint('bp', __name__) def my_reverse(s): return s[::-1] bp.add_app_template_filter(my_reverse) app = flask.Flask(__name__) app.register_blueprint(bp, url_prefix='/py') self.assert_in('my_reverse', app.jinja_env.filters.keys()) self.assert_equal(app.jinja_env.filters['my_reverse'], my_reverse) self.assert_equal(app.jinja_env.filters['my_reverse']('abcd'), 'dcba') def test_template_filter_with_name(self): bp = flask.Blueprint('bp', __name__) @bp.app_template_filter('strrev') def my_reverse(s): return s[::-1] app = flask.Flask(__name__) app.register_blueprint(bp, url_prefix='/py') self.assert_in('strrev', app.jinja_env.filters.keys()) self.assert_equal(app.jinja_env.filters['strrev'], my_reverse) self.assert_equal(app.jinja_env.filters['strrev']('abcd'), 'dcba') def test_add_template_filter_with_name(self): bp = flask.Blueprint('bp', __name__) def my_reverse(s): return s[::-1] bp.add_app_template_filter(my_reverse, 'strrev') app = flask.Flask(__name__) app.register_blueprint(bp, url_prefix='/py') self.assert_in('strrev', app.jinja_env.filters.keys()) self.assert_equal(app.jinja_env.filters['strrev'], my_reverse) self.assert_equal(app.jinja_env.filters['strrev']('abcd'), 'dcba') def test_template_filter_with_template(self): bp = flask.Blueprint('bp', __name__) @bp.app_template_filter() def super_reverse(s): return s[::-1] app = flask.Flask(__name__) app.register_blueprint(bp, url_prefix='/py') @app.route('/') def index(): return flask.render_template('template_filter.html', value='abcd') rv = app.test_client().get('/') self.assert_equal(rv.data, b'dcba') def test_template_filter_after_route_with_template(self): app = flask.Flask(__name__) @app.route('/') def index(): return flask.render_template('template_filter.html', value='abcd') bp = flask.Blueprint('bp', __name__) @bp.app_template_filter() def super_reverse(s): return s[::-1] app.register_blueprint(bp, url_prefix='/py') rv = app.test_client().get('/') self.assert_equal(rv.data, b'dcba') def test_add_template_filter_with_template(self): bp = flask.Blueprint('bp', __name__) def super_reverse(s): return s[::-1] bp.add_app_template_filter(super_reverse) app = flask.Flask(__name__) app.register_blueprint(bp, url_prefix='/py') @app.route('/') def index(): return flask.render_template('template_filter.html', value='abcd') rv = app.test_client().get('/') self.assert_equal(rv.data, b'dcba') def test_template_filter_with_name_and_template(self): bp = flask.Blueprint('bp', __name__) @bp.app_template_filter('super_reverse') def my_reverse(s): return s[::-1] app = flask.Flask(__name__) app.register_blueprint(bp, url_prefix='/py') @app.route('/') def index(): return flask.render_template('template_filter.html', value='abcd') rv =
app.test_client().get('/') self.assert_equal(rv.data, b'dcba') def test_add_template_filter_with_name_and_template(self): bp = flask.Blueprint('bp', __name__) def my_reverse(s): return s[::-1] bp.add_app_template_filter(my_reverse, 'super_reverse') app = flask.Flask(__name__) app.register_blueprint(bp, url_prefix='/py') @app.route('/') def index(): return flask.render_template('template_filter.html', value='abcd') rv = app.test_client().get('/') self.assert_equal(rv.data, b'dcba') def test_template_test(self): bp = flask.Blueprint('bp', __name__) @bp.app_template_test() def is_boolean(value): return isinstance(value, bool) app = flask.Flask(__name__) app.register_blueprint(bp, url_prefix='/py') self.assert_in('is_boolean', app.jinja_env.tests.keys()) self.assert_equal(app.jinja_env.tests['is_boolean'], is_boolean) self.assert_true(app.jinja_env.tests['is_boolean'](False)) def test_add_template_test(self): bp = flask.Blueprint('bp', __name__) def is_boolean(value): return isinstance(value, bool) bp.add_app_template_test(is_boolean) app = flask.Flask(__name__) app.register_blueprint(bp, url_prefix='/py') self.assert_in('is_boolean', app.jinja_env.tests.keys()) self.assert_equal(app.jinja_env.tests['is_boolean'], is_boolean) self.assert_true(app.jinja_env.tests['is_boolean'](False)) def test_template_test_with_name(self): bp = flask.Blueprint('bp', __name__) @bp.app_template_test('boolean') def is_boolean(value): return isinstance(value, bool) app = flask.Flask(__name__) app.register_blueprint(bp, url_prefix='/py') self.assert_in('boolean', app.jinja_env.tests.keys()) self.assert_equal(app.jinja_env.tests['boolean'], is_boolean) self.assert_true(app.jinja_env.tests['boolean'](False)) def test_add_template_test_with_name(self): bp = flask.Blueprint('bp', __name__) def is_boolean(value): return isinstance(value, bool) bp.add_app_template_test(is_boolean, 'boolean') app = flask.Flask(__name__) app.register_blueprint(bp, url_prefix='/py') self.assert_in('boolean', app.jinja_env.tests.keys()) self.assert_equal(app.jinja_env.tests['boolean'], is_boolean) self.assert_true(app.jinja_env.tests['boolean'](False)) def test_template_test_with_template(self): bp = flask.Blueprint('bp', __name__) @bp.app_template_test() def boolean(value): return isinstance(value, bool) app = flask.Flask(__name__) app.register_blueprint(bp, url_prefix='/py') @app.route('/') def index(): return flask.render_template('template_test.html', value=False) rv = app.test_client().get('/') self.assert_in(b'Success!', rv.data) def test_template_test_after_route_with_template(self): app = flask.Flask(__name__) @app.route('/') def index(): return flask.render_template('template_test.html', value=False) bp = flask.Blueprint('bp', __name__) @bp.app_template_test() def boolean(value): return isinstance(value, bool) app.register_blueprint(bp, url_prefix='/py') rv = app.test_client().get('/') self.assert_in(b'Success!', rv.data) def test_add_template_test_with_template(self): bp = flask.Blueprint('bp', __name__) def boolean(value): return isinstance(value, bool) bp.add_app_template_test(boolean) app = flask.Flask(__name__) app.register_blueprint(bp, url_prefix='/py') @app.route('/') def index(): return flask.render_template('template_test.html', value=False) rv = app.test_client().get('/') self.assert_in(b'Success!', rv.data) def test_template_test_with_name_and_template(self): bp = flask.Blueprint('bp', __name__) @bp.app_template_test('boolean') def is_boolean(value): return isinstance(value, bool) app = flask.Flask(__name__) 
app.register_blueprint(bp, url_prefix='/py') @app.route('/') def index(): return flask.render_template('template_test.html', value=False) rv = app.test_client().get('/') self.assert_in(b'Success!', rv.data) def test_add_template_test_with_name_and_template(self): bp = flask.Blueprint('bp', __name__) def is_boolean(value): return isinstance(value, bool) bp.add_app_template_test(is_boolean, 'boolean') app = flask.Flask(__name__) app.register_blueprint(bp, url_prefix='/py') @app.route('/') def index(): return flask.render_template('template_test.html', value=False) rv = app.test_client().get('/') self.assert_in(b'Success!', rv.data) def suite(): suite = unittest.TestSuite() suite.addTest(unittest.makeSuite(BlueprintTestCase)) suite.addTest(unittest.makeSuite(ModuleTestCase)) return suite
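The pattern exercised throughout these tests, reduced to a self-contained sketch (assumes Flask is installed; the blueprint and route names are illustrative):

import flask

bp = flask.Blueprint('pages', __name__)

@bp.route('/hello')
def hello():
    return 'Hello from the blueprint'

app = flask.Flask(__name__)
app.register_blueprint(bp, url_prefix='/pages')

with app.test_client() as c:
    assert c.get('/pages/hello').data == b'Hello from the blueprint'
    assert c.get('/hello').status_code == 404  # only mounted under the prefix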
gpl-3.0
r39132/airflow
airflow/utils/asciiart.py
5
2542
# -*- coding: utf-8 -*-
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements.  See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership.  The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License.  You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied.  See the License for the
# specific language governing permissions and limitations
# under the License.
#

bug = r"""
     =,        .=
    =.|  ,---.  |.=
    =.| "-(:::::)-" |.=
     \\__/`-.|.-'\__//
      `-| .::| .::|-'      Pillendreher
       _|`-._|_.-'|_       (Scarabaeus sacer)
     /.-|    | .::|-.\
    // ,| .::|::::|. \\
   || //\::::|::' /\\ ||
   /'\|| `.__|__.' ||/'\
      ^ \\      // ^
        /'\    /'\
       ^        ^
"""

nukular = r"""
         ____/ (  (    )   )  \___
        /( (  (  )   _    ))  )   )\
      ((     (   )(    )  )   (   )  )
    ((/  ( _(   )   (   _) ) (  () )  )
   ( (  ( (_)   ((    (   )  .((_ ) .  )_
  ( (  )    (      (  )    )   ) . ) (   )
 (  (   (  (   ) (  _  ( _) ).  ) . ) ) ( )
 ( (  (   ) (  )   (  ))     ) _)(   )  )  )
( (  ( \ ) (    (_  ( ) ( )  )   ) )  )) ( )
 (  (   (  (   (_ ( ) ( _    )  ) (  )  )   )
 ( (  ( (  (  )     (_  )  ) )  _)   ) _( ( )
  ((  (   )(    (     _    )   _) _(_ (  (_ )
   (_((__(_(__(( ( ( |  ) ) ) )_))__))_)___)
   ((__)        \\||lll|l||///          \_))
            (   /(/ (  )  ) )\   )
          (    ( ( ( | | ) ) )\   )
           (   /(| / ( )) ) ) )) )
         (     ( ((((_(|)_)))))     )
          (      ||\(|(|)|/||      )
        (        |(||(||)||||        )
          (     //|/l|||)|\\ \       )
        (/ / //  /|//||||\\  \ \  \ _)
-------------------------------------------------------------------------------
"""
apache-2.0
dbtsai/spark
examples/src/main/python/mllib/isotonic_regression_example.py
27
2341
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements.  See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License.  You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

"""
Isotonic Regression Example.
"""
from pyspark import SparkContext
# $example on$
import math
from pyspark.mllib.regression import IsotonicRegression, IsotonicRegressionModel
from pyspark.mllib.util import MLUtils
# $example off$

if __name__ == "__main__":

    sc = SparkContext(appName="PythonIsotonicRegressionExample")

    # $example on$
    # Load and parse the data
    def parsePoint(labeledData):
        return (labeledData.label, labeledData.features[0], 1.0)

    data = MLUtils.loadLibSVMFile(sc, "data/mllib/sample_isotonic_regression_libsvm_data.txt")

    # Create label, feature, weight tuples from input data with weight set to default value 1.0.
    parsedData = data.map(parsePoint)

    # Split data into training (60%) and test (40%) sets.
    training, test = parsedData.randomSplit([0.6, 0.4], 11)

    # Create isotonic regression model from training data.
    # Isotonic parameter defaults to true so it is only shown for demonstration
    model = IsotonicRegression.train(training)

    # Create tuples of predicted and real labels.
    predictionAndLabel = test.map(lambda p: (model.predict(p[1]), p[0]))

    # Calculate mean squared error between predicted and real labels.
    meanSquaredError = predictionAndLabel.map(lambda pl: math.pow((pl[0] - pl[1]), 2)).mean()
    print("Mean Squared Error = " + str(meanSquaredError))

    # Save and load model
    model.save(sc, "target/tmp/myIsotonicRegressionModel")
    sameModel = IsotonicRegressionModel.load(sc, "target/tmp/myIsotonicRegressionModel")
    # $example off$
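
# --- Conceptual sketch (not from the Spark sources): the pool-adjacent-
# violators step that isotonic regression performs, shown in plain Python so
# the example above can be followed without a running SparkContext.
def _pava(ys):
    """Return the non-decreasing sequence closest to ys in least squares."""
    merged = []  # each entry is [block mean, block weight]
    for y in ys:
        merged.append([float(y), 1])
        # pool adjacent violators: merge blocks while ordering is broken
        while len(merged) > 1 and merged[-2][0] > merged[-1][0]:
            m2 = merged.pop()
            m1 = merged.pop()
            w = m1[1] + m2[1]
            merged.append([(m1[0] * m1[1] + m2[0] * m2[1]) / w, w])
    out = []
    for mean, w in merged:
        out.extend([mean] * w)
    return out

print(_pava([1.0, 3.0, 2.0, 4.0]))  # -> [1.0, 2.5, 2.5, 4.0]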
apache-2.0
thnee/ansible
lib/ansible/modules/cloud/oneandone/oneandone_firewall_policy.py
21
18690
#!/usr/bin/python
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible.  If not, see <http://www.gnu.org/licenses/>.

from __future__ import absolute_import, division, print_function
__metaclass__ = type

ANSIBLE_METADATA = {'metadata_version': '1.1',
                    'status': ['preview'],
                    'supported_by': 'community'}

DOCUMENTATION = '''
---
module: oneandone_firewall_policy
short_description: Configure 1&1 firewall policy.
description:
     - Create, remove, reconfigure, update firewall policies.
       This module has a dependency on 1and1 >= 1.0
version_added: "2.5"
options:
  state:
    description:
      - Define a firewall policy state to create, remove, or update.
    required: false
    default: 'present'
    choices: [ "present", "absent", "update" ]
  auth_token:
    description:
      - Authenticating API token provided by 1&1.
    required: true
  api_url:
    description:
      - Custom API URL. Overrides the
        ONEANDONE_API_URL environment variable.
    required: false
  name:
    description:
      - Firewall policy name used with present state. Used as identifier
        (id or name) when used with absent state. maxLength=128
    required: true
  firewall_policy:
    description:
      - The identifier (id or name) of the firewall policy used with update state.
    required: true
  rules:
    description:
      - A list of rules that will be set for the firewall policy.
        Each rule must contain protocol parameter, in addition to three optional
        parameters (port_from, port_to, and source)
  add_server_ips:
    description:
      - A list of server identifiers (id or name) to be assigned to a firewall policy.
        Used in combination with update state.
    required: false
  remove_server_ips:
    description:
      - A list of server IP ids to be unassigned from a firewall policy.
        Used in combination with update state.
    required: false
  add_rules:
    description:
      - A list of rules that will be added to an existing firewall policy.
        Its syntax is the same as the one used for the rules parameter.
        Used in combination with update state.
    required: false
  remove_rules:
    description:
      - A list of rule ids that will be removed from an existing firewall policy.
        Used in combination with update state.
    required: false
  description:
    description:
      - Firewall policy description. maxLength=256
    required: false
  wait:
    description:
      - wait for the instance to be in state 'running' before returning
    required: false
    default: "yes"
    type: bool
  wait_timeout:
    description:
      - how long before wait gives up, in seconds
    default: 600
  wait_interval:
    description:
      - Defines the number of seconds to wait when using the _wait_for methods
    default: 5

requirements:
  - "1and1"
  - "python >= 2.6"

author:
  - "Amel Ajdinovic (@aajdinov)"
  - "Ethan Devenport (@edevenport)"
'''

EXAMPLES = '''

# Provisioning example. Create and destroy a firewall policy.
- oneandone_firewall_policy:
    auth_token: oneandone_private_api_key
    name: ansible-firewall-policy
    description: Testing creation of firewall policies with ansible
    rules:
     -
       protocol: TCP
       port_from: 80
       port_to: 80
       source: 0.0.0.0
    wait: true
    wait_timeout: 500

- oneandone_firewall_policy:
    auth_token: oneandone_private_api_key
    state: absent
    name: ansible-firewall-policy

# Update a firewall policy.
- oneandone_firewall_policy:
    auth_token: oneandone_private_api_key
    state: update
    firewall_policy: ansible-firewall-policy
    name: ansible-firewall-policy-updated
    description: Testing creation of firewall policies with ansible - updated

# Add server to a firewall policy.
- oneandone_firewall_policy:
    auth_token: oneandone_private_api_key
    firewall_policy: ansible-firewall-policy-updated
    add_server_ips:
     - server_identifier (id or name)
     - server_identifier #2 (id or name)
    wait: true
    wait_timeout: 500
    state: update

# Remove server from a firewall policy.
- oneandone_firewall_policy:
    auth_token: oneandone_private_api_key
    firewall_policy: ansible-firewall-policy-updated
    remove_server_ips:
     - B2504878540DBC5F7634EB00A07C1EBD (server's IP id)
    wait: true
    wait_timeout: 500
    state: update

# Add rules to a firewall policy.
- oneandone_firewall_policy:
    auth_token: oneandone_private_api_key
    firewall_policy: ansible-firewall-policy-updated
    description: Adding rules to an existing firewall policy
    add_rules:
     -
       protocol: TCP
       port_from: 70
       port_to: 70
       source: 0.0.0.0
     -
       protocol: TCP
       port_from: 60
       port_to: 60
       source: 0.0.0.0
    wait: true
    wait_timeout: 500
    state: update

# Remove rules from a firewall policy.
- oneandone_firewall_policy:
    auth_token: oneandone_private_api_key
    firewall_policy: ansible-firewall-policy-updated
    remove_rules:
     - rule_id #1
     - rule_id #2
     - ...
    wait: true
    wait_timeout: 500
    state: update
'''

RETURN = '''
firewall_policy:
    description: Information about the firewall policy that was processed
    type: dict
    sample: '{"id": "92B74394A397ECC3359825C1656D67A6", "name": "Default Policy"}'
    returned: always
'''

import os
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.oneandone import (
    get_firewall_policy,
    get_server,
    OneAndOneResources,
    wait_for_resource_creation_completion
)

HAS_ONEANDONE_SDK = True

try:
    import oneandone.client
except ImportError:
    HAS_ONEANDONE_SDK = False


def _check_mode(module, result):
    if module.check_mode:
        module.exit_json(
            changed=result
        )


def _add_server_ips(module, oneandone_conn, firewall_id, server_ids):
    """
    Assigns servers to a firewall policy.
    """
    try:
        attach_servers = []

        for _server_id in server_ids:
            server = get_server(oneandone_conn, _server_id, True)
            attach_server = oneandone.client.AttachServer(
                server_id=server['id'],
                server_ip_id=next(iter(server['ips'] or []), None)['id']
            )
            attach_servers.append(attach_server)

        if module.check_mode:
            if attach_servers:
                return True
            return False

        firewall_policy = oneandone_conn.attach_server_firewall_policy(
            firewall_id=firewall_id,
            server_ips=attach_servers)
        return firewall_policy
    except Exception as e:
        module.fail_json(msg=str(e))


def _remove_firewall_server(module, oneandone_conn, firewall_id, server_ip_id):
    """
    Unassigns a server/IP from a firewall policy.
    """
    try:
        if module.check_mode:
            firewall_server = oneandone_conn.get_firewall_server(
                firewall_id=firewall_id,
                server_ip_id=server_ip_id)
            if firewall_server:
                return True
            return False

        firewall_policy = oneandone_conn.remove_firewall_server(
            firewall_id=firewall_id,
            server_ip_id=server_ip_id)
        return firewall_policy
    except Exception as e:
        module.fail_json(msg=str(e))


def _add_firewall_rules(module, oneandone_conn, firewall_id, rules):
    """
    Adds new rules to a firewall policy.
    """
    try:
        firewall_rules = []

        for rule in rules:
            firewall_rule = oneandone.client.FirewallPolicyRule(
                protocol=rule['protocol'],
                port_from=rule['port_from'],
                port_to=rule['port_to'],
                source=rule['source'])
            firewall_rules.append(firewall_rule)

        if module.check_mode:
            firewall_policy_id = get_firewall_policy(oneandone_conn, firewall_id)
            if (firewall_rules and firewall_policy_id):
                return True
            return False

        firewall_policy = oneandone_conn.add_firewall_policy_rule(
            firewall_id=firewall_id,
            firewall_policy_rules=firewall_rules
        )
        return firewall_policy
    except Exception as e:
        module.fail_json(msg=str(e))


def _remove_firewall_rule(module, oneandone_conn, firewall_id, rule_id):
    """
    Removes a rule from a firewall policy.
    """
    try:
        if module.check_mode:
            rule = oneandone_conn.get_firewall_policy_rule(
                firewall_id=firewall_id,
                rule_id=rule_id)
            if rule:
                return True
            return False

        firewall_policy = oneandone_conn.remove_firewall_rule(
            firewall_id=firewall_id,
            rule_id=rule_id
        )
        return firewall_policy
    except Exception as e:
        module.fail_json(msg=str(e))


def update_firewall_policy(module, oneandone_conn):
    """
    Updates a firewall policy based on input arguments.
    Firewall rules and server ips can be added/removed to/from
    firewall policy. Firewall policy name and description can be
    updated as well.

    module : AnsibleModule object
    oneandone_conn: authenticated oneandone object
    """
    try:
        firewall_policy_id = module.params.get('firewall_policy')
        name = module.params.get('name')
        description = module.params.get('description')
        add_server_ips = module.params.get('add_server_ips')
        remove_server_ips = module.params.get('remove_server_ips')
        add_rules = module.params.get('add_rules')
        remove_rules = module.params.get('remove_rules')

        changed = False

        firewall_policy = get_firewall_policy(oneandone_conn, firewall_policy_id, True)
        if firewall_policy is None:
            _check_mode(module, False)

        if name or description:
            _check_mode(module, True)
            firewall_policy = oneandone_conn.modify_firewall(
                firewall_id=firewall_policy['id'],
                name=name,
                description=description)
            changed = True

        if add_server_ips:
            if module.check_mode:
                _check_mode(module,
                            _add_server_ips(module,
                                            oneandone_conn,
                                            firewall_policy['id'],
                                            add_server_ips))

            firewall_policy = _add_server_ips(module,
                                              oneandone_conn,
                                              firewall_policy['id'],
                                              add_server_ips)
            changed = True

        if remove_server_ips:
            chk_changed = False
            for server_ip_id in remove_server_ips:
                if module.check_mode:
                    chk_changed |= _remove_firewall_server(module,
                                                           oneandone_conn,
                                                           firewall_policy['id'],
                                                           server_ip_id)

                _remove_firewall_server(module,
                                        oneandone_conn,
                                        firewall_policy['id'],
                                        server_ip_id)
            _check_mode(module, chk_changed)
            firewall_policy = get_firewall_policy(oneandone_conn, firewall_policy['id'], True)
            changed = True

        if add_rules:
            firewall_policy = _add_firewall_rules(module,
                                                  oneandone_conn,
                                                  firewall_policy['id'],
                                                  add_rules)
            _check_mode(module, firewall_policy)
            changed = True

        if remove_rules:
            chk_changed = False
            for rule_id in remove_rules:
                if module.check_mode:
                    chk_changed |= _remove_firewall_rule(module,
                                                         oneandone_conn,
                                                         firewall_policy['id'],
                                                         rule_id)

                _remove_firewall_rule(module,
                                      oneandone_conn,
                                      firewall_policy['id'],
                                      rule_id)
            _check_mode(module, chk_changed)
            firewall_policy = get_firewall_policy(oneandone_conn, firewall_policy['id'], True)
            changed = True

        return (changed, firewall_policy)
    except Exception as e:
        module.fail_json(msg=str(e))


def create_firewall_policy(module, oneandone_conn):
    """
    Create a new firewall policy.

    module : AnsibleModule object
    oneandone_conn: authenticated oneandone object
    """
    try:
        name = module.params.get('name')
        description = module.params.get('description')
        rules = module.params.get('rules')
        wait = module.params.get('wait')
        wait_timeout = module.params.get('wait_timeout')
        wait_interval = module.params.get('wait_interval')

        firewall_rules = []

        for rule in rules:
            firewall_rule = oneandone.client.FirewallPolicyRule(
                protocol=rule['protocol'],
                port_from=rule['port_from'],
                port_to=rule['port_to'],
                source=rule['source'])
            firewall_rules.append(firewall_rule)

        firewall_policy_obj = oneandone.client.FirewallPolicy(
            name=name,
            description=description
        )

        _check_mode(module, True)
        firewall_policy = oneandone_conn.create_firewall_policy(
            firewall_policy=firewall_policy_obj,
            firewall_policy_rules=firewall_rules
        )

        if wait:
            wait_for_resource_creation_completion(
                oneandone_conn,
                OneAndOneResources.firewall_policy,
                firewall_policy['id'],
                wait_timeout,
                wait_interval)
            firewall_policy = get_firewall_policy(oneandone_conn, firewall_policy['id'], True)  # refresh

        changed = True if firewall_policy else False

        _check_mode(module, False)

        return (changed, firewall_policy)
    except Exception as e:
        module.fail_json(msg=str(e))


def remove_firewall_policy(module, oneandone_conn):
    """
    Removes a firewall policy.

    module : AnsibleModule object
    oneandone_conn: authenticated oneandone object
    """
    try:
        fp_id = module.params.get('name')
        firewall_policy_id = get_firewall_policy(oneandone_conn, fp_id)
        if module.check_mode:
            if firewall_policy_id is None:
                _check_mode(module, False)
            _check_mode(module, True)

        firewall_policy = oneandone_conn.delete_firewall(firewall_policy_id)

        changed = True if firewall_policy else False

        return (changed, {
            'id': firewall_policy['id'],
            'name': firewall_policy['name']
        })
    except Exception as e:
        module.fail_json(msg=str(e))


def main():
    module = AnsibleModule(
        argument_spec=dict(
            auth_token=dict(
                type='str',
                default=os.environ.get('ONEANDONE_AUTH_TOKEN')),
            api_url=dict(
                type='str',
                default=os.environ.get('ONEANDONE_API_URL')),
            name=dict(type='str'),
            firewall_policy=dict(type='str'),
            description=dict(type='str'),
            rules=dict(type='list', default=[]),
            add_server_ips=dict(type='list', default=[]),
            remove_server_ips=dict(type='list', default=[]),
            add_rules=dict(type='list', default=[]),
            remove_rules=dict(type='list', default=[]),
            wait=dict(type='bool', default=True),
            wait_timeout=dict(type='int', default=600),
            wait_interval=dict(type='int', default=5),
            state=dict(type='str', default='present', choices=['present', 'absent', 'update']),
        ),
        supports_check_mode=True
    )

    if not HAS_ONEANDONE_SDK:
        module.fail_json(msg='1and1 required for this module')

    if not module.params.get('auth_token'):
        module.fail_json(
            msg='The "auth_token" parameter or ' +
                'ONEANDONE_AUTH_TOKEN environment variable is required.')

    if not module.params.get('api_url'):
        oneandone_conn = oneandone.client.OneAndOneService(
            api_token=module.params.get('auth_token'))
    else:
        oneandone_conn = oneandone.client.OneAndOneService(
            api_token=module.params.get('auth_token'), api_url=module.params.get('api_url'))

    state = module.params.get('state')

    if state == 'absent':
        if not module.params.get('name'):
            module.fail_json(
                msg="'name' parameter is required to delete a firewall policy.")
        try:
            (changed, firewall_policy) = remove_firewall_policy(module, oneandone_conn)
        except Exception as e:
            module.fail_json(msg=str(e))
    elif state == 'update':
        if not module.params.get('firewall_policy'):
            module.fail_json(
                msg="'firewall_policy' parameter is required to update a firewall policy.")
        try:
            (changed, firewall_policy) = update_firewall_policy(module, oneandone_conn)
        except Exception as e:
            module.fail_json(msg=str(e))
    elif state == 'present':
        for param in ('name', 'rules'):
            if not module.params.get(param):
                module.fail_json(
                    msg="%s parameter is required for new firewall policies." % param)
        try:
            (changed, firewall_policy) = create_firewall_policy(module, oneandone_conn)
        except Exception as e:
            module.fail_json(msg=str(e))

    module.exit_json(changed=changed, firewall_policy=firewall_policy)


if __name__ == '__main__':
    main()
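
# --- Conceptual sketch (hypothetical names, not part of the module above) of
# the check-mode pattern this module applies everywhere: in check mode, report
# whether a change *would* happen and exit before calling the 1&1 API.
class _FakeModule(object):
    """Minimal stand-in for AnsibleModule, just for illustration."""
    def __init__(self, check_mode):
        self.check_mode = check_mode

    def exit_json(self, **kwargs):
        print('would exit with: %s' % kwargs)
        raise SystemExit(0)


def _guarded_update(module, needs_change):
    if module.check_mode:
        # mirrors _check_mode() above: stop before any side effects
        module.exit_json(changed=needs_change)
    print('performing the real API call...')


try:
    _guarded_update(_FakeModule(check_mode=True), needs_change=True)
except SystemExit:
    pass
_guarded_update(_FakeModule(check_mode=False), needs_change=True)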
gpl-3.0
evewspace/eve-wspace
evewspace/SiteTracker/models.py
6
10750
#   Eve W-Space
#   Copyright 2014 Andrew Austin and contributors
#
#   Licensed under the Apache License, Version 2.0 (the "License");
#   you may not use this file except in compliance with the License.
#   You may obtain a copy of the License at
#
#       http://www.apache.org/licenses/LICENSE-2.0
#
#   Unless required by applicable law or agreed to in writing, software
#   distributed under the License is distributed on an "AS IS" BASIS,
#   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
#   See the License for the specific language governing permissions and
#   limitations under the License.

from django.db import models
from django.conf import settings
from Map.models import Map, System, MapSystem
from core.utils import get_config
from datetime import datetime
import pytz

# Create your models here.

User = settings.AUTH_USER_MODEL


class Fleet(models.Model):
    """Represents a SiteTracker fleet."""
    system = models.ForeignKey(System, related_name="stfleets")
    initial_boss = models.ForeignKey(User, related_name="bossfleets")
    current_boss = models.ForeignKey(User, related_name="currently_bossing")
    started = models.DateTimeField(auto_now_add=True)
    ended = models.DateTimeField(blank=True, null=True)
    roles_needed = models.ManyToManyField('SiteRole', related_name="fleets_need")

    class Meta:
        permissions = (("can_sitetracker", "Use the Site Tracker system."),)

    def __unicode__(self):
        return u"MapSystem: %s Boss: %s Started: %s Ended: %s" % (
            self.system.name, self.current_boss.username, self.started, self.ended)

    def __contains__(self, user):
        """
        Allow for user in fleet syntax to determine both inactive and
        active user records for a fleet.
        """
        if user is None:
            return False
        return self.members.filter(user=user).exists()

    def __iter__(self):
        """
        Provide support for syntax: for User in fleet
        """
        for member_rec in self.members.all():
            yield member_rec.user

    def credit_site(self, site_type, system, boss):
        """
        Credits a site.
        """
        # Get the fleet member weighting variable and multiplier
        x = float(get_config("ST_SIZE_WEIGHT", None).value)
        n = self.members.filter(leavetime=None).count()
        if x > 1:
            weight_factor = x / float(n + (x - 1))
        else:
            # If the factor is set to anything equal to or less than 1,
            # we will not weight the results by fleet size
            weight_factor = float(1)
        if SystemWeight.objects.filter(system=system).count():
            weight_factor = weight_factor * system.st_weight.weight
        raw_points = SiteWeight.objects.get(site_type=site_type,
                                            sysclass=system.sysclass).raw_points
        site = SiteRecord(fleet=self, site_type=site_type, system=system,
                          boss=boss,
                          fleetsize=self.members.filter(leavetime=None).count(),
                          raw_points=raw_points,
                          weighted_points=raw_points * weight_factor)
        site.save()
        for user in self.members.filter(leavetime=None).all():
            site.members.add(UserSite(site=site, user=user.user, pending=False))
        return site

    def close_fleet(self):
        """
        Closes the SiteTracker fleet.
        """
        for member in self.members.filter(leavetime=None):
            member.leavetime = datetime.now(pytz.utc)
            member.save()
        self.ended = datetime.now(pytz.utc)
        self.save()

    def join_fleet(self, user):
        """
        Adds user to fleet.
        """
        if not self.members.filter(user=user, leavetime=None).count():
            u = UserLog(fleet=self, user=user).save()
        else:
            u = self.members.get(user=user, leavetime=None)
        return u

    def active_members(self):
        """
        Return a list of active members.
        """
        return self.members.filter(leavetime=None)

    def leave_fleet(self, user):
        """
        Removes user from fleet.
        """
        if self.members.filter(leavetime=None).count() == 1:
            # We're the only member left, close the fleet.
            self.close_fleet()
            return None
        elif self.current_boss == user:
            # We're the boss, give it to the first schmuck we can.
            self.current_boss = self.members.exclude(user=user).filter(
                leavetime=None).all()[0].user
            self.save()
        UserLog.objects.filter(fleet=self, user=user,
                               leavetime=None).update(leavetime=datetime.now(pytz.utc))

    def make_boss(self, user):
        """
        Change the current fleet boss.
        """
        self.current_boss = user
        self.save()


class SiteRole(models.Model):
    """Represents a role for a sitetracker fleet."""
    short_name = models.CharField(max_length=32, unique=True)
    long_name = models.CharField(max_length=255, unique=True)


class SiteType(models.Model):
    """Represents a type of site that can be credited."""
    shortname = models.CharField(max_length=8, unique=True)
    longname = models.CharField(max_length=80, unique=True)
    # Defunct site types are maintained in the database for relational
    # purposes but can no longer be credited
    defunct = models.BooleanField(default=False)

    def __unicode__(self):
        return self.longname


class SiteWeight(models.Model):
    """
    Represents the raw points available for a site type / system class combo
    """
    site_type = models.ForeignKey(SiteType, related_name='weights')
    sysclass = models.IntegerField(choices=[(1, "C1"), (2, "C2"), (3, "C3"),
                                            (4, "C4"), (5, "C5"), (6, "C6"),
                                            (7, "High Sec"), (8, "Low Sec"),
                                            (9, "Null Sec"), (10, "Jove"),
                                            (11, "Jove"), (12, "Thera"),
                                            (13, "Small Ship")])
    raw_points = models.IntegerField()


class SystemWeight(models.Model):
    """
    Represents a multiplier for site credit for a system.
    """
    system = models.OneToOneField(System, primary_key=True,
                                  related_name='st_weight')
    weight = models.FloatField()


class SiteRecord(models.Model):
    """Represents the record of a site run."""
    fleet = models.ForeignKey(Fleet, related_name="sites")
    site_type = models.ForeignKey(SiteType, related_name="sitesrun")
    timestamp = models.DateTimeField(auto_now_add=True)
    system = models.ForeignKey(System, related_name="sitescompleted")
    boss = models.ForeignKey(User, related_name="sitescredited")
    fleetsize = models.IntegerField()
    raw_points = models.IntegerField()
    weighted_points = models.FloatField()

    def __unicode__(self):
        return u"System: %s Time: %s Type: %s" % (
            self.system.name, self.timestamp, self.site_type.shortname)

    def __contains__(self, user):
        """
        Allow for if user in siterecord to determine if a user has
        an entry.
        """
        if user is None:
            return False
        return self.members.filter(user=user).exists()

    def __iter__(self):
        """
        Allow for syntax: for user in siterecord.
        """
        for log in self.members.all():
            yield log.user

    def is_pending(self, user):
        """
        Return True if user's credit is pending.
        """
        return self.members.get(user=user).pending


class UserSite(models.Model):
    """Represents a user's credit for a site."""
    site = models.ForeignKey(SiteRecord, related_name="members")
    user = models.ForeignKey(User, related_name="sites")
    pending = models.BooleanField(default=False)

    def approve(self):
        """
        Mark the site approved.
        """
        new_fleetsize = self.site.fleetsize + 1
        x = float(get_config("ST_SIZE_WEIGHT", None).value)
        n = new_fleetsize
        if x > 1:
            weight_factor = x / float(n + (x - 1))
        else:
            weight_factor = float(1)
        self.site.fleetsize = new_fleetsize
        self.site.weighted_points = self.site.raw_points * weight_factor
        self.site.save()
        self.pending = False
        self.save()


class UserLog(models.Model):
    """Represents a user's sitetracker log."""
    fleet = models.ForeignKey(Fleet, related_name="members")
    user = models.ForeignKey(User, related_name="sitetrackerlogs")
    jointime = models.DateTimeField(auto_now_add=True)
    leavetime = models.DateTimeField(null=True, blank=True)

    def pending_sites(self):
        """
        Returns a list of site records which are pending credit.
        """
        pending_sites = []
        for site in self.fleet.sites.all():
            if UserSite.objects.filter(user=self.user, site=site,
                                       pending=True).exists():
                pending_sites.append(site)
        return pending_sites


class ClaimPeriod(models.Model):
    """Represents a claim period that Users can claim against."""
    starttime = models.DateTimeField()
    endtime = models.DateTimeField()
    name = models.CharField(max_length=80)
    closetime = models.DateTimeField(blank=True, null=True)
    loothauledby = models.ForeignKey(User, related_name="loothauled",
                                     null=True, blank=True)
    lootsoldby = models.ForeignKey(User, related_name="lootsold",
                                   null=True, blank=True)

    class Meta:
        permissions = (("can_close_claims", "Close the claims period early."),
                       ("can_reopen_claims", "Reopen the claims period."),
                       ("can_haul_loot", "Mark the claim period as hauled."),
                       ("can_sell_loot", "Mark the claim period as sold."),)

    def __unicode__(self):
        return self.name


class Claim(models.Model):
    """Represents a User's claim for a claim period."""
    period = models.ForeignKey(ClaimPeriod, related_name="claims")
    user = models.ForeignKey(User, related_name="claims")
    shareclaimed = models.FloatField()
    description = models.TextField()
    bonus = models.FloatField(blank=True, null=True)


class PayoutReport(models.Model):
    """Represents a payout report and contains general information about
    the payout period."""
    period = models.ForeignKey(ClaimPeriod, related_name="reports")
    createdby = models.ForeignKey(User, related_name="payoutreports")
    grossprofit = models.BigIntegerField()
    datepaid = models.DateTimeField(blank=True, null=True)


class PayoutEntry(models.Model):
    """Represents an entry in the payout report."""
    report = models.ForeignKey(PayoutReport, related_name="entries")
    user = models.ForeignKey(User, related_name="payouts")
    claim = models.ForeignKey(Claim, related_name="payout")
    iskshare = models.BigIntegerField()
apache-2.0
mKeRix/home-assistant
tests/components/upnp/mock_device.py
14
2050
"""Mock device for testing purposes.""" from typing import Mapping from homeassistant.components.upnp.const import ( BYTES_RECEIVED, BYTES_SENT, PACKETS_RECEIVED, PACKETS_SENT, TIMESTAMP, ) from homeassistant.components.upnp.device import Device import homeassistant.util.dt as dt_util class MockDevice(Device): """Mock device for Device.""" def __init__(self, udn): """Initialize mock device.""" igd_device = object() super().__init__(igd_device) self._udn = udn self.added_port_mappings = [] self.removed_port_mappings = [] @classmethod async def async_create_device(cls, hass, ssdp_location): """Return self.""" return cls("UDN") @property def udn(self) -> str: """Get the UDN.""" return self._udn @property def manufacturer(self) -> str: """Get manufacturer.""" return "mock-manufacturer" @property def name(self) -> str: """Get name.""" return "mock-name" @property def model_name(self) -> str: """Get the model name.""" return "mock-model-name" @property def device_type(self) -> str: """Get the device type.""" return "urn:schemas-upnp-org:device:InternetGatewayDevice:1" async def _async_add_port_mapping( self, external_port: int, local_ip: str, internal_port: int ) -> None: """Add a port mapping.""" entry = [external_port, local_ip, internal_port] self.added_port_mappings.append(entry) async def _async_delete_port_mapping(self, external_port: int) -> None: """Remove a port mapping.""" entry = external_port self.removed_port_mappings.append(entry) async def async_get_traffic_data(self) -> Mapping[str, any]: """Get traffic data.""" return { TIMESTAMP: dt_util.utcnow(), BYTES_RECEIVED: 0, BYTES_SENT: 0, PACKETS_RECEIVED: 0, PACKETS_SENT: 0, }
mit
lsqtongxin/ryu
ryu/services/protocols/bgp/info_base/vpn.py
34
3720
# Copyright (C) 2014 Nippon Telegraph and Telephone Corporation.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""
Defines base data types and models required specifically for VPN support.
"""

import abc
import logging

from ryu.services.protocols.bgp.info_base.base import Destination
from ryu.services.protocols.bgp.info_base.base import NonVrfPathProcessingMixin
from ryu.services.protocols.bgp.info_base.base import Path
from ryu.services.protocols.bgp.info_base.base import Table

LOG = logging.getLogger('bgpspeaker.info_base.vpn')


class VpnTable(Table):
    """Global table to store VPNv4 routing information.

    Uses `VpnvXDest` to store destination information for each known vpnvX
    paths.
    """
    ROUTE_FAMILY = None
    VPN_DEST_CLASS = None

    def __init__(self, core_service, signal_bus):
        super(VpnTable, self).__init__(None, core_service, signal_bus)

    def _table_key(self, vpn_nlri):
        """Return a key that will uniquely identify this vpnvX NLRI inside
        this table.
        """
        return vpn_nlri.route_dist + ':' + vpn_nlri.prefix

    def _create_dest(self, nlri):
        return self.VPN_DEST_CLASS(self, nlri)

    def __str__(self):
        return '%s(scope_id: %s, rf: %s)' % (
            self.__class__.__name__, self.scope_id, self.route_family
        )


class VpnPath(Path):
    __metaclass__ = abc.ABCMeta
    ROUTE_FAMILY = None
    VRF_PATH_CLASS = None
    NLRI_CLASS = None

    def clone_to_vrf(self, is_withdraw=False):
        vrf_nlri = self.NLRI_CLASS(self._nlri.prefix)

        pathattrs = None
        if not is_withdraw:
            pathattrs = self.pathattr_map

        vrf_path = self.VRF_PATH_CLASS(
            self.VRF_PATH_CLASS.create_puid(
                self._nlri.route_dist,
                self._nlri.prefix
            ),
            self.source, vrf_nlri,
            self.source_version_num,
            pattrs=pathattrs,
            nexthop=self.nexthop,
            is_withdraw=is_withdraw,
            label_list=self._nlri.label_list)

        return vrf_path


class VpnDest(Destination, NonVrfPathProcessingMixin):
    """Base class for VPN destinations."""
    __metaclass__ = abc.ABCMeta

    def _best_path_lost(self):
        old_best_path = self._best_path
        NonVrfPathProcessingMixin._best_path_lost(self)
        self._core_service._signal_bus.best_path_changed(old_best_path, True)

        # Best-path might have been imported into VRF tables, we have to
        # withdraw from them, if the source is a peer.
        if old_best_path:
            withdraw_clone = old_best_path.clone(for_withdrawal=True)
            tm = self._core_service.table_manager
            tm.import_single_vpn_path_to_all_vrfs(
                withdraw_clone, path_rts=old_best_path.get_rts()
            )

    def _new_best_path(self, best_path):
        NonVrfPathProcessingMixin._new_best_path(self, best_path)
        self._core_service._signal_bus.best_path_changed(best_path, False)

        # Extranet feature requires that we import new best path into VRFs.
        tm = self._core_service.table_manager
        tm.import_single_vpn_path_to_all_vrfs(
            self._best_path, self._best_path.get_rts())
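
# --- Illustrative sketch (hypothetical values, not from the ryu sources): the
# key scheme _table_key uses above, route distinguisher and prefix joined by
# ':' so each vpnvX NLRI maps to one table entry.
class _FakeVpnNlri(object):
    route_dist = '65000:100'
    prefix = '10.0.0.0/24'

def _demo_table_key(vpn_nlri):
    return vpn_nlri.route_dist + ':' + vpn_nlri.prefix

print(_demo_table_key(_FakeVpnNlri()))  # -> '65000:100:10.0.0.0/24'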
apache-2.0
person142/scipy
scipy/fft/tests/test_helper.py
7
9706
from scipy.fft._helper import next_fast_len, _init_nd_shape_and_axes
from numpy.testing import assert_equal, assert_array_equal
from pytest import raises as assert_raises
import pytest
import numpy as np
import sys

_5_smooth_numbers = [
    2, 3, 4, 5, 6, 8, 9, 10,
    2 * 3 * 5,
    2**3 * 3**5,
    2**3 * 3**3 * 5**2,
]


def test_next_fast_len():
    for n in _5_smooth_numbers:
        assert_equal(next_fast_len(n), n)


def _assert_n_smooth(x, n):
    x_orig = x
    if n < 2:
        assert False

    while True:
        q, r = divmod(x, 2)
        if r != 0:
            break
        x = q

    for d in range(3, n+1, 2):
        while True:
            q, r = divmod(x, d)
            if r != 0:
                break
            x = q

    assert x == 1, \
        'x={} is not {}-smooth, remainder={}'.format(x_orig, n, x)


class TestNextFastLen(object):

    def test_next_fast_len(self):
        np.random.seed(1234)

        def nums():
            for j in range(1, 1000):
                yield j
            yield 2**5 * 3**5 * 4**5 + 1

        for n in nums():
            m = next_fast_len(n)
            _assert_n_smooth(m, 11)
            assert m == next_fast_len(n, False)

            m = next_fast_len(n, True)
            _assert_n_smooth(m, 5)

    def test_np_integers(self):
        ITYPES = [np.int16, np.int32, np.int64, np.uint16, np.uint32, np.uint64]
        for ityp in ITYPES:
            x = ityp(12345)
            testN = next_fast_len(x)
            assert_equal(testN, next_fast_len(int(x)))

    def testnext_fast_len_small(self):
        hams = {
            1: 1, 2: 2, 3: 3, 4: 4, 5: 5, 6: 6, 7: 8, 8: 8, 14: 15, 15: 15,
            16: 16, 17: 18, 1021: 1024, 1536: 1536, 51200000: 51200000
        }
        for x, y in hams.items():
            assert_equal(next_fast_len(x, True), y)

    @pytest.mark.xfail(sys.maxsize < 2**32,
                       reason="Hamming Numbers too large for 32-bit",
                       raises=ValueError, strict=True)
    def testnext_fast_len_big(self):
        hams = {
            510183360: 510183360, 510183360 + 1: 512000000,
            511000000: 512000000,
            854296875: 854296875, 854296875 + 1: 859963392,
            196608000000: 196608000000, 196608000000 + 1: 196830000000,
            8789062500000: 8789062500000, 8789062500000 + 1: 8796093022208,
            206391214080000: 206391214080000,
            206391214080000 + 1: 206624260800000,
            470184984576000: 470184984576000,
            470184984576000 + 1: 470715894135000,
            7222041363087360: 7222041363087360,
            7222041363087360 + 1: 7230196133913600,
            # power of 5    5**23
            11920928955078125: 11920928955078125,
            11920928955078125 - 1: 11920928955078125,
            # power of 3    3**34
            16677181699666569: 16677181699666569,
            16677181699666569 - 1: 16677181699666569,
            # power of 2   2**54
            18014398509481984: 18014398509481984,
            18014398509481984 - 1: 18014398509481984,
            # above this, int(ceil(n)) == int(ceil(n+1))
            19200000000000000: 19200000000000000,
            19200000000000000 + 1: 19221679687500000,
            288230376151711744: 288230376151711744,
            288230376151711744 + 1: 288325195312500000,
            288325195312500000 - 1: 288325195312500000,
            288325195312500000: 288325195312500000,
            288325195312500000 + 1: 288555831593533440,
        }
        for x, y in hams.items():
            assert_equal(next_fast_len(x, True), y)


class Test_init_nd_shape_and_axes(object):

    def test_py_0d_defaults(self):
        x = np.array(4)
        shape = None
        axes = None

        shape_expected = np.array([])
        axes_expected = np.array([])

        shape_res, axes_res = _init_nd_shape_and_axes(x, shape, axes)

        assert_equal(shape_res, shape_expected)
        assert_equal(axes_res, axes_expected)

    def test_np_0d_defaults(self):
        x = np.array(7.)
        shape = None
        axes = None

        shape_expected = np.array([])
        axes_expected = np.array([])

        shape_res, axes_res = _init_nd_shape_and_axes(x, shape, axes)

        assert_equal(shape_res, shape_expected)
        assert_equal(axes_res, axes_expected)

    def test_py_1d_defaults(self):
        x = np.array([1, 2, 3])
        shape = None
        axes = None

        shape_expected = np.array([3])
        axes_expected = np.array([0])

        shape_res, axes_res = _init_nd_shape_and_axes(x, shape, axes)

        assert_equal(shape_res, shape_expected)
        assert_equal(axes_res, axes_expected)

    def test_np_1d_defaults(self):
        x = np.arange(0, 1, .1)
        shape = None
        axes = None

        shape_expected = np.array([10])
        axes_expected = np.array([0])

        shape_res, axes_res = _init_nd_shape_and_axes(x, shape, axes)

        assert_equal(shape_res, shape_expected)
        assert_equal(axes_res, axes_expected)

    def test_py_2d_defaults(self):
        x = np.array([[1, 2, 3, 4],
                      [5, 6, 7, 8]])
        shape = None
        axes = None

        shape_expected = np.array([2, 4])
        axes_expected = np.array([0, 1])

        shape_res, axes_res = _init_nd_shape_and_axes(x, shape, axes)

        assert_equal(shape_res, shape_expected)
        assert_equal(axes_res, axes_expected)

    def test_np_2d_defaults(self):
        x = np.arange(0, 1, .1).reshape(5, 2)
        shape = None
        axes = None

        shape_expected = np.array([5, 2])
        axes_expected = np.array([0, 1])

        shape_res, axes_res = _init_nd_shape_and_axes(x, shape, axes)

        assert_equal(shape_res, shape_expected)
        assert_equal(axes_res, axes_expected)

    def test_np_5d_defaults(self):
        x = np.zeros([6, 2, 5, 3, 4])
        shape = None
        axes = None

        shape_expected = np.array([6, 2, 5, 3, 4])
        axes_expected = np.array([0, 1, 2, 3, 4])

        shape_res, axes_res = _init_nd_shape_and_axes(x, shape, axes)

        assert_equal(shape_res, shape_expected)
        assert_equal(axes_res, axes_expected)

    def test_np_5d_set_shape(self):
        x = np.zeros([6, 2, 5, 3, 4])
        shape = [10, -1, -1, 1, 4]
        axes = None

        shape_expected = np.array([10, 2, 5, 1, 4])
        axes_expected = np.array([0, 1, 2, 3, 4])

        shape_res, axes_res = _init_nd_shape_and_axes(x, shape, axes)

        assert_equal(shape_res, shape_expected)
        assert_equal(axes_res, axes_expected)

    def test_np_5d_set_axes(self):
        x = np.zeros([6, 2, 5, 3, 4])
        shape = None
        axes = [4, 1, 2]

        shape_expected = np.array([4, 2, 5])
        axes_expected = np.array([4, 1, 2])

        shape_res, axes_res = _init_nd_shape_and_axes(x, shape, axes)

        assert_equal(shape_res, shape_expected)
        assert_equal(axes_res, axes_expected)

    def test_np_5d_set_shape_axes(self):
        x = np.zeros([6, 2, 5, 3, 4])
        shape = [10, -1, 2]
        axes = [1, 0, 3]

        shape_expected = np.array([10, 6, 2])
        axes_expected = np.array([1, 0, 3])

        shape_res, axes_res = _init_nd_shape_and_axes(x, shape, axes)

        assert_equal(shape_res, shape_expected)
        assert_equal(axes_res, axes_expected)

    def test_shape_axes_subset(self):
        x = np.zeros((2, 3, 4, 5))
        shape, axes = _init_nd_shape_and_axes(x, shape=(5, 5, 5), axes=None)

        assert_array_equal(shape, [5, 5, 5])
        assert_array_equal(axes, [1, 2, 3])

    def test_errors(self):
        x = np.zeros(1)
        with assert_raises(ValueError, match="axes must be a scalar or "
                           "iterable of integers"):
            _init_nd_shape_and_axes(x, shape=None, axes=[[1, 2], [3, 4]])

        with assert_raises(ValueError, match="axes must be a scalar or "
                           "iterable of integers"):
            _init_nd_shape_and_axes(x, shape=None, axes=[1., 2., 3., 4.])

        with assert_raises(ValueError,
                           match="axes exceeds dimensionality of input"):
            _init_nd_shape_and_axes(x, shape=None, axes=[1])

        with assert_raises(ValueError,
                           match="axes exceeds dimensionality of input"):
            _init_nd_shape_and_axes(x, shape=None, axes=[-2])

        with assert_raises(ValueError, match="all axes must be unique"):
            _init_nd_shape_and_axes(x, shape=None, axes=[0, 0])

        with assert_raises(ValueError, match="shape must be a scalar or "
                           "iterable of integers"):
            _init_nd_shape_and_axes(x, shape=[[1, 2], [3, 4]], axes=None)

        with assert_raises(ValueError, match="shape must be a scalar or "
                           "iterable of integers"):
            _init_nd_shape_and_axes(x, shape=[1., 2., 3., 4.], axes=None)

        with assert_raises(ValueError,
                           match="when given, axes and shape arguments"
                           " have to be of the same length"):
            _init_nd_shape_and_axes(np.zeros([1, 1, 1, 1]),
                                    shape=[1, 2, 3], axes=[1])

        with assert_raises(ValueError,
                           match="invalid number of data points"
                           r" \(\[0\]\) specified"):
            _init_nd_shape_and_axes(x, shape=[0], axes=None)

        with assert_raises(ValueError,
                           match="invalid number of data points"
                           r" \(\[-2\]\) specified"):
            _init_nd_shape_and_axes(x, shape=-2, axes=None)
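
# --- Usage sketch (assuming a SciPy with scipy.fft, i.e. >= 1.4): the
# behaviour the tests above pin down. With real=True the result is 5-smooth;
# by default 7- and 11-smooth sizes are also allowed.
from scipy.fft import next_fast_len

print(next_fast_len(17, True))    # -> 18   (2 * 3**2), from the table above
print(next_fast_len(1021, True))  # -> 1024 (2**10), from the table above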
bsd-3-clause
drjeep/django
tests/admin_changelist/models.py
276
2890
from django.db import models
from django.utils.encoding import python_2_unicode_compatible


class Event(models.Model):
    # Oracle can have problems with a column named "date"
    date = models.DateField(db_column="event_date")


class Parent(models.Model):
    name = models.CharField(max_length=128)


class Child(models.Model):
    parent = models.ForeignKey(Parent, models.SET_NULL, editable=False, null=True)
    name = models.CharField(max_length=30, blank=True)
    age = models.IntegerField(null=True, blank=True)


class Genre(models.Model):
    name = models.CharField(max_length=20)


class Band(models.Model):
    name = models.CharField(max_length=20)
    nr_of_members = models.PositiveIntegerField()
    genres = models.ManyToManyField(Genre)


@python_2_unicode_compatible
class Musician(models.Model):
    name = models.CharField(max_length=30)

    def __str__(self):
        return self.name


@python_2_unicode_compatible
class Group(models.Model):
    name = models.CharField(max_length=30)
    members = models.ManyToManyField(Musician, through='Membership')

    def __str__(self):
        return self.name


class Concert(models.Model):
    name = models.CharField(max_length=30)
    group = models.ForeignKey(Group, models.CASCADE)


class Membership(models.Model):
    music = models.ForeignKey(Musician, models.CASCADE)
    group = models.ForeignKey(Group, models.CASCADE)
    role = models.CharField(max_length=15)


class Quartet(Group):
    pass


class ChordsMusician(Musician):
    pass


class ChordsBand(models.Model):
    name = models.CharField(max_length=30)
    members = models.ManyToManyField(ChordsMusician, through='Invitation')


class Invitation(models.Model):
    player = models.ForeignKey(ChordsMusician, models.CASCADE)
    band = models.ForeignKey(ChordsBand, models.CASCADE)
    instrument = models.CharField(max_length=15)


class Swallow(models.Model):
    origin = models.CharField(max_length=255)
    load = models.FloatField()
    speed = models.FloatField()

    class Meta:
        ordering = ('speed', 'load')


class SwallowOneToOne(models.Model):
    swallow = models.OneToOneField(Swallow, models.CASCADE)


class UnorderedObject(models.Model):
    """
    Model without any defined `Meta.ordering`.
    Refs #17198.
    """
    bool = models.BooleanField(default=True)


class OrderedObjectManager(models.Manager):
    def get_queryset(self):
        return super(OrderedObjectManager, self).get_queryset().order_by('number')


class OrderedObject(models.Model):
    """
    Model with Manager that defines a default order.
    Refs #17198.
    """
    name = models.CharField(max_length=255)
    bool = models.BooleanField(default=True)
    number = models.IntegerField(default=0, db_column='number_val')

    objects = OrderedObjectManager()


class CustomIdUser(models.Model):
    uuid = models.AutoField(primary_key=True)
bsd-3-clause
wolfram74/numerical_methods_iserles_notes
venv/lib/python2.7/site-packages/tornado/platform/caresresolver.py
193
3092
from __future__ import absolute_import, division, print_function, with_statement

import pycares
import socket

from tornado import gen
from tornado.ioloop import IOLoop
from tornado.netutil import Resolver, is_valid_ip


class CaresResolver(Resolver):
    """Name resolver based on the c-ares library.

    This is a non-blocking and non-threaded resolver.  It may not produce
    the same results as the system resolver, but can be used for non-blocking
    resolution when threads cannot be used.

    c-ares fails to resolve some names when ``family`` is ``AF_UNSPEC``,
    so it is only recommended for use in ``AF_INET`` (i.e. IPv4).  This is
    the default for ``tornado.simple_httpclient``, but other libraries
    may default to ``AF_UNSPEC``.

    .. versionchanged:: 4.1
       The ``io_loop`` argument is deprecated.
    """
    def initialize(self, io_loop=None):
        self.io_loop = io_loop or IOLoop.current()
        self.channel = pycares.Channel(sock_state_cb=self._sock_state_cb)
        self.fds = {}

    def _sock_state_cb(self, fd, readable, writable):
        state = ((IOLoop.READ if readable else 0) |
                 (IOLoop.WRITE if writable else 0))
        if not state:
            self.io_loop.remove_handler(fd)
            del self.fds[fd]
        elif fd in self.fds:
            self.io_loop.update_handler(fd, state)
            self.fds[fd] = state
        else:
            self.io_loop.add_handler(fd, self._handle_events, state)
            self.fds[fd] = state

    def _handle_events(self, fd, events):
        read_fd = pycares.ARES_SOCKET_BAD
        write_fd = pycares.ARES_SOCKET_BAD
        if events & IOLoop.READ:
            read_fd = fd
        if events & IOLoop.WRITE:
            write_fd = fd
        self.channel.process_fd(read_fd, write_fd)

    @gen.coroutine
    def resolve(self, host, port, family=0):
        if is_valid_ip(host):
            addresses = [host]
        else:
            # gethostbyname doesn't take callback as a kwarg
            self.channel.gethostbyname(host, family, (yield gen.Callback(1)))
            callback_args = yield gen.Wait(1)
            assert isinstance(callback_args, gen.Arguments)
            assert not callback_args.kwargs
            result, error = callback_args.args
            if error:
                raise Exception('C-Ares returned error %s: %s while resolving %s' %
                                (error, pycares.errno.strerror(error), host))
            addresses = result.addresses
        addrinfo = []
        for address in addresses:
            if '.' in address:
                address_family = socket.AF_INET
            elif ':' in address:
                address_family = socket.AF_INET6
            else:
                address_family = socket.AF_UNSPEC
            if family != socket.AF_UNSPEC and family != address_family:
                raise Exception('Requested socket family %d but got %d' %
                                (family, address_family))
            addrinfo.append((address_family, (address, port)))
        raise gen.Return(addrinfo)
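
# --- Usage sketch (not part of the vendored file), assuming pycares plus the
# same tornado generation this copy targets, i.e. one where gen.Callback and
# gen.Wait still exist: resolving a name on the IOLoop with CaresResolver.
@gen.coroutine
def _demo():
    resolver = CaresResolver()
    addrinfo = yield resolver.resolve('localhost', 80, socket.AF_INET)
    print(addrinfo)  # e.g. [(2, ('127.0.0.1', 80))]

IOLoop.current().run_sync(_demo)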
mit
morelab/weblabdeusto
server/src/voodoo/sessions/sqlalchemy_data.py
3
1209
#-*-*- encoding: utf-8 -*-*-
#
# Copyright (C) 2005 onwards University of Deusto
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution.
#
# This software consists of contributions made by many individuals,
# listed below:
#
# Author: Pablo Orduña <pablo@ordunya.com>
#

from __future__ import print_function, unicode_literals

from sqlalchemy import Column, String, DateTime, LargeBinary
from sqlalchemy.ext.declarative import declarative_base

SessionBase = declarative_base()


class Session(SessionBase):
    __tablename__ = 'Sessions'

    sess_id = Column(String(100), primary_key = True)
    session_pool_id = Column(String(100), nullable = False)
    start_date = Column(DateTime(), nullable = False)
    latest_access = Column(DateTime())
    latest_change = Column(DateTime())
    session_obj = Column(LargeBinary(), nullable = False)

    def __init__(self, sess_id, session_pool_id, start_date, session_obj):
        self.sess_id = sess_id
        self.session_pool_id = session_pool_id
        self.start_date = start_date
        self.session_obj = session_obj
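
# --- Usage sketch (not part of the original file): creating the Sessions
# table in an in-memory SQLite database and storing a single row.
from datetime import datetime
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker

if __name__ == '__main__':
    engine = create_engine('sqlite://')
    SessionBase.metadata.create_all(engine)
    db = sessionmaker(bind=engine)()
    # session_obj holds a pickled session object in the real system
    db.add(Session('abc123', 'core-pool', datetime.utcnow(), b'pickled-state'))
    db.commit()
    print(db.query(Session).count())  # -> 1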
bsd-2-clause
juanyaw/PTVS
Python/Product/Django/Templates/Projects/DjangoProject/settings.py
36
5582
""" Django settings for $safeprojectname$ project. """ DEBUG = True TEMPLATE_DEBUG = DEBUG ADMINS = ( # ('Your Name', 'your_email@example.com'), ) MANAGERS = ADMINS DATABASES = { 'default': { # Add 'postgresql_psycopg2', 'mysql', 'sqlite3' or 'oracle'. 'ENGINE': 'django.db.backends.', # Or path to database file if using sqlite3. 'NAME': '', # Not used with sqlite3. 'USER': '', # Not used with sqlite3. 'PASSWORD': '', # Set to empty string for localhost. Not used with sqlite3. 'HOST': '', # Set to empty string for default. Not used with sqlite3. 'PORT': '', } } # Local time zone for this installation. Choices can be found here: # http://en.wikipedia.org/wiki/List_of_tz_zones_by_name # although not all choices may be available on all operating systems. # On Unix systems, a value of None will cause Django to use the same # timezone as the operating system. # If running in a Windows environment this must be set to the same as your # system time zone. TIME_ZONE = 'America/Chicago' # Language code for this installation. All choices can be found here: # http://www.i18nguy.com/unicode/language-identifiers.html LANGUAGE_CODE = 'en-us' SITE_ID = 1 # If you set this to False, Django will make some optimizations so as not # to load the internationalization machinery. USE_I18N = True # If you set this to False, Django will not format dates, numbers and # calendars according to the current locale. USE_L10N = True # If you set this to False, Django will not use timezone-aware datetimes. USE_TZ = True # Absolute filesystem path to the directory that will hold user-uploaded files. # Example: "/home/media/media.lawrence.com/media/" MEDIA_ROOT = '' # URL that handles the media served from MEDIA_ROOT. Make sure to use a # trailing slash. # Examples: "http://media.lawrence.com/media/", "http://example.com/media/" MEDIA_URL = '' # Absolute path to the directory static files should be collected to. # Don't put anything in this directory yourself; store your static files # in apps' "static/" subdirectories and in STATICFILES_DIRS. # Example: "/home/media/media.lawrence.com/static/" STATIC_ROOT = '' # URL prefix for static files. # Example: "http://media.lawrence.com/static/" STATIC_URL = '/static/' # Additional locations of static files STATICFILES_DIRS = ( # Put strings here, like "/home/html/static" or "C:/www/django/static". # Always use forward slashes, even on Windows. # Don't forget to use absolute paths, not relative paths. ) # List of finder classes that know how to find static files in # various locations. STATICFILES_FINDERS = ( 'django.contrib.staticfiles.finders.FileSystemFinder', 'django.contrib.staticfiles.finders.AppDirectoriesFinder', # 'django.contrib.staticfiles.finders.DefaultStorageFinder', ) # Make this unique, and don't share it with anybody. SECRET_KEY = 'n(bd1f1c%e8=_xad02x5qtfn%wgwpi492e$8_erx+d)!tpeoim' # List of callables that know how to import templates from various sources. 
TEMPLATE_LOADERS = ( 'django.template.loaders.filesystem.Loader', 'django.template.loaders.app_directories.Loader', # 'django.template.loaders.eggs.Loader', ) MIDDLEWARE_CLASSES = ( 'django.middleware.common.CommonMiddleware', 'django.contrib.sessions.middleware.SessionMiddleware', 'django.middleware.csrf.CsrfViewMiddleware', 'django.contrib.auth.middleware.AuthenticationMiddleware', 'django.contrib.messages.middleware.MessageMiddleware', # Uncomment the next line for simple clickjacking protection: # 'django.middleware.clickjacking.XFrameOptionsMiddleware', ) ROOT_URLCONF = '$safeprojectname$.urls' # Python dotted path to the WSGI application used by Django's runserver. WSGI_APPLICATION = '$safeprojectname$.wsgi.application' TEMPLATE_DIRS = ( # Put strings here, like "/home/html/django_templates" or # "C:/www/django/templates". # Always use forward slashes, even on Windows. # Don't forget to use absolute paths, not relative paths. ) INSTALLED_APPS = ( 'django.contrib.auth', 'django.contrib.contenttypes', 'django.contrib.sessions', 'django.contrib.sites', 'django.contrib.messages', 'django.contrib.staticfiles', # Uncomment the next line to enable the admin: # 'django.contrib.admin', # Uncomment the next line to enable admin documentation: # 'django.contrib.admindocs', ) # A sample logging configuration. The only tangible logging # performed by this configuration is to send an email to # the site admins on every HTTP 500 error when DEBUG=False. # See http://docs.djangoproject.com/en/dev/topics/logging for # more details on how to customize your logging configuration. LOGGING = { 'version': 1, 'disable_existing_loggers': False, 'filters': { 'require_debug_false': { '()': 'django.utils.log.RequireDebugFalse' } }, 'handlers': { 'mail_admins': { 'level': 'ERROR', 'filters': ['require_debug_false'], 'class': 'django.utils.log.AdminEmailHandler' } }, 'loggers': { 'django.request': { 'handlers': ['mail_admins'], 'level': 'ERROR', 'propagate': True, }, } } # Specify the default test runner. TEST_RUNNER = 'django.test.runner.DiscoverRunner'
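
# --- Illustrative completion (hypothetical values, not part of the template):
# the empty DATABASES block above filled in for a local sqlite3 development
# setup, following the template's own comments.
DATABASES_SQLITE_EXAMPLE = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': 'db.sqlite3',  # path to the database file when using sqlite3
        'USER': '',            # not used with sqlite3
        'PASSWORD': '',        # not used with sqlite3
        'HOST': '',            # not used with sqlite3
        'PORT': '',            # not used with sqlite3
    }
}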
apache-2.0
walbert947/ansible-modules-core
system/authorized_key.py
9
16360
#!/usr/bin/python
# -*- coding: utf-8 -*-

"""
Ansible module to add authorized_keys for ssh logins.
(c) 2012, Brad Olson <brado@movedbylight.com>

This file is part of Ansible

Ansible is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.

Ansible is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with Ansible.  If not, see <http://www.gnu.org/licenses/>.
"""

DOCUMENTATION = '''
---
module: authorized_key
short_description: Adds or removes an SSH authorized key
description:
    - "Adds or removes SSH authorized keys for particular user accounts"
version_added: "0.5"
options:
  user:
    description:
      - The username on the remote host whose authorized_keys file will be modified
    required: true
  key:
    description:
      - The SSH public key(s), as a string or (since 1.9) url (https://github.com/username.keys)
    required: true
  path:
    description:
      - Alternate path to the authorized_keys file
    required: false
    default: "(homedir)+/.ssh/authorized_keys"
    version_added: "1.2"
  manage_dir:
    description:
      - Whether this module should manage the directory of the authorized key file. If
        set, the module will create the directory, as well as set the owner and permissions
        of an existing directory. Be sure to
        set C(manage_dir=no) if you are using an alternate directory for
        authorized_keys, as set with C(path), since you could lock yourself out of
        SSH access. See the example below.
    required: false
    choices: [ "yes", "no" ]
    default: "yes"
    version_added: "1.2"
  state:
    description:
      - Whether the given key (with the given key_options) should or should not be in the file
    required: false
    choices: [ "present", "absent" ]
    default: "present"
  key_options:
    description:
      - A string of ssh key options to be prepended to the key in the authorized_keys file
    required: false
    default: null
    version_added: "1.4"
  exclusive:
    description:
      - Whether to remove all other non-specified keys from the authorized_keys file. Multiple keys
        can be specified in a single C(key) string value by separating them by newlines.
      - This option is not loop aware, so if you use C(with_) , it will be exclusive per iteration
        of the loop, if you want multiple keys in the file you need to pass them all to C(key) in a
        single batch as mentioned above.
    required: false
    choices: [ "yes", "no" ]
    default: "no"
    version_added: "1.9"
  validate_certs:
    description:
      - This only applies if using a https url as the source of the keys. If set to C(no), the SSL certificates will not be validated.
      - This should only set to C(no) used on personally controlled sites using self-signed certificates as it avoids verifying the source site.
      - Prior to 2.1 the code worked as if this was set to C(yes).
    required: false
    default: "yes"
    choices: ["yes", "no"]
    version_added: "2.1"
author: "Ansible Core Team"
'''

EXAMPLES = '''
# Example using key data from a local file on the management machine
- authorized_key: user=charlie key="{{ lookup('file', '/home/charlie/.ssh/id_rsa.pub') }}"

# Using github url as key source
- authorized_key: user=charlie key=https://github.com/charlie.keys

# Using alternate directory locations:
- authorized_key:
    user: charlie
    key: "{{ lookup('file', '/home/charlie/.ssh/id_rsa.pub') }}"
    path: '/etc/ssh/authorized_keys/charlie'
    manage_dir: no

# Using with_file
- name: Set up authorized_keys for the deploy user
  authorized_key: user=deploy key="{{ item }}"
  with_file:
    - public_keys/doe-jane
    - public_keys/doe-john

# Using key_options:
- authorized_key:
    user: charlie
    key: "{{ lookup('file', '/home/charlie/.ssh/id_rsa.pub') }}"
    key_options: 'no-port-forwarding,from="10.0.1.1"'

# Using validate_certs:
- authorized_key: user=charlie key=https://github.com/user.keys validate_certs=no

# Set up authorized_keys exclusively with one key
- authorized_key: user=root key="{{ item }}" state=present exclusive=yes
  with_file:
    - public_keys/doe-jane
'''

# Makes sure the public key line is present or absent in the user's .ssh/authorized_keys.
#
# Arguments
# =========
#    user = username
#    key = line to add to authorized_keys for user
#    path = path to the user's authorized_keys file (default: ~/.ssh/authorized_keys)
#    manage_dir = whether to create, and control ownership of the directory (default: true)
#    state = absent|present (default: present)
#
# see example in examples/playbooks

import sys
import os
import pwd
import os.path
import tempfile
import re
import shlex


class keydict(dict):
    """ a dictionary that maintains the order of keys as they are added """

    # http://stackoverflow.com/questions/2328235/pythonextend-the-dict-class

    def __init__(self, *args, **kw):
        super(keydict, self).__init__(*args, **kw)
        self.itemlist = super(keydict, self).keys()

    def __setitem__(self, key, value):
        self.itemlist.append(key)
        super(keydict, self).__setitem__(key, value)

    def __iter__(self):
        return iter(self.itemlist)

    def keys(self):
        return self.itemlist

    def values(self):
        return [self[key] for key in self]

    def itervalues(self):
        return (self[key] for key in self)


def keyfile(module, user, write=False, path=None, manage_dir=True):
    """
    Calculate name of authorized keys file, optionally creating the
    directories and file, properly setting permissions.

    :param str user: name of user in passwd file
    :param bool write: if True, write changes to authorized_keys file (creating directories if needed)
    :param str path: if not None, use provided path rather than default of '~user/.ssh/authorized_keys'
    :param bool manage_dir: if True, create and set ownership of the parent dir of the authorized_keys file
    :return: full path string to authorized_keys for user
    """

    if module.check_mode and path is not None:
        keysfile = path
        return keysfile

    try:
        user_entry = pwd.getpwnam(user)
    except KeyError, e:
        if module.check_mode and path is None:
            module.fail_json(msg="Either user must exist or you must provide full path to key file in check mode")
        module.fail_json(msg="Failed to lookup user %s: %s" % (user, str(e)))

    if path is None:
        homedir = user_entry.pw_dir
        sshdir = os.path.join(homedir, ".ssh")
        keysfile = os.path.join(sshdir, "authorized_keys")
    else:
        sshdir = os.path.dirname(path)
        keysfile = path

    if not write:
        return keysfile

    uid = user_entry.pw_uid
    gid = user_entry.pw_gid

    if manage_dir:
        if not os.path.exists(sshdir):
            os.mkdir(sshdir, 0700)
            if module.selinux_enabled():
                module.set_default_selinux_context(sshdir, False)
        os.chown(sshdir, uid, gid)
        os.chmod(sshdir, 0700)

    if not os.path.exists(keysfile):
        basedir = os.path.dirname(keysfile)
        if not os.path.exists(basedir):
            os.makedirs(basedir)
        try:
            f = open(keysfile, "w")  # touches file so we can set ownership and perms
        finally:
            f.close()
        if module.selinux_enabled():
            module.set_default_selinux_context(keysfile, False)

    try:
        os.chown(keysfile, uid, gid)
        os.chmod(keysfile, 0600)
    except OSError:
        pass

    return keysfile


def parseoptions(module, options):
    '''
    reads a string containing ssh-key options
    and returns a dictionary of those options
    '''
    options_dict = keydict()  # ordered dict
    if options:
        try:
            # the following regex will split on commas while
            # ignoring those commas that fall within quotes
            regex = re.compile(r'''((?:[^,"']|"[^"]*"|'[^']*')+)''')
            parts = regex.split(options)[1:-1]
            for part in parts:
                if "=" in part:
                    (key, value) = part.split("=", 1)
                    options_dict[key] = value
                elif part != ",":
                    options_dict[part] = None
        except:
            module.fail_json(msg="invalid option string: %s" % options)

    return options_dict


def parsekey(module, raw_key):
    '''
    parses a key, which may or may not contain a list
    of ssh-key options at the beginning
    '''

    VALID_SSH2_KEY_TYPES = [
        'ssh-ed25519',
        'ecdsa-sha2-nistp256',
        'ecdsa-sha2-nistp384',
        'ecdsa-sha2-nistp521',
        'ssh-dss',
        'ssh-rsa',
    ]

    options = None     # connection options
    key = None         # encrypted key string
    key_type = None    # type of ssh key
    type_index = None  # index of keytype in key string|list

    # remove comment yaml escapes
    raw_key = raw_key.replace('\#', '#')

    # split key safely
    lex = shlex.shlex(raw_key)
    lex.quotes = []
    lex.commenters = ''  # keep comment hashes
    lex.whitespace_split = True
    key_parts = list(lex)

    for i in range(0, len(key_parts)):
        if key_parts[i] in VALID_SSH2_KEY_TYPES:
            type_index = i
            key_type = key_parts[i]
            break

    # check for options
    if type_index is None:
        return None
    elif type_index > 0:
        options = " ".join(key_parts[:type_index])

    # parse the options (if any)
    options = parseoptions(module, options)

    # get key after the type index
    key = key_parts[(type_index + 1)]

    # set comment to everything after the key
    if len(key_parts) > (type_index + 1):
        comment = " ".join(key_parts[(type_index + 2):])

    return (key, key_type, options, comment)


def readkeys(module, filename):

    if not os.path.isfile(filename):
        return {}

    keys = {}
    f = open(filename)
    for line in f.readlines():
        key_data = parsekey(module, line)
if key_data: # use key as identifier keys[key_data[0]] = key_data else: # for an invalid line, just append the line # to the array so it will be re-output later keys[line] = line f.close() return keys def writekeys(module, filename, keys): fd, tmp_path = tempfile.mkstemp('', 'tmp', os.path.dirname(filename)) f = open(tmp_path,"w") try: for index, key in keys.items(): try: (keyhash,type,options,comment) = key option_str = "" if options: option_strings = [] for option_key in options.keys(): if options[option_key]: option_strings.append("%s=%s" % (option_key, options[option_key])) else: option_strings.append("%s" % option_key) option_str = ",".join(option_strings) option_str += " " key_line = "%s%s %s %s\n" % (option_str, type, keyhash, comment) except: key_line = key f.writelines(key_line) except IOError, e: module.fail_json(msg="Failed to write to file %s: %s" % (tmp_path, str(e))) f.close() module.atomic_move(tmp_path, filename) def enforce_state(module, params): """ Add or remove key. """ user = params["user"] key = params["key"] path = params.get("path", None) manage_dir = params.get("manage_dir", True) state = params.get("state", "present") key_options = params.get("key_options", None) exclusive = params.get("exclusive", False) validate_certs = params.get("validate_certs", True) error_msg = "Error getting key from: %s" # if the key is a url, request it and use it as key source if key.startswith("http"): try: resp, info = fetch_url(module, key) if info['status'] != 200: module.fail_json(msg=error_msg % key) else: key = resp.read() except Exception: module.fail_json(msg=error_msg % key) # extract individual keys into an array, skipping blank lines and comments key = [s for s in key.splitlines() if s and not s.startswith('#')] # check current state -- just get the filename, don't create file do_write = False params["keyfile"] = keyfile(module, user, do_write, path, manage_dir) existing_keys = readkeys(module, params["keyfile"]) # Add a place holder for keys that should exist in the state=present and # exclusive=true case keys_to_exist = [] # Check our new keys, if any of them exist we'll continue. for new_key in key: parsed_new_key = parsekey(module, new_key) if not parsed_new_key: module.fail_json(msg="invalid key specified: %s" % new_key) if key_options is not None: parsed_options = parseoptions(module, key_options) parsed_new_key = (parsed_new_key[0], parsed_new_key[1], parsed_options, parsed_new_key[3]) present = False matched = False non_matching_keys = [] if parsed_new_key[0] in existing_keys: present = True # Then we check if everything matches, including # the key type and options. 
If not, we append this # existing key to the non-matching list # We only want it to match everything when the state # is present if parsed_new_key != existing_keys[parsed_new_key[0]] and state == "present": non_matching_keys.append(existing_keys[parsed_new_key[0]]) else: matched = True # handle idempotent state=present if state=="present": keys_to_exist.append(parsed_new_key[0]) if len(non_matching_keys) > 0: for non_matching_key in non_matching_keys: if non_matching_key[0] in existing_keys: del existing_keys[non_matching_key[0]] do_write = True if not matched: existing_keys[parsed_new_key[0]] = parsed_new_key do_write = True elif state=="absent": if not matched: continue del existing_keys[parsed_new_key[0]] do_write = True # remove all other keys to honor exclusive if state == "present" and exclusive: to_remove = frozenset(existing_keys).difference(keys_to_exist) for key in to_remove: del existing_keys[key] do_write = True if do_write: if module.check_mode: module.exit_json(changed=True) writekeys(module, keyfile(module, user, do_write, path, manage_dir), existing_keys) params['changed'] = True else: if module.check_mode: module.exit_json(changed=False) return params def main(): module = AnsibleModule( argument_spec = dict( user = dict(required=True, type='str'), key = dict(required=True, type='str'), path = dict(required=False, type='str'), manage_dir = dict(required=False, type='bool', default=True), state = dict(default='present', choices=['absent','present']), key_options = dict(required=False, type='str'), unique = dict(default=False, type='bool'), exclusive = dict(default=False, type='bool'), validate_certs = dict(default=True, type='bool'), ), supports_check_mode=True ) results = enforce_state(module, module.params) module.exit_json(**results) # import module snippets from ansible.module_utils.basic import * from ansible.module_utils.urls import * main()
gpl-3.0
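Editor's sketch (not part of the vendored module above): the trickiest piece of the authorized_key module is the parseoptions() regex, which must split an ssh option string on commas while leaving commas inside quoted values untouched. A minimal standalone reproduction, with a made-up option string:

import re

# Same pattern as parseoptions(): match runs of non-comma characters or
# whole quoted strings, so commas inside quotes never act as separators.
regex = re.compile(r'''((?:[^,"']|"[^"]*"|'[^']*')+)''')

options = 'no-port-forwarding,from="10.0.1.1,10.0.1.2",command="/bin/false"'
# re.split() with a capturing group keeps the matched parts; the bare
# separator commas come through as ',' entries and are skipped below.
parts = regex.split(options)[1:-1]

options_dict = {}
for part in parts:
    if "=" in part:
        key, value = part.split("=", 1)
        options_dict[key] = value
    elif part != ",":
        options_dict[part] = None

print(options_dict)
# {'no-port-forwarding': None, 'from': '"10.0.1.1,10.0.1.2"', 'command': '"/bin/false"'}

The module itself stores the result in its ordered keydict rather than a plain dict, so options are written back to authorized_keys in their original order.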
rarbg/ZeroNet
src/lib/pybitcointools/bitcoin/main.py
20
15353
#!/usr/bin/python from .py2specials import * from .py3specials import * import binascii import hashlib import re import sys import os import base64 import time import random import hmac from .ripemd import * # Elliptic curve parameters (secp256k1) P = 2**256 - 2**32 - 977 N = 115792089237316195423570985008687907852837564279074904382605163141518161494337 A = 0 B = 7 Gx = 55066263022277343669578718895168534326250603453777594175500187360389116729240 Gy = 32670510020758816978083085130507043184471273380659243275938904335757337482424 G = (Gx, Gy) def change_curve(p, n, a, b, gx, gy): global P, N, A, B, Gx, Gy, G P, N, A, B, Gx, Gy = p, n, a, b, gx, gy G = (Gx, Gy) def getG(): return G # Extended Euclidean Algorithm def inv(a, n): if a == 0: return 0 lm, hm = 1, 0 low, high = a % n, n while low > 1: r = high//low nm, new = hm-lm*r, high-low*r lm, low, hm, high = nm, new, lm, low return lm % n # JSON access (for pybtctool convenience) def access(obj, prop): if isinstance(obj, dict): if prop in obj: return obj[prop] elif '.' in prop: return obj[float(prop)] else: return obj[int(prop)] else: return obj[int(prop)] def multiaccess(obj, prop): return [access(o, prop) for o in obj] def slice(obj, start=0, end=2**200): return obj[int(start):int(end)] def count(obj): return len(obj) _sum = sum def sum(obj): return _sum(obj) def isinf(p): return p[0] == 0 and p[1] == 0 def to_jacobian(p): o = (p[0], p[1], 1) return o def jacobian_double(p): if not p[1]: return (0, 0, 0) ysq = (p[1] ** 2) % P S = (4 * p[0] * ysq) % P M = (3 * p[0] ** 2 + A * p[2] ** 4) % P nx = (M**2 - 2 * S) % P ny = (M * (S - nx) - 8 * ysq ** 2) % P nz = (2 * p[1] * p[2]) % P return (nx, ny, nz) def jacobian_add(p, q): if not p[1]: return q if not q[1]: return p U1 = (p[0] * q[2] ** 2) % P U2 = (q[0] * p[2] ** 2) % P S1 = (p[1] * q[2] ** 3) % P S2 = (q[1] * p[2] ** 3) % P if U1 == U2: if S1 != S2: return (0, 0, 1) return jacobian_double(p) H = U2 - U1 R = S2 - S1 H2 = (H * H) % P H3 = (H * H2) % P U1H2 = (U1 * H2) % P nx = (R ** 2 - H3 - 2 * U1H2) % P ny = (R * (U1H2 - nx) - S1 * H3) % P nz = H * p[2] * q[2] return (nx, ny, nz) def from_jacobian(p): z = inv(p[2], P) return ((p[0] * z**2) % P, (p[1] * z**3) % P) def jacobian_multiply(a, n): if a[1] == 0 or n == 0: return (0, 0, 1) if n == 1: return a if n < 0 or n >= N: return jacobian_multiply(a, n % N) if (n % 2) == 0: return jacobian_double(jacobian_multiply(a, n//2)) if (n % 2) == 1: return jacobian_add(jacobian_double(jacobian_multiply(a, n//2)), a) def fast_multiply(a, n): return from_jacobian(jacobian_multiply(to_jacobian(a), n)) def fast_add(a, b): return from_jacobian(jacobian_add(to_jacobian(a), to_jacobian(b))) # Functions for handling pubkey and privkey formats def get_pubkey_format(pub): if is_python2: two = '\x02' three = '\x03' four = '\x04' else: two = 2 three = 3 four = 4 if isinstance(pub, (tuple, list)): return 'decimal' elif len(pub) == 65 and pub[0] == four: return 'bin' elif len(pub) == 130 and pub[0:2] == '04': return 'hex' elif len(pub) == 33 and pub[0] in [two, three]: return 'bin_compressed' elif len(pub) == 66 and pub[0:2] in ['02', '03']: return 'hex_compressed' elif len(pub) == 64: return 'bin_electrum' elif len(pub) == 128: return 'hex_electrum' else: raise Exception("Pubkey not in recognized format") def encode_pubkey(pub, formt): if not isinstance(pub, (tuple, list)): pub = decode_pubkey(pub) if formt == 'decimal': return pub elif formt == 'bin': return b'\x04' + encode(pub[0], 256, 32) + encode(pub[1], 256, 32) elif formt == 'bin_compressed': return 
from_int_to_byte(2+(pub[1] % 2)) + encode(pub[0], 256, 32) elif formt == 'hex': return '04' + encode(pub[0], 16, 64) + encode(pub[1], 16, 64) elif formt == 'hex_compressed': return '0'+str(2+(pub[1] % 2)) + encode(pub[0], 16, 64) elif formt == 'bin_electrum': return encode(pub[0], 256, 32) + encode(pub[1], 256, 32) elif formt == 'hex_electrum': return encode(pub[0], 16, 64) + encode(pub[1], 16, 64) else: raise Exception("Invalid format!") def decode_pubkey(pub, formt=None): if not formt: formt = get_pubkey_format(pub) if formt == 'decimal': return pub elif formt == 'bin': return (decode(pub[1:33], 256), decode(pub[33:65], 256)) elif formt == 'bin_compressed': x = decode(pub[1:33], 256) beta = pow(int(x*x*x+A*x+B), int((P+1)//4), int(P)) y = (P-beta) if ((beta + from_byte_to_int(pub[0])) % 2) else beta return (x, y) elif formt == 'hex': return (decode(pub[2:66], 16), decode(pub[66:130], 16)) elif formt == 'hex_compressed': return decode_pubkey(safe_from_hex(pub), 'bin_compressed') elif formt == 'bin_electrum': return (decode(pub[:32], 256), decode(pub[32:64], 256)) elif formt == 'hex_electrum': return (decode(pub[:64], 16), decode(pub[64:128], 16)) else: raise Exception("Invalid format!") def get_privkey_format(priv): if isinstance(priv, int_types): return 'decimal' elif len(priv) == 32: return 'bin' elif len(priv) == 33: return 'bin_compressed' elif len(priv) == 64: return 'hex' elif len(priv) == 66: return 'hex_compressed' else: bin_p = b58check_to_bin(priv) if len(bin_p) == 32: return 'wif' elif len(bin_p) == 33: return 'wif_compressed' else: raise Exception("WIF does not represent privkey") def encode_privkey(priv, formt, vbyte=0): if not isinstance(priv, int_types): return encode_privkey(decode_privkey(priv), formt, vbyte) if formt == 'decimal': return priv elif formt == 'bin': return encode(priv, 256, 32) elif formt == 'bin_compressed': return encode(priv, 256, 32)+b'\x01' elif formt == 'hex': return encode(priv, 16, 64) elif formt == 'hex_compressed': return encode(priv, 16, 64)+'01' elif formt == 'wif': return bin_to_b58check(encode(priv, 256, 32), 128+int(vbyte)) elif formt == 'wif_compressed': return bin_to_b58check(encode(priv, 256, 32)+b'\x01', 128+int(vbyte)) else: raise Exception("Invalid format!") def decode_privkey(priv,formt=None): if not formt: formt = get_privkey_format(priv) if formt == 'decimal': return priv elif formt == 'bin': return decode(priv, 256) elif formt == 'bin_compressed': return decode(priv[:32], 256) elif formt == 'hex': return decode(priv, 16) elif formt == 'hex_compressed': return decode(priv[:64], 16) elif formt == 'wif': return decode(b58check_to_bin(priv),256) elif formt == 'wif_compressed': return decode(b58check_to_bin(priv)[:32],256) else: raise Exception("WIF does not represent privkey") def add_pubkeys(p1, p2): f1, f2 = get_pubkey_format(p1), get_pubkey_format(p2) return encode_pubkey(fast_add(decode_pubkey(p1, f1), decode_pubkey(p2, f2)), f1) def add_privkeys(p1, p2): f1, f2 = get_privkey_format(p1), get_privkey_format(p2) return encode_privkey((decode_privkey(p1, f1) + decode_privkey(p2, f2)) % N, f1) def multiply(pubkey, privkey): f1, f2 = get_pubkey_format(pubkey), get_privkey_format(privkey) pubkey, privkey = decode_pubkey(pubkey, f1), decode_privkey(privkey, f2) # http://safecurves.cr.yp.to/twist.html if not isinf(pubkey) and (pubkey[0]**3+B-pubkey[1]*pubkey[1]) % P != 0: raise Exception("Point not on curve") return encode_pubkey(fast_multiply(pubkey, privkey), f1) def divide(pubkey, privkey): factor = inv(decode_privkey(privkey), N) return 
multiply(pubkey, factor) def compress(pubkey): f = get_pubkey_format(pubkey) if 'compressed' in f: return pubkey elif f == 'bin': return encode_pubkey(decode_pubkey(pubkey, f), 'bin_compressed') elif f == 'hex' or f == 'decimal': return encode_pubkey(decode_pubkey(pubkey, f), 'hex_compressed') def decompress(pubkey): f = get_pubkey_format(pubkey) if 'compressed' not in f: return pubkey elif f == 'bin_compressed': return encode_pubkey(decode_pubkey(pubkey, f), 'bin') elif f == 'hex_compressed' or f == 'decimal': return encode_pubkey(decode_pubkey(pubkey, f), 'hex') def privkey_to_pubkey(privkey): f = get_privkey_format(privkey) privkey = decode_privkey(privkey, f) if privkey >= N: raise Exception("Invalid privkey") if f in ['bin', 'bin_compressed', 'hex', 'hex_compressed', 'decimal']: return encode_pubkey(fast_multiply(G, privkey), f) else: return encode_pubkey(fast_multiply(G, privkey), f.replace('wif', 'hex')) privtopub = privkey_to_pubkey def privkey_to_address(priv, magicbyte=0): return pubkey_to_address(privkey_to_pubkey(priv), magicbyte) privtoaddr = privkey_to_address def neg_pubkey(pubkey): f = get_pubkey_format(pubkey) pubkey = decode_pubkey(pubkey, f) return encode_pubkey((pubkey[0], (P-pubkey[1]) % P), f) def neg_privkey(privkey): f = get_privkey_format(privkey) privkey = decode_privkey(privkey, f) return encode_privkey((N - privkey) % N, f) def subtract_pubkeys(p1, p2): f1, f2 = get_pubkey_format(p1), get_pubkey_format(p2) k2 = decode_pubkey(p2, f2) return encode_pubkey(fast_add(decode_pubkey(p1, f1), (k2[0], (P - k2[1]) % P)), f1) def subtract_privkeys(p1, p2): f1, f2 = get_privkey_format(p1), get_privkey_format(p2) k2 = decode_privkey(p2, f2) return encode_privkey((decode_privkey(p1, f1) - k2) % N, f1) # Hashes def bin_hash160(string): intermed = hashlib.sha256(string).digest() digest = '' try: digest = hashlib.new('ripemd160', intermed).digest() except: digest = RIPEMD160(intermed).digest() return digest def hash160(string): return safe_hexlify(bin_hash160(string)) def bin_sha256(string): binary_data = string if isinstance(string, bytes) else bytes(string, 'utf-8') return hashlib.sha256(binary_data).digest() def sha256(string): return bytes_to_hex_string(bin_sha256(string)) def bin_ripemd160(string): try: digest = hashlib.new('ripemd160', string).digest() except: digest = RIPEMD160(string).digest() return digest def ripemd160(string): return safe_hexlify(bin_ripemd160(string)) def bin_dbl_sha256(s): bytes_to_hash = from_string_to_bytes(s) return hashlib.sha256(hashlib.sha256(bytes_to_hash).digest()).digest() def dbl_sha256(string): return safe_hexlify(bin_dbl_sha256(string)) def bin_slowsha(string): string = from_string_to_bytes(string) orig_input = string for i in range(100000): string = hashlib.sha256(string + orig_input).digest() return string def slowsha(string): return safe_hexlify(bin_slowsha(string)) def hash_to_int(x): if len(x) in [40, 64]: return decode(x, 16) return decode(x, 256) def num_to_var_int(x): x = int(x) if x < 253: return from_int_to_byte(x) elif x < 65536: return from_int_to_byte(253)+encode(x, 256, 2)[::-1] elif x < 4294967296: return from_int_to_byte(254) + encode(x, 256, 4)[::-1] else: return from_int_to_byte(255) + encode(x, 256, 8)[::-1] # WTF, Electrum? def electrum_sig_hash(message): padded = b"\x18Bitcoin Signed Message:\n" + num_to_var_int(len(message)) + from_string_to_bytes(message) return bin_dbl_sha256(padded) def random_key(): # Gotta be secure after that java.SecureRandom fiasco... 
    entropy = random_string(32) \
        + str(random.randrange(2**256)) \
        + str(int(time.time() * 1000000))
    return sha256(entropy)


def random_electrum_seed():
    entropy = os.urandom(32) \
        + str(random.randrange(2**256)) \
        + str(int(time.time() * 1000000))
    return sha256(entropy)[:32]

# Encodings

def b58check_to_bin(inp):
    leadingzbytes = len(re.match('^1*', inp).group(0))
    data = b'\x00' * leadingzbytes + changebase(inp, 58, 256)
    assert bin_dbl_sha256(data[:-4])[:4] == data[-4:]
    return data[1:-4]


def get_version_byte(inp):
    leadingzbytes = len(re.match('^1*', inp).group(0))
    data = b'\x00' * leadingzbytes + changebase(inp, 58, 256)
    assert bin_dbl_sha256(data[:-4])[:4] == data[-4:]
    return ord(data[0])


def hex_to_b58check(inp, magicbyte=0):
    return bin_to_b58check(binascii.unhexlify(inp), magicbyte)


def b58check_to_hex(inp):
    return safe_hexlify(b58check_to_bin(inp))


def pubkey_to_address(pubkey, magicbyte=0):
    if isinstance(pubkey, (list, tuple)):
        pubkey = encode_pubkey(pubkey, 'bin')
    if len(pubkey) in [66, 130]:
        return bin_to_b58check(
            bin_hash160(binascii.unhexlify(pubkey)), magicbyte)
    return bin_to_b58check(bin_hash160(pubkey), magicbyte)

pubtoaddr = pubkey_to_address

# ECDSA

def encode_sig(v, r, s):
    vb, rb, sb = from_int_to_byte(v), encode(r, 256), encode(s, 256)
    result = base64.b64encode(vb+b'\x00'*(32-len(rb))+rb+b'\x00'*(32-len(sb))+sb)
    return result if is_python2 else str(result, 'utf-8')


def decode_sig(sig):
    bytez = base64.b64decode(sig)
    return from_byte_to_int(bytez[0]), decode(bytez[1:33], 256), decode(bytez[33:], 256)

# https://tools.ietf.org/html/rfc6979#section-3.2

def deterministic_generate_k(msghash, priv):
    v = b'\x01' * 32
    k = b'\x00' * 32
    priv = encode_privkey(priv, 'bin')
    msghash = encode(hash_to_int(msghash), 256, 32)
    k = hmac.new(k, v+b'\x00'+priv+msghash, hashlib.sha256).digest()
    v = hmac.new(k, v, hashlib.sha256).digest()
    k = hmac.new(k, v+b'\x01'+priv+msghash, hashlib.sha256).digest()
    v = hmac.new(k, v, hashlib.sha256).digest()
    return decode(hmac.new(k, v, hashlib.sha256).digest(), 256)


def ecdsa_raw_sign(msghash, priv):
    z = hash_to_int(msghash)
    k = deterministic_generate_k(msghash, priv)
    r, y = fast_multiply(G, k)
    s = inv(k, N) * (z + r*decode_privkey(priv)) % N
    return 27+(y % 2), r, s


def ecdsa_sign(msg, priv):
    return encode_sig(*ecdsa_raw_sign(electrum_sig_hash(msg), priv))


def ecdsa_raw_verify(msghash, vrs, pub):
    v, r, s = vrs
    w = inv(s, N)
    z = hash_to_int(msghash)
    u1, u2 = z*w % N, r*w % N
    x, y = fast_add(fast_multiply(G, u1), fast_multiply(decode_pubkey(pub), u2))
    return r == x


def ecdsa_verify(msg, sig, pub):
    return ecdsa_raw_verify(electrum_sig_hash(msg), decode_sig(sig), pub)


def ecdsa_raw_recover(msghash, vrs):
    v, r, s = vrs
    x = r
    beta = pow(x*x*x+A*x+B, (P+1)//4, P)
    y = beta if v % 2 ^ beta % 2 else (P - beta)
    z = hash_to_int(msghash)
    Gz = jacobian_multiply((Gx, Gy, 1), (N - z) % N)
    XY = jacobian_multiply((x, y, 1), s)
    Qr = jacobian_add(Gz, XY)
    Q = jacobian_multiply(Qr, inv(r, N))
    Q = from_jacobian(Q)
    if ecdsa_raw_verify(msghash, vrs, Q):
        return Q
    return False


def ecdsa_recover(msg, sig):
    return encode_pubkey(ecdsa_raw_recover(electrum_sig_hash(msg), decode_sig(sig)), 'hex')
gpl-2.0
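Editor's sketch (not part of the vendored file): a sign/verify/recover round trip through the functions above. This assumes the pybitcointools package directory is on sys.path so that the bitcoin package imports; the message and key are arbitrary.

from bitcoin.main import (
    random_key, privtopub, ecdsa_sign, ecdsa_verify, ecdsa_recover)

priv = random_key()        # 64-char hex string (sha256 of mixed entropy)
pub = privtopub(priv)      # uncompressed hex pubkey via fast_multiply(G, priv)

sig = ecdsa_sign("hello", priv)          # RFC 6979 deterministic k, base64 sig
assert ecdsa_verify("hello", sig, pub)
# Recovery rebuilds the public key from (v, r, s) and the message hash:
assert ecdsa_recover("hello", sig) == pub

Because k is derived deterministically from the private key and message hash (deterministic_generate_k), signing the same message twice produces the same signature.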
dynaryu/inasafe
safe/impact_functions/impact_function_metadata.py
6
33608
# coding=utf-8 """ InaSAFE Disaster risk assessment tool developed by AusAid - **Impact Function Metadata** Contact : ole.moller.nielsen@gmail.com .. note:: This program is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 2 of the License, or (at your option) any later version. """ __author__ = 'ismail@kartoza.com' __revision__ = '$Format:%H$' __date__ = '14/03/14' __copyright__ = ('Copyright 2012, Australia Indonesia Facility for ' 'Disaster Reduction') import json from safe.common.utilities import add_to_list, get_list_key, is_key_exist from safe.definitions import ( layer_purpose_exposure, layer_purpose_hazard) class ImpactFunctionMetadata(object): """Abstract metadata class for an impact function. .. versionadded:: 2.1 There will be a concrete implementation of this interface which is specific to a single IF class. So anything returned (e.g. data_types) will only be relevant to the category/subcategories of the concrete implementation's IF. Example usage:: foo = IF() meta = IF.metadata bar = meta.allowed_subcategories('exposure') bar > [structure] .. note:: We already know that for an IF only hazard and exposure categories are allowed, so there is no explicit method for that (we could change that later). """ def __init__(self): """Constructor.""" pass @staticmethod def simplify_layer_constraint(layer_constraint): """Simplify layer constraint to layer_type and data_type only. :param layer_constraint: Dictionary that represents layer_constraint :type layer_constraint: dict :returns: Simple version of layer_constraint :rtype: dict """ simple_layer_constraint = { 'layer_type': layer_constraint['layer_type'], 'data_type': layer_constraint['data_type'], } return simple_layer_constraint @staticmethod def is_subset(element, container): """Check the membership of element from container. It will check based on the type. Only valid for string and list. :param element: Element that will be searched for in container. :type element: list, str :param container: Container that will be checked. :type container: list, str :returns: boolean of the membership :rtype: bool """ if isinstance(element, list): if isinstance(container, list): return set(element) <= set(container) else: if isinstance(container, list): return element in container else: return element == container return False @staticmethod def json(): """JSON representation of the metadata for this impact function. This is a static method. You can use it to get the raw json metadata for an impact function. Each concrete implementation of the metadata base class should implement this. Nothing else needs to be overridden from the base class unless you want to modify the default behaviour. :returns: A json document representing all the metadata for the concrete impact function. :rtype: json """ my_json = json.dumps(ImpactFunctionMetadata.as_dict()) return my_json @staticmethod def as_dict(): """Return metadata as a dictionary. This is a static method. You can use it to get the metadata in dictionary format for an impact function. Each concrete implementation of the metadata base class should implement this. Nothing else needs to be overridden from the base class unless you want to modify the default behaviour. :returns: A dictionary representing all the metadata for the concrete impact function. 
:rtype: dict """ raise NotImplementedError( 'You must implement this method in your concrete class.') @classmethod def allowed_subcategories(cls, category=None): """Get the list of allowed subcategories for a given category. :param category: Optional category which will be used to subset the allowed subcategories. If omitted, all supported subcategories will be returned (for both hazard and exposure). Default is None. :type category: str :returns: A list of strings is returned. :rtype: list """ result = [] if category is None: return cls.allowed_subcategories('exposure') + cls\ .allowed_subcategories('hazard') else: metadata_dict = cls.as_dict() categories = metadata_dict['layer_requirements'] result = add_to_list(result, categories[category]['%s_types' % category]) return result @classmethod def allowed_data_types(cls, subcategory): """Get the list of allowed data types for a subcategory. Example usage:: foo = IF() meta = IF.metadata ubar = meta.allowed_data_types('structure') ubar > ['polygon'] In the above example it does not show ‘numeric’ as the request is specific to the structure subcategory for that IF (using the IF declaration at the top of this file as the basis for IF()) Passing a subcategory is required otherwise the context of the data_type(s) would be ambiguous (i.e. whether they can be used as exposure or hazards). :param subcategory: Required subcategory which will be used to subset the allowed data_types. :type subcategory: str :returns: A list of one or more strings is returned. :rtype: list """ result = [] metadata_dict = cls.as_dict() categories = metadata_dict['categories'] if subcategory in [x['id'] for x in cls.allowed_subcategories( 'exposure')]: # implementation logic that returns the allowed data_types for # exposure layer with subcategory as passed in to this method layer_constraints = categories['exposure']['layer_constraints'] for layer_constraint in layer_constraints: result = add_to_list(result, layer_constraint['data_type']) elif subcategory in [x['id'] for x in cls.allowed_subcategories( 'hazard')]: # implementation logic that returns the allowed data_types for # hazard layer with subcategory as passed in to this method layer_constraints = categories['hazard']['layer_constraints'] for layer_constraint in layer_constraints: result = add_to_list(result, layer_constraint['data_type']) else: # raise Exception('Invalid subcategory.') # TODO (ismailsunni): create custom exception to catch since it # will called by all impact function pass return result @classmethod def is_disabled(cls): """Determine if an impact function is disable. Usually is used for checking whether an impact function is disabled or not. If there is no disabled keyword in the metadata, return False. :returns: Return True if the metadata disabled value is True. :rtype: bool """ try: metadata_dict = cls.as_dict() return metadata_dict.get('disabled', False) except AttributeError: return False @classmethod def is_valid(cls): """Check whether the metadata is valid or not. TODO(IS): Add comment explaining how we validate IF Metadata. 
:returns: True or False based on the validity of IF Metadata :rtype: bool """ metadata_dict = cls.as_dict() expected_metadata = { 'id': basestring, 'name': basestring, 'impact': basestring, 'title': basestring, 'author': basestring, 'date_implemented': basestring, 'overview': basestring, 'detailed_description': basestring, 'hazard_input': basestring, 'exposure_input': basestring, 'output': basestring, 'actions': basestring, 'limitations': list, # list of string 'citations': list, # list of string 'layer_requirements': dict } for key, value in expected_metadata.iteritems(): if key not in metadata_dict.keys(): return False, 'key %s not in metadata' % key if not isinstance(metadata_dict[key], value): message = 'key %s in metadata is not a %s, but %s ' % ( key, value, type(metadata_dict[key])) return False, message expected_layer_requirements_keys = ['hazard', 'exposure'] layer_requirements = metadata_dict['layer_requirements'] for key in expected_layer_requirements_keys: if key not in layer_requirements.keys(): return False, 'key %s is not in layer_requirements' % key expected_hazard_metadata = { 'layer_mode': dict, 'layer_geometries': list, 'hazard_categories': list, 'hazard_types': list, 'continuous_hazard_units': list, 'vector_hazard_classifications': list, 'raster_hazard_classifications': list, 'additional_keywords': list } hazard = layer_requirements['hazard'] for key, value in expected_hazard_metadata.iteritems(): if key not in hazard.keys(): return False, 'key %s is not in hazard' % key if not isinstance(hazard[key], value): message = 'key %s in hazard is not a %s, but %s ' % ( key, value, type(hazard[key])) return False, message expected_exposure_metadata = { 'layer_mode': dict, 'layer_geometries': list, 'exposure_types': list, 'exposure_units': list, 'exposure_class_fields': list, 'additional_keywords': list } exposure = layer_requirements['exposure'] for key, value in expected_exposure_metadata.iteritems(): if key not in exposure.keys(): return False, 'key %s is not in exposure' % key if not isinstance(exposure[key], value): message = 'key %s in exposure not a %s, but %s ' % ( key, value, type(exposure[key])) return False, message return True, '' @classmethod def parameters(cls): """Return list of parameters. This is a static method. You can use it to get the list of parameters for the impact function. :returns: A list that contains all parameters. :rtype: list """ return cls.as_dict().get('parameters', []) @classmethod def get_layer_requirements(cls): """Return layer requirements. This is a static method. You can use it to get the layer requirements for the impact function. :returns: A dict that contains layer requirements. :rtype: dict """ return cls.as_dict().get('layer_requirements', {}) @classmethod def get_name(cls): """Return IF name. :returns: The IF name. :rtype: str """ return cls.as_dict().get('name', '') @classmethod def get_hazard_requirements(cls): """Get hazard layer requirements.""" return cls.get_layer_requirements()['hazard'] @classmethod def get_exposure_requirements(cls): """Get exposure layer requirements.""" return cls.get_layer_requirements()['exposure'] @classmethod def purposes_for_layer(cls, layer_geometry_key): """Get purposes of a layer geometry id. 
:param layer_geometry_key: The geometry id :type layer_geometry_key: str :returns: List of purposes :rtype: list """ result = [] hazard_layer_req = cls.get_hazard_requirements() hazard_geometries = hazard_layer_req['layer_geometries'] hazard_geometry_keys = get_list_key(hazard_geometries) if layer_geometry_key in hazard_geometry_keys: result.append(layer_purpose_hazard) exposure_layer_req = cls.get_exposure_requirements() exposure_geometries = exposure_layer_req['layer_geometries'] exposure_geometry_keys = get_list_key(exposure_geometries) if layer_geometry_key in exposure_geometry_keys: result.append(layer_purpose_exposure) return result @classmethod def hazard_categories_for_layer(cls, layer_geometry_key, hazard_key=None): """Get hazard categories form layer_geometry_key :param layer_geometry_key: The geometry id :type layer_geometry_key: str :param hazard_key: The hazard key :type hazard_key: str :returns: List of hazard_categories :rtype: list """ hazard_layer_req = cls.get_hazard_requirements() hazards = hazard_layer_req['hazard_types'] hazard_geometries = hazard_layer_req['layer_geometries'] if not is_key_exist(layer_geometry_key, hazard_geometries): return [] if hazard_key: if not is_key_exist(hazard_key, hazards): return [] return hazard_layer_req['hazard_categories'] @classmethod def hazards_for_layer(cls, hazard_geometry_key, hazard_category_key=None): """Get hazard categories form layer_geometry_key :param hazard_geometry_key: The geometry id :type hazard_geometry_key: str :param hazard_category_key: The hazard category :type hazard_category_key: str :returns: List of hazard :rtype: list """ hazard_layer_req = cls.get_hazard_requirements() hazard_categories = hazard_layer_req['hazard_categories'] hazard_geometries = hazard_layer_req['layer_geometries'] if not is_key_exist(hazard_geometry_key, hazard_geometries): return [] if hazard_category_key: if not is_key_exist(hazard_category_key, hazard_categories): return [] return hazard_layer_req['hazard_types'] @classmethod def exposures_for_layer(cls, layer_geometry_key): """Get hazard categories form layer_geometry_key :param layer_geometry_key: The geometry id :type layer_geometry_key: str :returns: List of exposure :rtype: list """ exposure_layer_req = cls.get_exposure_requirements() layer_geometries = exposure_layer_req['layer_geometries'] layer_geometry_keys = get_list_key(layer_geometries) if layer_geometry_key in layer_geometry_keys: return exposure_layer_req['exposure_types'] else: return [] @classmethod def exposure_units_for_layer( cls, exposure_key, layer_geometry_key, layer_mode_key): """Get exposure units. 
:param exposure_key: The exposure key :type exposure_key: str :param layer_geometry_key: The geometry key :type layer_geometry_key: str :param layer_mode_key: The layer mode key :type layer_mode_key: str :returns: List of exposure unit :rtype: list """ exposure_layer_req = cls.get_exposure_requirements() if not exposure_layer_req['exposure_units']: return [] exposures = exposure_layer_req['exposure_types'] exposure_keys = get_list_key(exposures) if exposure_key not in exposure_keys: return [] layer_geometries = exposure_layer_req['layer_geometries'] layer_geometry_keys = get_list_key(layer_geometries) if layer_geometry_key not in layer_geometry_keys: return [] layer_mode = exposure_layer_req['layer_mode'] if layer_mode_key != layer_mode['key']: return [] return exposure_layer_req['exposure_units'] @classmethod def continuous_hazards_units_for_layer( cls, hazard_key, layer_geometry_key, layer_mode_key, hazard_category_key): """Get continuous hazard units. :param hazard_key: The hazard key :type hazard_key: str :param layer_geometry_key: The layer geometry key :type layer_geometry_key: str :param layer_mode_key: The layer mode key :type layer_mode_key: str :param hazard_category_key: The hazard category key :type hazard_category_key: str :returns: List of continuous hazard unit :rtype: list """ hazard_layer_req = cls.get_hazard_requirements() if not hazard_layer_req['continuous_hazard_units']: return [] hazards = hazard_layer_req['hazard_types'] hazard_keys = get_list_key(hazards) if hazard_key not in hazard_keys: return [] layer_geometries = hazard_layer_req['layer_geometries'] layer_geometry_keys = get_list_key(layer_geometries) if layer_geometry_key not in layer_geometry_keys: return [] layer_mode = hazard_layer_req['layer_mode'] if layer_mode_key != layer_mode['key']: return [] hazard_categories = hazard_layer_req['hazard_categories'] hazard_category_keys = get_list_key(hazard_categories) if hazard_category_key not in hazard_category_keys: return [] return hazard_layer_req['continuous_hazard_units'] @classmethod def vector_hazards_classifications_for_layer( cls, hazard_key, layer_geometry_key, layer_mode_key, hazard_category_key): """Get vector_hazards_classifications. :param hazard_key: The hazard key :type hazard_key: str :param layer_geometry_key: The layer geometry key :type layer_geometry_key: str :param layer_mode_key: The layer mode key :type layer_mode_key: str :param hazard_category_key: The hazard category key :type hazard_category_key: str :returns: List of continuous hazard unit :rtype: list """ hazard_layer_req = cls.get_hazard_requirements() if not hazard_layer_req['vector_hazard_classifications']: return [] hazards = hazard_layer_req['hazard_types'] hazard_keys = get_list_key(hazards) if hazard_key not in hazard_keys: return [] layer_geometries = hazard_layer_req['layer_geometries'] layer_geometry_keys = get_list_key(layer_geometries) if layer_geometry_key not in layer_geometry_keys: return [] layer_mode = hazard_layer_req['layer_mode'] if layer_mode_key != layer_mode['key']: return [] hazard_categories = hazard_layer_req['hazard_categories'] hazard_category_keys = get_list_key(hazard_categories) if hazard_category_key not in hazard_category_keys: return [] return hazard_layer_req['vector_hazard_classifications'] @classmethod def raster_hazards_classifications_for_layer( cls, hazard_key, layer_geometry_key, layer_mode_key, hazard_category_key): """Get vector_hazards_classifications. 
:param hazard_key: The hazard key :type hazard_key: str :param layer_geometry_key: The layer geometry key :type layer_geometry_key: str :param layer_mode_key: The layer mode key :type layer_mode_key: str :param hazard_category_key: The hazard category key :type hazard_category_key: str :returns: List of continuous hazard unit :rtype: list """ hazard_layer_req = cls.get_hazard_requirements() if not hazard_layer_req['raster_hazard_classifications']: return [] hazards = hazard_layer_req['hazard_types'] hazard_keys = get_list_key(hazards) if hazard_key not in hazard_keys: return [] layer_geometries = hazard_layer_req['layer_geometries'] layer_geometry_keys = get_list_key(layer_geometries) if layer_geometry_key not in layer_geometry_keys: return [] layer_mode = hazard_layer_req['layer_mode'] if layer_mode_key != layer_mode['key']: return [] hazard_categories = hazard_layer_req['hazard_categories'] hazard_category_keys = get_list_key(hazard_categories) if hazard_category_key not in hazard_category_keys: return [] return hazard_layer_req['raster_hazard_classifications'] @classmethod def available_hazards(cls, hazard_category_key): """Get available hazards from hazard_category_key :param hazard_category_key: The hazard category key :type hazard_category_key: str :returns: List of available hazards :rtype: list """ hazard_layer_req = cls.get_hazard_requirements() hazard_categories = hazard_layer_req['hazard_categories'] hazard_category_keys = get_list_key(hazard_categories) if hazard_category_key not in hazard_category_keys: return [] return hazard_layer_req['hazard_types'] @classmethod def available_exposures(cls): """get_available_exposure :returns: List of available exposure :rtype: list """ exposure_layer_req = cls.get_exposure_requirements() return exposure_layer_req['exposure_types'] @classmethod def is_function_for_constraint( cls, hazard_key, exposure_key, hazard_geometry_key=None, exposure_geometry_key=None, hazard_mode_key=None, exposure_mode_key=None): """Check if the constraints match with the function. 
:param hazard_key: The hazard key :type hazard_key: str :param exposure_key: the exposure key :type exposure_key: str :param hazard_geometry_key: The hazard geometry key :type hazard_geometry_key: str :param exposure_geometry_key: The exposure geometry key :type exposure_geometry_key: str :param hazard_mode_key: The hazard mode key :type hazard_mode_key: str :param exposure_mode_key: The exposure mode key :type exposure_mode_key: str :returns: True if match, else False :rtype: bool """ hazard_layer_req = cls.get_hazard_requirements() exposure_layer_req = cls.get_exposure_requirements() hazards = hazard_layer_req['hazard_types'] exposures = exposure_layer_req['exposure_types'] hazard_geometries = hazard_layer_req['layer_geometries'] exposure_geometries = exposure_layer_req['layer_geometries'] hazard_mode = hazard_layer_req['layer_mode'] exposure_mode = exposure_layer_req['layer_mode'] if not is_key_exist(hazard_key, hazards): return False if not is_key_exist(exposure_key, exposures): return False if hazard_geometry_key: if not is_key_exist(hazard_geometry_key, hazard_geometries): return False if exposure_geometry_key: if not is_key_exist(exposure_geometry_key, exposure_geometries): return False if hazard_mode_key: if hazard_mode_key != hazard_mode['key']: return False if exposure_mode_key: if exposure_mode_key != exposure_mode['key']: return False return True @classmethod def available_hazard_constraints(cls, hazard_key, hazard_category_key): """Get hazard constraints for hazard_key and hazard_category_key :param hazard_key: The hazard key :type hazard_key: str :param hazard_category_key: The hazard category key :type hazard_category_key: str :returns: List of tuple of layer_mode and layer_geometry :rtype: list """ hazard_layer_req = cls.get_hazard_requirements() hazards = hazard_layer_req['hazard_types'] hazard_categories = hazard_layer_req['hazard_categories'] if not is_key_exist(hazard_key, hazards): return [] if not is_key_exist(hazard_category_key, hazard_categories): return [] layer_mode = hazard_layer_req['layer_mode'] layer_geometries = hazard_layer_req['layer_geometries'] result = [] for layer_geometry in layer_geometries: result.append((layer_mode, layer_geometry)) return result @classmethod def available_exposure_constraints(cls, exposure_key): """Get exposure constraints for exposure_key. 
:param exposure_key: The exposure key :type exposure_key: str :returns: List of tuple of layer_mode and layer_geometry :rtype: list """ exposure_layer_req = cls.get_exposure_requirements() exposures = exposure_layer_req['exposure_types'] if not is_key_exist(exposure_key, exposures): return [] layer_mode = exposure_layer_req['layer_mode'] layer_geometries = exposure_layer_req['layer_geometries'] result = [] for layer_geometry in layer_geometries: result.append((layer_mode, layer_geometry)) return result @classmethod def valid_layer_keywords(cls): """Return a dictionary for valid layer keywords.""" hazard_layer_req = cls.get_hazard_requirements() exposure_layer_req = cls.get_exposure_requirements() hazard_keywords = { 'layer_mode': hazard_layer_req['layer_mode']['key'], 'layer_geometry': [x['key'] for x in hazard_layer_req[ 'layer_geometries']], 'hazard_category': [x['key'] for x in hazard_layer_req[ 'hazard_categories']], 'hazard': [x['key'] for x in hazard_layer_req[ 'hazard_types']], 'continuous_hazard_unit': [x['key'] for x in hazard_layer_req[ 'continuous_hazard_units']], 'vector_hazard_classification': [ x['key'] for x in hazard_layer_req[ 'vector_hazard_classifications']], 'raster_hazard_classification': [ x['key'] for x in hazard_layer_req[ 'raster_hazard_classifications']], } exposure_keywords = { 'layer_mode': exposure_layer_req['layer_mode']['key'], 'layer_geometry': [x['key'] for x in exposure_layer_req[ 'layer_geometries']], 'exposure': [x['key'] for x in exposure_layer_req[ 'exposure_types']], 'exposure_unit': [x['key'] for x in exposure_layer_req[ 'exposure_units']], } keywords = { 'hazard_keywords': hazard_keywords, 'exposure_keywords': exposure_keywords, } return keywords @classmethod def available_hazard_layer_mode( cls, hazard_key, hazard_geometry_key, hazard_category_key): """Return all available layer_mode. :param hazard_key: The hazard key :type hazard_key: str :param hazard_geometry_key: The hazard geometry key :type hazard_geometry_key: str :param hazard_category_key: The hazard category key :type hazard_category_key: str :returns: A layer mode :rtype: dict, None """ hazard_layer_req = cls.get_hazard_requirements() hazards = hazard_layer_req['hazard_types'] hazard_categories = hazard_layer_req['hazard_categories'] hazard_geometries = hazard_layer_req['layer_geometries'] if not is_key_exist(hazard_key, hazards): return None if not is_key_exist(hazard_geometry_key, hazard_geometries): return None if not is_key_exist(hazard_category_key, hazard_categories): return None layer_mode = hazard_layer_req['layer_mode'] return layer_mode @classmethod def available_exposure_layer_mode( cls, exposure_key, exposure_geometry_key): """Get exposure layer mode for exposure_key. :param exposure_key: The exposure key :type exposure_key: str :param exposure_geometry_key: The exposure geometry key :type exposure_geometry_key: str :returns: A layer mode :rtype: dict """ exposure_layer_req = cls.get_exposure_requirements() exposures = exposure_layer_req['exposure_types'] exposure_geometries = exposure_layer_req['layer_geometries'] if not is_key_exist(exposure_key, exposures): return None if not is_key_exist(exposure_geometry_key, exposure_geometries): return None layer_mode = exposure_layer_req['layer_mode'] return layer_mode @classmethod def hazard_additional_keywords( cls, layer_mode_key=None, layer_geometry_key=None, hazard_category_key=None, hazard_key=None): """Return additional_keywords for hazard. 
:param layer_mode_key: The layer mode key :type layer_mode_key: str :param layer_geometry_key: The layer geometry key :type layer_geometry_key: str :param hazard_category_key: The hazard category key :type hazard_category_key: str :param hazard_key: The hazard key :type hazard_key: str :returns: List of additional keywords :rtype: list """ hazard_layer_req = cls.get_hazard_requirements() layer_mode = hazard_layer_req['layer_mode'] layer_geometries = hazard_layer_req['layer_geometries'] hazard_categories = hazard_layer_req['hazard_categories'] hazards = hazard_layer_req['hazard_types'] if layer_mode_key: if layer_mode_key != layer_mode['key']: return [] if layer_geometry_key: if not is_key_exist(layer_geometry_key, layer_geometries): return [] if hazard_category_key: if not is_key_exist(hazard_category_key, hazard_categories): return [] if hazard_key: if not is_key_exist(hazard_key, hazards): return [] additional_keywords = hazard_layer_req['additional_keywords'] return additional_keywords @classmethod def exposure_additional_keywords( cls, layer_mode_key=None, layer_geometry_key=None, exposure_key=None): """Return additional_keywords for exposure. :param layer_mode_key: The layer mode key :type layer_mode_key: str :param layer_geometry_key: The layer geometry key :type layer_geometry_key: str :param exposure_key: The hazard key :type exposure_key: str :returns: List of additional keywords :rtype: list """ exposure_layer_req = cls.get_exposure_requirements() layer_mode = exposure_layer_req['layer_mode'] layer_geometries = exposure_layer_req['layer_geometries'] exposures = exposure_layer_req['exposure_types'] if layer_mode_key: if layer_mode_key != layer_mode['key']: return [] if layer_geometry_key: if not is_key_exist(layer_geometry_key, layer_geometries): return [] if exposure_key: if not is_key_exist(exposure_key, exposures): return [] additional_keywords = exposure_layer_req['additional_keywords'] return additional_keywords @classmethod def exposure_class_fields( cls, layer_mode_key=None, layer_geometry_key=None, exposure_key=None): """Return list of exposure class field. :param layer_mode_key: The layer mode key :type layer_mode_key: str :param layer_geometry_key: The layer geometry key :type layer_geometry_key: str :param exposure_key: The exposure key :type exposure_key: str :returns: List of exposure class field. :rtype: list """ exposure_layer_req = cls.get_exposure_requirements() layer_mode = exposure_layer_req['layer_mode'] layer_geometries = exposure_layer_req['layer_geometries'] exposures = exposure_layer_req['exposure_types'] if layer_mode_key: if layer_mode_key != layer_mode['key']: return [] if layer_geometry_key: if not is_key_exist(layer_geometry_key, layer_geometries): return [] if exposure_key: if not is_key_exist(exposure_key, exposures): return [] result = exposure_layer_req['exposure_class_fields'] return result
gpl-3.0
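Editor's sketch (not part of the vendored file): ImpactFunctionMetadata is abstract, so each impact function ships a concrete subclass whose as_dict() returns the real metadata. The toy subclass below uses invented placeholder keys purely to illustrate the pattern; the real values come from safe.definitions, and this assumes InaSAFE's safe package is importable.

from safe.impact_functions.impact_function_metadata import (
    ImpactFunctionMetadata)


class DummyMetadata(ImpactFunctionMetadata):
    """Toy metadata with placeholder keys, for illustrating the API."""

    @staticmethod
    def as_dict():
        return {
            'name': 'Dummy Impact Function',
            'layer_requirements': {
                'hazard': {
                    'layer_mode': {'key': 'classified'},
                    'layer_geometries': [{'key': 'polygon'}],
                    'hazard_categories': [{'key': 'single_event'}],
                    'hazard_types': [{'key': 'flood'}],
                    'continuous_hazard_units': [],
                    'vector_hazard_classifications': [],
                    'raster_hazard_classifications': [],
                    'additional_keywords': [],
                },
                'exposure': {
                    'layer_mode': {'key': 'classified'},
                    'layer_geometries': [{'key': 'polygon'}],
                    'exposure_types': [{'key': 'structure'}],
                    'exposure_units': [],
                    'exposure_class_fields': [],
                    'additional_keywords': [],
                },
            },
        }

DummyMetadata.get_name()             # 'Dummy Impact Function'
DummyMetadata.available_exposures()  # [{'key': 'structure'}]
# purposes_for_layer('polygon') should report both hazard and exposure
# purposes here, since both requirement blocks list polygon geometry.

Every query classmethod ultimately reads from as_dict(), which is why it is the only method a concrete subclass must implement.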
lwiecek/django
django/contrib/gis/measure.py
15
12471
# Copyright (c) 2007, Robert Coup <robert.coup@onetrackmind.co.nz> # All rights reserved. # # Redistribution and use in source and binary forms, with or without modification, # are permitted provided that the following conditions are met: # # 1. Redistributions of source code must retain the above copyright notice, # this list of conditions and the following disclaimer. # # 2. Redistributions in binary form must reproduce the above copyright # notice, this list of conditions and the following disclaimer in the # documentation and/or other materials provided with the distribution. # # 3. Neither the name of Distance nor the names of its contributors may be used # to endorse or promote products derived from this software without # specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND # ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED # WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE # DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR # ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES # (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; # LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON # ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS # SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. # """ Distance and Area objects to allow for sensible and convenient calculation and conversions. Authors: Robert Coup, Justin Bronn, Riccardo Di Virgilio Inspired by GeoPy (http://exogen.case.edu/projects/geopy/) and Geoff Biggs' PhD work on dimensioned units for robotics. 
""" from decimal import Decimal from functools import total_ordering from django.utils import six __all__ = ['A', 'Area', 'D', 'Distance'] NUMERIC_TYPES = six.integer_types + (float, Decimal) AREA_PREFIX = "sq_" def pretty_name(obj): return obj.__name__ if obj.__class__ == type else obj.__class__.__name__ @total_ordering class MeasureBase(object): STANDARD_UNIT = None ALIAS = {} UNITS = {} LALIAS = {} def __init__(self, default_unit=None, **kwargs): value, self._default_unit = self.default_units(kwargs) setattr(self, self.STANDARD_UNIT, value) if default_unit and isinstance(default_unit, six.string_types): self._default_unit = default_unit def _get_standard(self): return getattr(self, self.STANDARD_UNIT) def _set_standard(self, value): setattr(self, self.STANDARD_UNIT, value) standard = property(_get_standard, _set_standard) def __getattr__(self, name): if name in self.UNITS: return self.standard / self.UNITS[name] else: raise AttributeError('Unknown unit type: %s' % name) def __repr__(self): return '%s(%s=%s)' % (pretty_name(self), self._default_unit, getattr(self, self._default_unit)) def __str__(self): return '%s %s' % (getattr(self, self._default_unit), self._default_unit) # **** Comparison methods **** def __eq__(self, other): if isinstance(other, self.__class__): return self.standard == other.standard else: return NotImplemented def __lt__(self, other): if isinstance(other, self.__class__): return self.standard < other.standard else: return NotImplemented # **** Operators methods **** def __add__(self, other): if isinstance(other, self.__class__): return self.__class__( default_unit=self._default_unit, **{self.STANDARD_UNIT: (self.standard + other.standard)} ) else: raise TypeError('%(class)s must be added with %(class)s' % {"class": pretty_name(self)}) def __iadd__(self, other): if isinstance(other, self.__class__): self.standard += other.standard return self else: raise TypeError('%(class)s must be added with %(class)s' % {"class": pretty_name(self)}) def __sub__(self, other): if isinstance(other, self.__class__): return self.__class__( default_unit=self._default_unit, **{self.STANDARD_UNIT: (self.standard - other.standard)} ) else: raise TypeError('%(class)s must be subtracted from %(class)s' % {"class": pretty_name(self)}) def __isub__(self, other): if isinstance(other, self.__class__): self.standard -= other.standard return self else: raise TypeError('%(class)s must be subtracted from %(class)s' % {"class": pretty_name(self)}) def __mul__(self, other): if isinstance(other, NUMERIC_TYPES): return self.__class__( default_unit=self._default_unit, **{self.STANDARD_UNIT: (self.standard * other)} ) else: raise TypeError('%(class)s must be multiplied with number' % {"class": pretty_name(self)}) def __imul__(self, other): if isinstance(other, NUMERIC_TYPES): self.standard *= float(other) return self else: raise TypeError('%(class)s must be multiplied with number' % {"class": pretty_name(self)}) def __rmul__(self, other): return self * other def __truediv__(self, other): if isinstance(other, self.__class__): return self.standard / other.standard if isinstance(other, NUMERIC_TYPES): return self.__class__( default_unit=self._default_unit, **{self.STANDARD_UNIT: (self.standard / other)} ) else: raise TypeError('%(class)s must be divided with number or %(class)s' % {"class": pretty_name(self)}) def __div__(self, other): # Python 2 compatibility return type(self).__truediv__(self, other) def __itruediv__(self, other): if isinstance(other, NUMERIC_TYPES): self.standard /= float(other) return 
self else: raise TypeError('%(class)s must be divided with number' % {"class": pretty_name(self)}) def __idiv__(self, other): # Python 2 compatibility return type(self).__itruediv__(self, other) def __bool__(self): return bool(self.standard) def __nonzero__(self): # Python 2 compatibility return type(self).__bool__(self) def default_units(self, kwargs): """ Return the unit value and the default units specified from the given keyword arguments dictionary. """ val = 0.0 default_unit = self.STANDARD_UNIT for unit, value in six.iteritems(kwargs): if not isinstance(value, float): value = float(value) if unit in self.UNITS: val += self.UNITS[unit] * value default_unit = unit elif unit in self.ALIAS: u = self.ALIAS[unit] val += self.UNITS[u] * value default_unit = u else: lower = unit.lower() if lower in self.UNITS: val += self.UNITS[lower] * value default_unit = lower elif lower in self.LALIAS: u = self.LALIAS[lower] val += self.UNITS[u] * value default_unit = u else: raise AttributeError('Unknown unit type: %s' % unit) return val, default_unit @classmethod def unit_attname(cls, unit_str): """ Retrieves the unit attribute name for the given unit string. For example, if the given unit string is 'metre', 'm' would be returned. An exception is raised if an attribute cannot be found. """ lower = unit_str.lower() if unit_str in cls.UNITS: return unit_str elif lower in cls.UNITS: return lower elif lower in cls.LALIAS: return cls.LALIAS[lower] else: raise Exception('Could not find a unit keyword associated with "%s"' % unit_str) class Distance(MeasureBase): STANDARD_UNIT = "m" UNITS = { 'chain': 20.1168, 'chain_benoit': 20.116782, 'chain_sears': 20.1167645, 'british_chain_benoit': 20.1167824944, 'british_chain_sears': 20.1167651216, 'british_chain_sears_truncated': 20.116756, 'cm': 0.01, 'british_ft': 0.304799471539, 'british_yd': 0.914398414616, 'clarke_ft': 0.3047972654, 'clarke_link': 0.201166195164, 'fathom': 1.8288, 'ft': 0.3048, 'german_m': 1.0000135965, 'gold_coast_ft': 0.304799710181508, 'indian_yd': 0.914398530744, 'inch': 0.0254, 'km': 1000.0, 'link': 0.201168, 'link_benoit': 0.20116782, 'link_sears': 0.20116765, 'm': 1.0, 'mi': 1609.344, 'mm': 0.001, 'nm': 1852.0, 'nm_uk': 1853.184, 'rod': 5.0292, 'sears_yd': 0.91439841, 'survey_ft': 0.304800609601, 'um': 0.000001, 'yd': 0.9144, } # Unit aliases for `UNIT` terms encountered in Spatial Reference WKT. ALIAS = { 'centimeter': 'cm', 'foot': 'ft', 'inches': 'inch', 'kilometer': 'km', 'kilometre': 'km', 'meter': 'm', 'metre': 'm', 'micrometer': 'um', 'micrometre': 'um', 'millimeter': 'mm', 'millimetre': 'mm', 'mile': 'mi', 'yard': 'yd', 'British chain (Benoit 1895 B)': 'british_chain_benoit', 'British chain (Sears 1922)': 'british_chain_sears', 'British chain (Sears 1922 truncated)': 'british_chain_sears_truncated', 'British foot (Sears 1922)': 'british_ft', 'British foot': 'british_ft', 'British yard (Sears 1922)': 'british_yd', 'British yard': 'british_yd', "Clarke's Foot": 'clarke_ft', "Clarke's link": 'clarke_link', 'Chain (Benoit)': 'chain_benoit', 'Chain (Sears)': 'chain_sears', 'Foot (International)': 'ft', 'German legal metre': 'german_m', 'Gold Coast foot': 'gold_coast_ft', 'Indian yard': 'indian_yd', 'Link (Benoit)': 'link_benoit', 'Link (Sears)': 'link_sears', 'Nautical Mile': 'nm', 'Nautical Mile (UK)': 'nm_uk', 'US survey foot': 'survey_ft', 'U.S. 
Foot': 'survey_ft', 'Yard (Indian)': 'indian_yd', 'Yard (Sears)': 'sears_yd' } LALIAS = {k.lower(): v for k, v in ALIAS.items()} def __mul__(self, other): if isinstance(other, self.__class__): return Area( default_unit=AREA_PREFIX + self._default_unit, **{AREA_PREFIX + self.STANDARD_UNIT: (self.standard * other.standard)} ) elif isinstance(other, NUMERIC_TYPES): return self.__class__( default_unit=self._default_unit, **{self.STANDARD_UNIT: (self.standard * other)} ) else: raise TypeError('%(distance)s must be multiplied with number or %(distance)s' % { "distance": pretty_name(self.__class__), }) class Area(MeasureBase): STANDARD_UNIT = AREA_PREFIX + Distance.STANDARD_UNIT # Getting the square units values and the alias dictionary. UNITS = {'%s%s' % (AREA_PREFIX, k): v ** 2 for k, v in Distance.UNITS.items()} ALIAS = {k: '%s%s' % (AREA_PREFIX, v) for k, v in Distance.ALIAS.items()} LALIAS = {k.lower(): v for k, v in ALIAS.items()} def __truediv__(self, other): if isinstance(other, NUMERIC_TYPES): return self.__class__( default_unit=self._default_unit, **{self.STANDARD_UNIT: (self.standard / other)} ) else: raise TypeError('%(class)s must be divided by a number' % {"class": pretty_name(self)}) def __div__(self, other): # Python 2 compatibility return type(self).__truediv__(self, other) # Shortcuts D = Distance A = Area
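# Usage sketch (illustrative, not part of the module above): a minimal look at
# the measure API it defines. Unit keyword arguments come from Distance.UNITS /
# Area.UNITS; the values below are only for demonstration.
d = D(km=1) + D(m=500)                        # stored internally in the standard unit (m)
assert abs(d.m - 1500.0) < 1e-9               # attribute access converts on the fly
assert abs(d.mi - 1500.0 / 1609.344) < 1e-9
area = D(m=10) * D(m=20)                      # Distance * Distance yields an Area
assert isinstance(area, A)
assert abs(area.sq_m - 200.0) < 1e-9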
bsd-3-clause
hottwaj/django
django/utils/datastructures.py
394
9231
import copy from collections import OrderedDict from django.utils import six class OrderedSet(object): """ A set which keeps the ordering of the inserted items. Currently backs onto OrderedDict. """ def __init__(self, iterable=None): self.dict = OrderedDict(((x, None) for x in iterable) if iterable else []) def add(self, item): self.dict[item] = None def remove(self, item): del self.dict[item] def discard(self, item): try: self.remove(item) except KeyError: pass def __iter__(self): return iter(self.dict.keys()) def __contains__(self, item): return item in self.dict def __bool__(self): return bool(self.dict) def __nonzero__(self): # Python 2 compatibility return type(self).__bool__(self) def __len__(self): return len(self.dict) class MultiValueDictKeyError(KeyError): pass class MultiValueDict(dict): """ A subclass of dictionary customized to handle multiple values for the same key. >>> d = MultiValueDict({'name': ['Adrian', 'Simon'], 'position': ['Developer']}) >>> d['name'] 'Simon' >>> d.getlist('name') ['Adrian', 'Simon'] >>> d.getlist('doesnotexist') [] >>> d.getlist('doesnotexist', ['Adrian', 'Simon']) ['Adrian', 'Simon'] >>> d.get('lastname', 'nonexistent') 'nonexistent' >>> d.setlist('lastname', ['Holovaty', 'Willison']) This class exists to solve the irritating problem raised by cgi.parse_qs, which returns a list for every key, even though most Web forms submit single name-value pairs. """ def __init__(self, key_to_list_mapping=()): super(MultiValueDict, self).__init__(key_to_list_mapping) def __repr__(self): return "<%s: %s>" % (self.__class__.__name__, super(MultiValueDict, self).__repr__()) def __getitem__(self, key): """ Returns the last data value for this key, or [] if it's an empty list; raises KeyError if not found. """ try: list_ = super(MultiValueDict, self).__getitem__(key) except KeyError: raise MultiValueDictKeyError(repr(key)) try: return list_[-1] except IndexError: return [] def __setitem__(self, key, value): super(MultiValueDict, self).__setitem__(key, [value]) def __copy__(self): return self.__class__([ (k, v[:]) for k, v in self.lists() ]) def __deepcopy__(self, memo=None): if memo is None: memo = {} result = self.__class__() memo[id(self)] = result for key, value in dict.items(self): dict.__setitem__(result, copy.deepcopy(key, memo), copy.deepcopy(value, memo)) return result def __getstate__(self): obj_dict = self.__dict__.copy() obj_dict['_data'] = {k: self.getlist(k) for k in self} return obj_dict def __setstate__(self, obj_dict): data = obj_dict.pop('_data', {}) for k, v in data.items(): self.setlist(k, v) self.__dict__.update(obj_dict) def get(self, key, default=None): """ Returns the last data value for the passed key. If key doesn't exist or value is an empty list, then default is returned. """ try: val = self[key] except KeyError: return default if val == []: return default return val def getlist(self, key, default=None): """ Returns the list of values for the passed key. If key doesn't exist, then a default value is returned. """ try: return super(MultiValueDict, self).__getitem__(key) except KeyError: if default is None: return [] return default def setlist(self, key, list_): super(MultiValueDict, self).__setitem__(key, list_) def setdefault(self, key, default=None): if key not in self: self[key] = default # Do not return default here because __setitem__() may store # another value -- QueryDict.__setitem__() does. Look it up. 
return self[key] def setlistdefault(self, key, default_list=None): if key not in self: if default_list is None: default_list = [] self.setlist(key, default_list) # Do not return default_list here because setlist() may store # another value -- QueryDict.setlist() does. Look it up. return self.getlist(key) def appendlist(self, key, value): """Appends an item to the internal list associated with key.""" self.setlistdefault(key).append(value) def _iteritems(self): """ Yields (key, value) pairs, where value is the last item in the list associated with the key. """ for key in self: yield key, self[key] def _iterlists(self): """Yields (key, list) pairs.""" return six.iteritems(super(MultiValueDict, self)) def _itervalues(self): """Yield the last value on every key list.""" for key in self: yield self[key] if six.PY3: items = _iteritems lists = _iterlists values = _itervalues else: iteritems = _iteritems iterlists = _iterlists itervalues = _itervalues def items(self): return list(self.iteritems()) def lists(self): return list(self.iterlists()) def values(self): return list(self.itervalues()) def copy(self): """Returns a shallow copy of this object.""" return copy.copy(self) def update(self, *args, **kwargs): """ update() extends rather than replaces existing key lists. Also accepts keyword args. """ if len(args) > 1: raise TypeError("update expected at most 1 arguments, got %d" % len(args)) if args: other_dict = args[0] if isinstance(other_dict, MultiValueDict): for key, value_list in other_dict.lists(): self.setlistdefault(key).extend(value_list) else: try: for key, value in other_dict.items(): self.setlistdefault(key).append(value) except TypeError: raise ValueError("MultiValueDict.update() takes either a MultiValueDict or dictionary") for key, value in six.iteritems(kwargs): self.setlistdefault(key).append(value) def dict(self): """ Returns current object as a dict with singular values. """ return {key: self[key] for key in self} class ImmutableList(tuple): """ A tuple-like object that raises useful errors when it is asked to mutate. Example:: >>> a = ImmutableList(range(5), warning="You cannot mutate this.") >>> a[3] = '4' Traceback (most recent call last): ... AttributeError: You cannot mutate this. """ def __new__(cls, *args, **kwargs): if 'warning' in kwargs: warning = kwargs['warning'] del kwargs['warning'] else: warning = 'ImmutableList object is immutable.' self = tuple.__new__(cls, *args, **kwargs) self.warning = warning return self def complain(self, *wargs, **kwargs): if isinstance(self.warning, Exception): raise self.warning else: raise AttributeError(self.warning) # All list mutation functions complain. __delitem__ = complain __delslice__ = complain __iadd__ = complain __imul__ = complain __setitem__ = complain __setslice__ = complain append = complain extend = complain insert = complain pop = complain remove = complain sort = complain reverse = complain class DictWrapper(dict): """ Wraps accesses to a dictionary so that certain values (those starting with the specified prefix) are passed through a function before being returned. The prefix is removed before looking up the real value. Used by the SQL construction code to ensure that values are correctly quoted before being used. """ def __init__(self, data, func, prefix): super(DictWrapper, self).__init__(data) self.func = func self.prefix = prefix def __getitem__(self, key): """ Retrieves the real value after stripping the prefix string (if present). 
If the prefix is present, pass the value through self.func before returning, otherwise return the raw value. """ if key.startswith(self.prefix): use_func = True key = key[len(self.prefix):] else: use_func = False value = super(DictWrapper, self).__getitem__(key) if use_func: return self.func(value) return value
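# Usage sketch (illustrative, not part of the module above): DictWrapper passes
# values whose keys start with the given prefix through `func`, stripping the
# prefix before the lookup. The quoting function here is just an example.
wrapped = DictWrapper({'name': "O'Reilly"}, lambda v: v.replace("'", "''"), 'quoted_')
assert wrapped['name'] == "O'Reilly"           # no prefix: raw value returned
assert wrapped['quoted_name'] == "O''Reilly"   # prefix stripped, func applied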
bsd-3-clause
nichung/wwwflaskBlogrevA
env/lib/python2.7/site-packages/setuptools/tests/test_dist_info.py
452
2615
"""Test .dist-info style distributions. """ import os import shutil import tempfile import unittest import textwrap try: import ast except: pass import pkg_resources from setuptools.tests.py26compat import skipIf def DALS(s): "dedent and left-strip" return textwrap.dedent(s).lstrip() class TestDistInfo(unittest.TestCase): def test_distinfo(self): dists = {} for d in pkg_resources.find_distributions(self.tmpdir): dists[d.project_name] = d assert len(dists) == 2, dists unversioned = dists['UnversionedDistribution'] versioned = dists['VersionedDistribution'] assert versioned.version == '2.718' # from filename assert unversioned.version == '0.3' # from METADATA @skipIf('ast' not in globals(), "ast is used to test conditional dependencies (Python >= 2.6)") def test_conditional_dependencies(self): requires = [pkg_resources.Requirement.parse('splort==4'), pkg_resources.Requirement.parse('quux>=1.1')] for d in pkg_resources.find_distributions(self.tmpdir): self.assertEqual(d.requires(), requires[:1]) self.assertEqual(d.requires(extras=('baz',)), requires) self.assertEqual(d.extras, ['baz']) def setUp(self): self.tmpdir = tempfile.mkdtemp() versioned = os.path.join(self.tmpdir, 'VersionedDistribution-2.718.dist-info') os.mkdir(versioned) metadata_file = open(os.path.join(versioned, 'METADATA'), 'w+') try: metadata_file.write(DALS( """ Metadata-Version: 1.2 Name: VersionedDistribution Requires-Dist: splort (4) Provides-Extra: baz Requires-Dist: quux (>=1.1); extra == 'baz' """)) finally: metadata_file.close() unversioned = os.path.join(self.tmpdir, 'UnversionedDistribution.dist-info') os.mkdir(unversioned) metadata_file = open(os.path.join(unversioned, 'METADATA'), 'w+') try: metadata_file.write(DALS( """ Metadata-Version: 1.2 Name: UnversionedDistribution Version: 0.3 Requires-Dist: splort (==4) Provides-Extra: baz Requires-Dist: quux (>=1.1); extra == 'baz' """)) finally: metadata_file.close() def tearDown(self): shutil.rmtree(self.tmpdir)
mit
binhqnguyen/lena-local
src/visualizer/visualizer/plugins/olsr.py
182
3935
import gtk import ns.core import ns.network import ns.internet import ns.olsr from visualizer.base import InformationWindow class ShowOlsrRoutingTable(InformationWindow): ( COLUMN_DESTINATION, COLUMN_NEXT_HOP, COLUMN_INTERFACE, COLUMN_NUM_HOPS, ) = range(4) def __init__(self, visualizer, node_index): InformationWindow.__init__(self) self.win = gtk.Dialog(parent=visualizer.window, flags=gtk.DIALOG_DESTROY_WITH_PARENT|gtk.DIALOG_NO_SEPARATOR, buttons=(gtk.STOCK_CLOSE, gtk.RESPONSE_CLOSE)) self.win.set_default_size(gtk.gdk.screen_width()/2, gtk.gdk.screen_height()/2) self.win.connect("response", self._response_cb) self.win.set_title("OLSR routing table for node %i" % node_index) self.visualizer = visualizer self.node_index = node_index self.table_model = gtk.ListStore(str, str, str, int) treeview = gtk.TreeView(self.table_model) treeview.show() sw = gtk.ScrolledWindow() sw.set_properties(hscrollbar_policy=gtk.POLICY_AUTOMATIC, vscrollbar_policy=gtk.POLICY_AUTOMATIC) sw.show() sw.add(treeview) self.win.vbox.add(sw) # Dest. column = gtk.TreeViewColumn('Destination', gtk.CellRendererText(), text=self.COLUMN_DESTINATION) treeview.append_column(column) # Next hop column = gtk.TreeViewColumn('Next hop', gtk.CellRendererText(), text=self.COLUMN_NEXT_HOP) treeview.append_column(column) # Interface column = gtk.TreeViewColumn('Interface', gtk.CellRendererText(), text=self.COLUMN_INTERFACE) treeview.append_column(column) # Num. Hops column = gtk.TreeViewColumn('Num. Hops', gtk.CellRendererText(), text=self.COLUMN_NUM_HOPS) treeview.append_column(column) self.visualizer.add_information_window(self) self.win.show() def _response_cb(self, win, response): self.win.destroy() self.visualizer.remove_information_window(self) def update(self): node = ns.network.NodeList.GetNode(self.node_index) olsr = node.GetObject(ns.olsr.olsr.RoutingProtocol.GetTypeId()) ipv4 = node.GetObject(ns.internet.Ipv4.GetTypeId()) if olsr is None: return self.table_model.clear() for route in olsr.GetRoutingTableEntries(): tree_iter = self.table_model.append() netdevice = ipv4.GetNetDevice(route.interface) if netdevice is None: interface_name = 'lo' else: interface_name = ns.core.Names.FindName(netdevice) if not interface_name: interface_name = "(interface %i)" % route.interface self.table_model.set(tree_iter, self.COLUMN_DESTINATION, str(route.destAddr), self.COLUMN_NEXT_HOP, str(route.nextAddr), self.COLUMN_INTERFACE, interface_name, self.COLUMN_NUM_HOPS, route.distance) def populate_node_menu(viz, node, menu): ns3_node = ns.network.NodeList.GetNode(node.node_index) olsr = ns3_node.GetObject(ns.olsr.olsr.RoutingProtocol.GetTypeId()) if olsr is None: print "No OLSR" return menu_item = gtk.MenuItem("Show OLSR Routing Table") menu_item.show() def _show_ipv4_routing_table(dummy_menu_item): ShowOlsrRoutingTable(viz, node.node_index) menu_item.connect("activate", _show_ipv4_routing_table) menu.add(menu_item) def register(viz): viz.connect("populate-node-menu", populate_node_menu)
gpl-2.0
JingheZ/shogun
examples/undocumented/python_modular/classifier_domainadaptationsvm_modular.py
17
3284
#!/usr/bin/env python import numpy from modshogun import StringCharFeatures, BinaryLabels, DNA from modshogun import WeightedDegreeStringKernel from modshogun import SVMLight, DomainAdaptationSVM, MSG_DEBUG traindna = ['CGCACGTACGTAGCTCGAT', 'CGACGTAGTCGTAGTCGTA', 'CGACGGGGGGGGGGTCGTA', 'CGACCTAGTCGTAGTCGTA', 'CGACCACAGTTATATAGTA', 'CGACGTAGTCGTAGTCGTA', 'CGACGTAGTTTTTTTCGTA', 'CGACGTAGTCGTAGCCCCA', 'CAAAAAAAAAAAAAAAATA', 'CGACGGGGGGGGGGGCGTA'] label_traindna = numpy.array(5*[-1.0] + 5*[1.0]) testdna = ['AGCACGTACGTAGCTCGAT', 'AGACGTAGTCGTAGTCGTA', 'CAACGGGGGGGGGGTCGTA', 'CGACCTAGTCGTAGTCGTA', 'CGAACACAGTTATATAGTA', 'CGACCTAGTCGTAGTCGTA', 'CGACGTGGGGTTTTTCGTA', 'CGACGTAGTCCCAGCCCCA', 'CAAAAAAAAAAAACCAATA', 'CGACGGCCGGGGGGGCGTA'] label_testdna = numpy.array(5*[-1.0] + 5*[1.0]) traindna2 = ['AGACAGTCAGTCGATAGCT', 'AGCAGTCGTAGTCGTAGTC', 'AGCAGGGGGGGGGGTAGTC', 'AGCAATCGTAGTCGTAGTC', 'AGCAACACGTTCTCTCGTC', 'AGCAGTCGTAGTCGTAGTC', 'AGCAGTCGTTTTTTTAGTC', 'AGCAGTCGTAGTCGAAAAC', 'ACCCCCCCCCCCCCCCCTC', 'AGCAGGGGGGGGGGGAGTC'] label_traindna2 = numpy.array(5*[-1.0] + 5*[1.0]) testdna2 = ['CGACAGTCAGTCGATAGCT', 'CGCAGTCGTAGTCGTAGTC', 'ACCAGGGGGGGGGGTAGTC', 'AGCAATCGTAGTCGTAGTC', 'AGCCACACGTTCTCTCGTC', 'AGCAATCGTAGTCGTAGTC', 'AGCAGTGGGGTTTTTAGTC', 'AGCAGTCGTAAACGAAAAC', 'ACCCCCCCCCCCCAACCTC', 'AGCAGGAAGGGGGGGAGTC'] label_testdna2 = numpy.array(5*[-1.0] + 5*[1.0]) parameter_list = [[traindna,testdna,label_traindna,label_testdna,traindna2,label_traindna2, \ testdna2,label_testdna2,1,3],[traindna,testdna,label_traindna,label_testdna,traindna2,label_traindna2, \ testdna2,label_testdna2,2,5]] def classifier_domainadaptationsvm_modular (fm_train_dna=traindna,fm_test_dna=testdna, \ label_train_dna=label_traindna, \ label_test_dna=label_testdna,fm_train_dna2=traindna2,fm_test_dna2=testdna2, \ label_train_dna2=label_traindna2,label_test_dna2=label_testdna2,C=1,degree=3): feats_train = StringCharFeatures(fm_train_dna, DNA) feats_test = StringCharFeatures(fm_test_dna, DNA) kernel = WeightedDegreeStringKernel(feats_train, feats_train, degree) labels = BinaryLabels(label_train_dna) svm = SVMLight(C, kernel, labels) svm.train() #svm.io.set_loglevel(MSG_DEBUG) ##################################### #print("obtaining DA SVM from previously trained SVM") feats_train2 = StringCharFeatures(fm_train_dna, DNA) feats_test2 = StringCharFeatures(fm_test_dna, DNA) kernel2 = WeightedDegreeStringKernel(feats_train, feats_train, degree) labels2 = BinaryLabels(label_train_dna) # we regularize against the previously obtained solution dasvm = DomainAdaptationSVM(C, kernel2, labels2, svm, 1.0) dasvm.train() out = dasvm.apply_binary(feats_test2) return out #,dasvm TODO if __name__=='__main__': print('SVMLight') classifier_domainadaptationsvm_modular(*parameter_list[0])
gpl-3.0
cloudbau/nova
nova/api/manager.py
27
1428
# vim: tabstop=4 shiftwidth=4 softtabstop=4 # Copyright 2010 United States Government as represented by the # Administrator of the National Aeronautics and Space Administration. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from nova import manager from nova.network import driver class MetadataManager(manager.Manager): """Metadata Manager. This class manages the Metadata API service initialization. Currently, it just adds an iptables filter rule for the metadata service. """ def __init__(self, *args, **kwargs): super(MetadataManager, self).__init__(*args, **kwargs) self.network_driver = driver.load_network_driver() def init_host(self): """Perform any initialization. Currently, we only add an iptables filter rule for the metadata service. """ self.network_driver.metadata_accept()
apache-2.0
Comunitea/OCB
openerp/cli/scaffold.py
49
4380
#!/usr/bin/env python # -*- coding: utf-8 -*- import argparse import os import re import sys import jinja2 from . import Command from openerp.modules.module import (get_module_root, MANIFEST, load_information_from_description_file as load_manifest) class Scaffold(Command): """ Generates an Odoo module skeleton. """ def run(self, cmdargs): # TODO: bash completion file parser = argparse.ArgumentParser( prog="%s scaffold" % sys.argv[0].split(os.path.sep)[-1], description=self.__doc__, epilog=self.epilog(), ) parser.add_argument( '-t', '--template', type=template, default=template('default'), help="Use a custom module template, can be a template name or the" " path to a module template (default: %(default)s)") parser.add_argument('name', help="Name of the module to create") parser.add_argument( 'dest', default='.', nargs='?', help="Directory to create the module in (default: %(default)s)") if not cmdargs: sys.exit(parser.print_help()) args = parser.parse_args(args=cmdargs) args.template.render_to( snake(args.name), directory(args.dest, create=True), {'name': args.name}) def epilog(self): return "Built-in templates available are: %s" % ', '.join( d for d in os.listdir(builtins()) if d != 'base' ) builtins = lambda *args: os.path.join( os.path.abspath(os.path.dirname(__file__)), 'templates', *args) def snake(s): """ snake cases ``s`` :param str s: :return: str """ # insert a space before each uppercase character preceded by a # non-uppercase letter s = re.sub(r'(?<=[^A-Z])\B([A-Z])', r' \1', s) # lowercase everything, split on whitespace and join return '_'.join(s.lower().split()) def pascal(s): return ''.join( ss.capitalize() for ss in re.sub('[_\s]+', ' ', s).split() ) def directory(p, create=False): expanded = os.path.abspath( os.path.expanduser( os.path.expandvars(p))) if create and not os.path.exists(expanded): os.makedirs(expanded) if not os.path.isdir(expanded): die("%s is not a directory" % p) return expanded env = jinja2.Environment() env.filters['snake'] = snake env.filters['pascal'] = pascal class template(object): def __init__(self, identifier): # TODO: archives (zipfile, tarfile) self.id = identifier # is identifier a builtin? self.path = builtins(identifier) if os.path.isdir(self.path): return # is identifier a directory? self.path = identifier if os.path.isdir(self.path): return die("{} is not a valid module template".format(identifier)) def __str__(self): return self.id def files(self): """ Lists the (local) path and content of all files in the template """ for root, _, files in os.walk(self.path): for f in files: path = os.path.join(root, f) yield path, open(path, 'rb').read() def render_to(self, modname, directory, params=None): """ Render this module template to ``dest`` with the provided rendering parameters """ # overwrite with local for path, content in self.files(): local = os.path.relpath(path, self.path) # strip .template extension root, ext = os.path.splitext(local) if ext == '.template': local = root dest = os.path.join(directory, modname, local) destdir = os.path.dirname(dest) if not os.path.exists(destdir): os.makedirs(destdir) with open(dest, 'wb') as f: if ext not in ('.py', '.xml', '.csv', '.js', '.rst', '.html', '.template'): f.write(content) else: env.from_string(content.decode('utf-8'))\ .stream(params or {})\ .dump(f, encoding='utf-8') def die(message, code=1): print >>sys.stderr, message sys.exit(code) def warn(message): # ASK: shall we use logger ? print "WARNING: " + message
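# Usage sketch (illustrative, not part of the module above): the case helpers
# used when rendering module templates.
assert snake("MyModuleName") == "my_module_name"
assert pascal("my_module name") == "MyModuleName"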
agpl-3.0
zlcnup/csmath
hw4_lm/lm.py
1
2784
# -*- coding: utf-8 -*- #!/usr/bin/env python from pylab import * from numpy import * from math import * def data_generator(N): # Generate the coefficient array for the vector function F: ai*exp(bi*x) zl_mean = [3.4,4.5] zl_cozl_v = [[1,0],[0,10]] zl_coff = np.random.multivariate_normal(zl_mean,zl_cozl_v,N) # Generate the observation vector y x = np.random.uniform(1, N, N) y = [zl_coff[i][0]*exp(-zl_coff[i][1]*x[i]) for i in range(N)] # Generate the initial value x0 x0 = [x[i]+np.random.normal(0.0,0.3) for i in range(N)] return zl_coff, y, x0 def jacobian(zl_coff, x0, N): J=zeros((N,N),float) # Partial derivative of the i-th function w.r.t. the j-th component of X for i in range(N): for j in range(N): # -a*b*exp(-b*xi) J[i][j] = -(zl_coff[i][0]*zl_coff[i][1])*exp(-(zl_coff[i][1]*x0[j])) return J def normG(g): absg = abs(g) Normg = absg.argmax() num = absg[Normg] return num def zl_LM(zl_coff, y, x0, N, maxIter): zl_numIter = 0 zl_v = 2 zl_miu = 0.05 # damping factor x = x0 zl_Threshold = 1e-5 zl_preszl_fx = 100000 while zl_numIter < maxIter: zl_numIter += 1 # Compute the Jacobian matrix J = jacobian(zl_coff, x, N) # Compute the Hessian approximation, the residual Ep and the gradient g A = dot(J.T,J) zl_fx = zeros((N,N),float) zl_fx = [zl_coff[i][0]*exp(-zl_coff[i][1]*x[i]) for i in range(N)] szl_fx = sum(array(zl_fx)*array(zl_fx)) Ep = array(y) - array(zl_fx) g = array(dot(J.T,Ep)) H = A + zl_miu*np.eye(N) DTp = solve(H, g) x = x + DTp zl_fx2 = zeros(N,float) for j in range(N): zl_fx2[j] = zl_coff[j][0]*exp(-zl_coff[j][1]) szl_fx2 = sum(array(zl_fx2)*array(zl_fx2)) if abs(szl_fx - zl_preszl_fx) < zl_Threshold: print("The zl_vector x is: ") print(x) print("The sum is: ") print(szl_fx2) break if szl_fx2 < (szl_fx+0.5*sum(array(g)*array(Ep))): zl_miu /= zl_v else : zl_miu *= 2 if zl_numIter == maxIter: print("The zl_vector x0 is: ") print(x0) print("The zl_vector x is: ") print(x) print("The sum is: ") print(szl_fx2) if __name__ == "__main__": # Input the dimension N of the vector space (here we assume m = n) print("Please input the dimension N of the vector space and the maxIter (the product of N and maxIter should not be too large)") N = input("Input N (not be too large): ") N = int(N) maxIter = input("Input the max number of iterations (larger than half of the N): ") maxIter = int(maxIter) zl_coff, y, x0 = data_generator(N) # Run the zl_LM algorithm zl_LM(zl_coff, y, x0, N, maxIter)
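# Illustration (not part of the original script): the update that zl_LM solves
# each iteration is the damped normal-equation step
#     (J^T J + mu * I) dx = J^T (y - f(x)),
# shown here in one dimension for f(x) = a*exp(-b*x) with made-up values.
import numpy as np
a, b, mu = 3.4, 4.5, 0.05
x_cur, y_obs = 1.0, 0.02
f = a * np.exp(-b * x_cur)
Jx = -a * b * np.exp(-b * x_cur)        # df/dx
dx = Jx * (y_obs - f) / (Jx * Jx + mu)  # 1-D damped normal equations
x_next = x_cur + dx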
mit
misterdanb/micropython
tests/extmod/uctypes_sizeof_native.py
61
1036
import uctypes S1 = {} assert uctypes.sizeof(S1) == 0 S2 = {"a": uctypes.UINT8 | 0} assert uctypes.sizeof(S2) == 1 S3 = { "a": uctypes.UINT8 | 0, "b": uctypes.UINT8 | 1, } assert uctypes.sizeof(S3) == 2 S4 = { "a": uctypes.UINT8 | 0, "b": uctypes.UINT32 | 4, "c": uctypes.UINT8 | 8, } assert uctypes.sizeof(S4) == 12 S5 = { "a": uctypes.UINT8 | 0, "b": uctypes.UINT32 | 4, "c": uctypes.UINT8 | 8, "d": uctypes.UINT32 | 0, "sub": (4, { "b0": uctypes.UINT8 | 0, "b1": uctypes.UINT8 | 1, }), } assert uctypes.sizeof(S5) == 12 s5 = uctypes.struct(0, S5) assert uctypes.sizeof(s5) == 12 assert uctypes.sizeof(s5.sub) == 2 S6 = { "ptr": (uctypes.PTR | 0, uctypes.UINT8), } # Assume pointers are either 32-bit or 64-bit assert uctypes.sizeof(S6) in (4, 8) S7 = { "arr": (uctypes.ARRAY | 0, uctypes.UINT8 | 5), } assert uctypes.sizeof(S7) == 5 S8 = { "arr": (uctypes.ARRAY | 0, 3, {"a": uctypes.UINT32 | 0, "b": uctypes.UINT8 | 4}), } assert uctypes.sizeof(S8) == 24
mit
aoom/pattern
pattern/text/es/inflect.py
21
18687
#### PATTERN | ES | INFLECT ######################################################################## # -*- coding: utf-8 -*- # Copyright (c) 2012 University of Antwerp, Belgium # Author: Tom De Smedt <tom@organisms.be> # License: BSD (see LICENSE.txt for details). #################################################################################################### # Regular expression-based rules for Spanish word inflection: # - pluralization and singularization of nouns, # - conjugation of verbs, # - predicative adjectives. # Accuracy: # 78% for pluralize() # 94% for singularize() # 81% for Verbs.find_lemma() (0.55 regular 87% + 0.45 irregular 74%) # 87% for Verbs.find_lexeme() (0.55 regular 99% + 0.45 irregular 72%) # 93% for predicative() import os import sys import re try: MODULE = os.path.dirname(os.path.realpath(__file__)) except: MODULE = "" sys.path.insert(0, os.path.join(MODULE, "..", "..", "..", "..")) from pattern.text import Verbs as _Verbs from pattern.text import ( INFINITIVE, PRESENT, PAST, FUTURE, CONDITIONAL, FIRST, SECOND, THIRD, SINGULAR, PLURAL, SG, PL, INDICATIVE, IMPERATIVE, SUBJUNCTIVE, IMPERFECTIVE, PERFECTIVE, PROGRESSIVE, IMPERFECT, PRETERITE, PARTICIPLE, GERUND ) sys.path.pop(0) VERB, NOUN, ADJECTIVE, ADVERB = "VB", "NN", "JJ", "RB" VOWELS = ("a", "e", "i", "o", "u") re_vowel = re.compile(r"a|e|i|o|u", re.I) is_vowel = lambda ch: ch in VOWELS def normalize(vowel): return {u"á":"a", u"é":"e", u"í":"i", u"ó":"o", u"ú":"u"}.get(vowel, vowel) #### ARTICLE ####################################################################################### # Spanish inflection of articles depends on gender and number. # Inflection gender. MASCULINE, FEMININE, NEUTER, PLURAL = \ MALE, FEMALE, NEUTRAL, PLURAL = \ M, F, N, PL = "m", "f", "n", "p" def definite_article(word, gender=MALE): """ Returns the definite article (el/la/los/las) for a given word. """ if MASCULINE in gender: return PLURAL in gender and "los" or "el" return PLURAL in gender and "las" or "la" def indefinite_article(word, gender=MALE): """ Returns the indefinite article (un/una/unos/unas) for a given word. """ if MASCULINE in gender: return PLURAL in gender and "unos" or "un" return PLURAL in gender and "unas" or "una" DEFINITE = "definite" INDEFINITE = "indefinite" def article(word, function=INDEFINITE, gender=MALE): """ Returns the indefinite (un) or definite (el) article for the given word. """ return function == DEFINITE \ and definite_article(word, gender) \ or indefinite_article(word, gender) _article = article def referenced(word, article=INDEFINITE, gender=MALE): """ Returns a string with the article + the word. """ return "%s %s" % (_article(word, article, gender), word) #### PLURALIZE ##################################################################################### plural_irregular = { u"mamá": u"mamás", u"papá": u"papás", u"sofá": u"sofás", u"dominó": u"dominós", } def pluralize(word, pos=NOUN, custom={}): """ Returns the plural of a given word. For example: gato => gatos. The custom dictionary is for user-defined replacements. """ if word in custom: return custom[word] w = word.lower() # Article: masculine el => los, feminine la => las. if w == "el": return "los" if w == "la": return "las" # Irregular inflections. if w in plural_irregular: return plural_irregular[w] # Word endings that are unlikely to inflect. if w.endswith(( "idad", "esis", "isis", "osis", "dica", u"grafía", u"logía")): return w # Words ending in a vowel get -s: gato => gatos.
if w.endswith(VOWELS) or w.endswith(u"é"): return w + "s" # Words ending in a stressed vowel get -s: hindú => hindúes. if w.endswith((u"á", u"é", u"í", u"ó", u"ú")): return w + "es" # Words ending in -és get -eses: holandés => holandeses. if w.endswith(u"és"): return w[:-2] + "eses" # Words ending in -s preceded by an unstressed vowel: gafas => gafas. if w.endswith(u"s") and len(w) > 3 and is_vowel(w[-2]): return w # Words ending in -z get -ces: luz => luces if w.endswith(u"z"): return w[:-1] + "ces" # Words that change vowel stress: graduación => graduaciones. for a, b in ( (u"án", "anes"), (u"én", "enes"), (u"ín", "ines"), (u"ón", "ones"), (u"ún", "unes")): if w.endswith(a): return w[:-2] + b # Words ending in a consonant get -es. return w + "es" #print(pluralize(u"libro")) # libros #print(pluralize(u"señor")) # señores #print(pluralize(u"ley")) # leyes #print(pluralize(u"mes")) # meses #print(pluralize(u"luz")) # luces #print(pluralize(u"inglés")) # ingleses #print(pluralize(u"rubí")) # rubíes #print(pluralize(u"papá")) # papás #### SINGULARIZE ################################################################################### def singularize(word, pos=NOUN, custom={}): if word in custom: return custom[word] w = word.lower() # los gatos => el gato if pos == "DT": if w in ("la", "las", "los"): return "el" if w in ("una", "unas", "unos"): return "un" return w # hombres => hombre if w.endswith("es") and w[:-2].endswith(("br", "i", "j", "t", "zn")): return w[:-1] # gestiones => gestión for a, b in ( ("anes", u"án"), ("enes", u"én"), ("eses", u"és"), ("ines", u"ín"), ("ones", u"ón"), ("unes", u"ún")): if w.endswith(a): return w[:-4] + b # hipotesis => hipothesis if w.endswith(("esis", "isis", "osis")): return w # luces => luz if w.endswith("ces"): return w[:-3] + "z" # hospitales => hospital if w.endswith("es"): return w[:-2] # gatos => gato if w.endswith("s"): return w[:-1] return w #### VERB CONJUGATION ############################################################################## verb_irregular_inflections = [ (u"yéramos", "ir" ), ( "cisteis", "cer" ), ( "tuviera", "tener"), ( "ndieron", "nder" ), ( "ndiendo", "nder" ), (u"tándose", "tarse" ), ( "ndieran", "nder" ), ( "ndieras", "nder" ), (u"izaréis", "izar" ), ( "disteis", "der" ), ( "irtiera", "ertir"), ( "pusiera", "poner"), ( "endiste", "ender"), ( "laremos", "lar" ), (u"ndíamos", "nder" ), (u"icaréis", "icar" ), (u"dábamos", "dar" ), ( "intiera", "entir" ), ( "iquemos", "icar" ), (u"jéramos", "cir" ), ( "dierais", "der" ), ( "endiera", "ender" ), (u"iéndose", "erse" ), ( "jisteis", "cir" ), ( "cierais", "cer" ), (u"ecíamos", "ecer" ), ( u"áramos", "ar" ), ( u"ríamos", "r" ), ( u"éramos", "r" ), ( u"iríais", "ir" ), ( "temos", "tar" ), ( "steis", "r" ), ( "ciera", "cer" ), ( "erais", "r" ), ( "timos", "tir" ), ( "uemos", "ar" ), ( "tiera", "tir" ), ( "bimos", "bir" ), ( u"ciéis", "ciar" ), ( "gimos", "gir" ), ( "jiste", "cir" ), ( "mimos", "mir" ), ( u"guéis", "gar" ), ( u"stéis", "star" ), ( "jimos", "cir" ), ( u"inéis", "inar" ), ( "jemos", "jar" ), ( "tenga", "tener"), ( u"quéis", "car" ), ( u"bíais", "bir" ), ( "jeron", "cir" ), ( u"uíais", "uir" ), ( u"ntéis", "ntar" ), ( "jeras", "cir" ), ( "jeran", "cir" ), ( u"ducía", "ducir"), ( "yendo", "ir" ), ( "eemos", "ear" ), ( "ierta", "ertir"), ( "ierte", "ertir"), ( "nemos", "nar" ), ( u"ngáis", "ner" ), ( "liera", "ler" ), ( u"endió", "ender"), ( u"uyáis", "uir" ), ( "memos", "mar" ), ( "ciste", "cer" ), ( "ujera", "ucir" ), ( "uimos", "uir" ), ( "ienda", "ender" ), ( u"lléis", 
"llar" ), ( "iemos", "iar" ), ( "iende", "ender"), ( "rimos", "rir" ), ( "semos", "sar" ), ( u"itéis", "itar" ), ( u"gíais", "gir" ), ( u"ndáis", "nder" ), ( u"tíais", "tir" ), ( "demos", "dar" ), ( "lemos", "lar" ), ( "ponga", "poner" ), ( "yamos", "ir" ), ( u"icéis", "izar" ), ( "bais", "r" ), ( u"rías", "r" ), ( u"rían", "r" ), ( u"iría", "ir" ), ( "eran", "r" ), ( "eras", "r" ), ( u"irán", "ir" ), ( u"irás", "ir" ), ( "ongo", "oner" ), ( "aiga", "aer" ), ( u"ímos", "ir" ), ( u"ibía", "ibir" ), ( "diga", "decir"), ( u"edía", "edir" ), ( "orte", "ortar"), ( u"guió", "guir" ), ( "iega", "egar" ), ( "oren", "orar" ), ( "ores", "orar" ), ( u"léis", "lar" ), ( "irme", "irmar"), ( "siga", "seguir"), ( u"séis", "sar" ), ( u"stré", "strar"), ( "cien", "ciar" ), ( "cies", "ciar" ), ( "dujo", "ducir"), ( "eses", "esar" ), ( "esen", "esar" ), ( "coja", "coger" ), ( "lice", "lizar"), ( u"tías", "tir" ), ( u"tían", "tir" ), ( "pare", "parar" ), ( "gres", "grar" ), ( "gren", "grar" ), ( "tuvo", "tener"), ( u"uían", "uir" ), ( u"uías", "uir" ), ( "quen", "car" ), ( "ques", "car" ), ( u"téis", "tar" ), ( "iero", "erir" ), ( "iere", "erir" ), ( "uche", "uchar"), ( "tuve", "tener" ), ( "inen", "inar" ), ( "pire", "pirar"), ( u"reía", "reir" ), ( "uste", "ustar" ), ( u"ibió", "ibir" ), ( "duce", "ducir"), ( "icen", "izar" ), ( "ices", "izar" ), ( "ines", "inar" ), ( "ires", "irar" ), ( "iren", "irar" ), ( "duje", "ducir" ), ( "ille", "illar"), ( "urre", "urrir"), ( "tido", "tir" ), ( u"ndió", "nder" ), ( "uido", "uir" ), ( "uces", "ucir" ), ( "ucen", "ucir" ), ( u"iéis", "iar" ), ( u"eció", "ecer" ), ( u"jéis", "jar" ), ( "erve", "ervar"), ( "uyas", "uir" ), ( "uyan", "uir" ), ( u"tía", "tir" ), ( u"uía", "uir" ), ( "aos", "arse" ), ( "gue", "gar" ), ( u"qué", "car" ), ( "que", "car" ), ( "rse", "rse" ), ( "ste", "r" ), ( "era", "r" ), ( u"tió", "tir" ), ( "ine", "inar" ), ( u"ré", "r" ), ( "ya", "ir" ), ( "ye", "ir" ), ( u"tí", "tir" ), ( u"cé", "zar" ), ( "ie", "iar" ), ( "id", "ir" ), ( u"ué", "ar" ), ] class Verbs(_Verbs): def __init__(self): _Verbs.__init__(self, os.path.join(MODULE, "es-verbs.txt"), language = "es", default = {}, format = [ 0, 1, 2, 3, 4, 5, 6, 8, # indicativo presente 34, 35, 36, 37, 38, 39, 24, # indicativo pretérito 17, 18, 19, 20, 21, 22, # indicativo imperfecto 40, 41, 42, 43, 44, 45, # indicativo futuro 46, 47, 48, 49, 50, 51, # indicativo condicional 52, 54, # imperativo afirmativo 55, 56, 57, 58, 59, 60, # subjuntivo presente 67, 68, 69, 70, 71, 72 # subjuntivo imperfecto ]) def find_lemma(self, verb): """ Returns the base form of the given inflected verb, using a rule-based approach. """ # Spanish has 12,000+ verbs, ending in -ar (85%), -er (8%), -ir (7%). # Over 65% of -ar verbs (6500+) have a regular inflection. v = verb.lower() # Probably ends in -ir if preceding vowel in stem is -i. er_ir = lambda b: (len(b) > 2 and b[-2] == "i") and b+"ir" or b+"er" # Probably infinitive if ends in -ar, -er or -ir. if v.endswith(("ar", "er", "ir")): return v # Ruleset for irregular inflections adds 10% accuracy. 
for a, b in verb_irregular_inflections: if v.endswith(a): return v[:-len(a)] + b # reconozco => reconocer v = v.replace(u"zco", "ce") # reconozcamos => reconocer v = v.replace(u"zca", "ce") # reconozcáis => reconocer v = v.replace(u"zcá", "ce") # saldrár => saler if "ldr" in v: return v[:v.index("ldr")+1] + "er" # compondrán => componer if "ndr" in v: return v[:v.index("ndr")+1] + "er" # Many verbs end in -ar and have a regular inflection: for x in (( u"ando", u"ado", u"ad", # participle u"aré", u"arás", u"ará", u"aremos", u"aréis", u"arán", # future u"aría", u"arías", u"aríamos", u"aríais", u"arían", # conditional u"aba", u"abas", u"ábamos", u"abais", u"aban", # past imperfective u"é", u"aste", u"ó", u"asteis", u"aron", # past perfective u"ara", u"aras", u"áramos", u"arais", u"aran")): # past subjunctive if v.endswith(x): return v[:-len(x)] + "ar" # Many verbs end in -er and have a regular inflection: for x in (( u"iendo", u"ido", u"ed", # participle u"eré", u"erás", u"erá", u"eremos", u"eréis", u"erán", # future u"ería", u"erías", u"eríamos", u"eríais", u"erían", # conditional u"ía", u"ías", u"íamos", u"íais", u"ían", # past imperfective u"í", "iste", u"ió", "imos", "isteis", "ieron", # past perfective u"era", u"eras", u"éramos", u"erais", u"eran")): # past subjunctive if v.endswith(x): return er_ir(v[:-len(x)]) # Many verbs end in -ir and have a regular inflection: for x in (( u"iré", u"irás", u"irá", u"iremos", u"iréis", u"irán", # future u"iría", u"irías", u"iríamos", u"iríais", u"irían")): # past subjunctive if v.endswith(x): return v[:-len(x)] + "ir" # Present 1sg -o: yo hablo, como, vivo => hablar, comer, vivir. if v.endswith("o"): return v[:-1] + "ar" # Present 2sg, 3sg and 3pl: tú hablas. if v.endswith(("as", "a", "an")): return v.rstrip("sn")[:-1] + "ar" # Present 2sg, 3sg and 3pl: tú comes, tú vives. if v.endswith(("es", "e", "en")): return er_ir(v.rstrip("sn")[:-1]) # Present 1pl and 2pl: nosotros hablamos. for i, x in enumerate(( ("amos", u"áis"), ("emos", u"éis"), ("imos", u"ís"))): for x in x: if v.endswith(x): return v[:-len(x)] + ("ar", "er", "ir")[i] return v def find_lexeme(self, verb): """ For a regular verb (base form), returns the forms using a rule-based approach. """ v = verb.lower() if v.endswith(("arse", "erse", "irse")): # Reflexive verbs: calmarse (calmar) => me calmo. b = v[:-4] else: b = v[:-2] if v.endswith("ar") or not v.endswith(("er", "ir")): # Regular inflection for verbs ending in -ar. return [v, b+u"o", b+u"as", b+u"a", b+u"amos", b+u"áis", b+u"an", b+u"ando", b+u"é", b+u"aste", b+u"ó", b+u"amos", b+u"asteis", b+u"aron", b+u"ado", b+u"aba", b+u"abas", b+u"aba", b+u"ábamos", b+u"abais", b+u"aban", v+u"é", v+u"ás", v+u"á", v+u"emos", v+u"éis", v+u"án", v+u"ía", v+u"ías", v+u"ía", v+u"íamos", v+u"íais", v+u"ían", b+u"a", v[:-1]+"d", b+u"e", b+u"es", b+u"e", b+u"emos", b+u"éis", b+u"en", v+u"a", v+u"as", v+u"a", b+u"áramos", v+u"ais", v+u"an"] else: # Regular inflection for verbs ending in -er and -ir. 
p1, p2 = v.endswith("er") and ("e", u"é") or ("i","e") return [v, b+u"o", b+u"es", b+u"e", b+p1+u"mos", b+p2+u"is", b+u"en", b+u"iendo", b+u"í", b+u"iste", b+u"ió", b+u"imos", b+u"isteis", b+u"ieron", b+u"ido", b+u"ía", b+u"ías", b+u"ía", b+u"íamos", b+u"íais", b+u"ían", v+u"é", v+u"ás", v+u"á", v+u"emos", v+u"éis", v+u"án", v+u"ía", v+u"ías", v+u"ía", v+u"íamos", v+u"íais", v+u"ían", b+u"a", v[:-1]+"d", b+u"a", b+u"as", b+u"a", b+u"amos", b+u"áis", b+u"an", b+u"iera", b+u"ieras", b+u"iera", b+u"iéramos", b+u"ierais", b+u"ieran"] verbs = Verbs() conjugate, lemma, lexeme, tenses = \ verbs.conjugate, verbs.lemma, verbs.lexeme, verbs.tenses #### ATTRIBUTIVE & PREDICATIVE ##################################################################### def attributive(adjective, gender=MALE): w = adjective.lower() # normal => normales if PLURAL in gender and not is_vowel(w[-1:]): return w + "es" # el chico inteligente => los chicos inteligentes if PLURAL in gender and w.endswith(("a", "e")): return w + "s" # el chico alto => los chicos altos if w.endswith("o"): if FEMININE in gender and PLURAL in gender: return w[:-1] + "as" if FEMININE in gender: return w[:-1] + "a" if PLURAL in gender: return w + "s" return w #print(attributive("intelligente", gender=PLURAL)) # intelligentes #print(attributive("alto", gender=MALE+PLURAL)) # altos #print(attributive("alto", gender=FEMALE+PLURAL)) # altas #print(attributive("normal", gender=MALE)) # normal #print(attributive("normal", gender=FEMALE)) # normal #print(attributive("normal", gender=PLURAL)) # normales def predicative(adjective): """ Returns the predicative adjective (lowercase). In Spanish, the attributive form is always used for descriptive adjectives: "el chico alto" => masculine, "la chica alta" => feminine. The predicative is useful for lemmatization. """ w = adjective.lower() # histéricos => histérico if w.endswith(("os", "as")): w = w[:-1] # histérico => histérico if w.endswith("o"): return w # histérica => histérico if w.endswith("a"): return w[:-1] + "o" # horribles => horrible, humorales => humoral if w.endswith("es"): if len(w) >= 4 and not is_vowel(normalize(w[-3])) and not is_vowel(normalize(w[-4])): return w[:-1] return w[:-2] return w
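# Usage sketch (illustrative, not part of the module above), in the spirit of
# the commented examples earlier in this file:
assert pluralize(u"libro") == u"libros"
assert pluralize(u"luz") == u"luces"
assert singularize(u"gatos") == u"gato"
assert predicative(u"histérica") == u"histérico"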
bsd-3-clause
Gaia3D/QGIS
tests/src/python/test_qgsapplication.py
11
1100
"""QGIS Unit tests for QgsApplication. .. note:: This program is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 2 of the License, or (at your option) any later version. """ __author__ = 'Tim Sutton (tim@linfiniti.com)' __date__ = '20/01/2011' __copyright__ = 'Copyright 2012, The QGIS Project' # This will get replaced with a git SHA1 when you do a git archive __revision__ = '$Format:%H$' import qgis from utilities import getQgisTestApp, unittest QGISAPP, CANVAS, IFACE, PARENT = getQgisTestApp() class TestPyQgsApplication(unittest.TestCase): def testInvalidThemeName(self): """Check using an invalid theme will fallback to 'default'""" QGISAPP.setThemeName('fooobar') myExpectedResult = 'default' myResult = QGISAPP.themeName() myMessage = ('Expected:\n%s\nGot:\n%s\n' % (myExpectedResult, myResult)) assert myExpectedResult == myResult, myMessage if __name__ == '__main__': unittest.main()
gpl-2.0
mancoast/CPythonPyc_test
fail/332_test_shelve.py
34
5840
import unittest import shelve import glob from test import support from collections.abc import MutableMapping from test.test_dbm import dbm_iterator def L1(s): return s.decode("latin-1") class byteskeydict(MutableMapping): "Mapping that supports bytes keys" def __init__(self): self.d = {} def __getitem__(self, key): return self.d[L1(key)] def __setitem__(self, key, value): self.d[L1(key)] = value def __delitem__(self, key): del self.d[L1(key)] def __len__(self): return len(self.d) def iterkeys(self): for k in self.d.keys(): yield k.encode("latin-1") __iter__ = iterkeys def keys(self): return list(self.iterkeys()) def copy(self): return byteskeydict(self.d) class TestCase(unittest.TestCase): fn = "shelftemp.db" def tearDown(self): for f in glob.glob(self.fn+"*"): support.unlink(f) def test_close(self): d1 = {} s = shelve.Shelf(d1, protocol=2, writeback=False) s['key1'] = [1,2,3,4] self.assertEqual(s['key1'], [1,2,3,4]) self.assertEqual(len(s), 1) s.close() self.assertRaises(ValueError, len, s) try: s['key1'] except ValueError: pass else: self.fail('Closed shelf should not find a key') def test_ascii_file_shelf(self): s = shelve.open(self.fn, protocol=0) try: s['key1'] = (1,2,3,4) self.assertEqual(s['key1'], (1,2,3,4)) finally: s.close() def test_binary_file_shelf(self): s = shelve.open(self.fn, protocol=1) try: s['key1'] = (1,2,3,4) self.assertEqual(s['key1'], (1,2,3,4)) finally: s.close() def test_proto2_file_shelf(self): s = shelve.open(self.fn, protocol=2) try: s['key1'] = (1,2,3,4) self.assertEqual(s['key1'], (1,2,3,4)) finally: s.close() def test_in_memory_shelf(self): d1 = byteskeydict() s = shelve.Shelf(d1, protocol=0) s['key1'] = (1,2,3,4) self.assertEqual(s['key1'], (1,2,3,4)) s.close() d2 = byteskeydict() s = shelve.Shelf(d2, protocol=1) s['key1'] = (1,2,3,4) self.assertEqual(s['key1'], (1,2,3,4)) s.close() self.assertEqual(len(d1), 1) self.assertEqual(len(d2), 1) self.assertNotEqual(d1.items(), d2.items()) def test_mutable_entry(self): d1 = byteskeydict() s = shelve.Shelf(d1, protocol=2, writeback=False) s['key1'] = [1,2,3,4] self.assertEqual(s['key1'], [1,2,3,4]) s['key1'].append(5) self.assertEqual(s['key1'], [1,2,3,4]) s.close() d2 = byteskeydict() s = shelve.Shelf(d2, protocol=2, writeback=True) s['key1'] = [1,2,3,4] self.assertEqual(s['key1'], [1,2,3,4]) s['key1'].append(5) self.assertEqual(s['key1'], [1,2,3,4,5]) s.close() self.assertEqual(len(d1), 1) self.assertEqual(len(d2), 1) def test_keyencoding(self): d = {} key = 'Pöp' # the default keyencoding is utf-8 shelve.Shelf(d)[key] = [1] self.assertIn(key.encode('utf-8'), d) # but a different one can be given shelve.Shelf(d, keyencoding='latin-1')[key] = [1] self.assertIn(key.encode('latin-1'), d) # with all consequences s = shelve.Shelf(d, keyencoding='ascii') self.assertRaises(UnicodeEncodeError, s.__setitem__, key, [1]) def test_writeback_also_writes_immediately(self): # Issue 5754 d = {} key = 'key' encodedkey = key.encode('utf-8') s = shelve.Shelf(d, writeback=True) s[key] = [1] p1 = d[encodedkey] # Will give a KeyError if backing store not updated s['key'].append(2) s.close() p2 = d[encodedkey] self.assertNotEqual(p1, p2) # Write creates new object in store from test import mapping_tests class TestShelveBase(mapping_tests.BasicTestMappingProtocol): fn = "shelftemp.db" counter = 0 def __init__(self, *args, **kw): self._db = [] mapping_tests.BasicTestMappingProtocol.__init__(self, *args, **kw) type2test = shelve.Shelf def _reference(self): return {"key1":"value1", "key2":2, "key3":(1,2,3)} def _empty_mapping(self): if 
self._in_mem: x= shelve.Shelf(byteskeydict(), **self._args) else: self.counter+=1 x= shelve.open(self.fn+str(self.counter), **self._args) self._db.append(x) return x def tearDown(self): for db in self._db: db.close() self._db = [] if not self._in_mem: for f in glob.glob(self.fn+"*"): support.unlink(f) class TestAsciiFileShelve(TestShelveBase): _args={'protocol':0} _in_mem = False class TestBinaryFileShelve(TestShelveBase): _args={'protocol':1} _in_mem = False class TestProto2FileShelve(TestShelveBase): _args={'protocol':2} _in_mem = False class TestAsciiMemShelve(TestShelveBase): _args={'protocol':0} _in_mem = True class TestBinaryMemShelve(TestShelveBase): _args={'protocol':1} _in_mem = True class TestProto2MemShelve(TestShelveBase): _args={'protocol':2} _in_mem = True def test_main(): for module in dbm_iterator(): support.run_unittest( TestAsciiFileShelve, TestBinaryFileShelve, TestProto2FileShelve, TestAsciiMemShelve, TestBinaryMemShelve, TestProto2MemShelve, TestCase ) if __name__ == "__main__": test_main()
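# Usage sketch (illustrative, not part of the tests above): the writeback
# behaviour that test_mutable_entry exercises, on a Shelf backed by a plain dict.
s = shelve.Shelf({}, writeback=False)
s['key'] = [1, 2]
s['key'].append(3)         # mutates a temporary unpickled copy only
assert s['key'] == [1, 2]  # the stored entry is unchanged
s.close()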
gpl-3.0
IONISx/edx-platform
common/djangoapps/third_party_auth/migrations/0004_lti_tool_consumers.py
32
11936
# -*- coding: utf-8 -*- # pylint: disable=C,E,F,R,W from south.utils import datetime_utils as datetime from south.db import db from south.v2 import SchemaMigration from django.db import models class Migration(SchemaMigration): def forwards(self, orm): # Adding model 'LTIProviderConfig' db.create_table('third_party_auth_ltiproviderconfig', ( ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)), ('change_date', self.gf('django.db.models.fields.DateTimeField')(auto_now_add=True, blank=True)), ('changed_by', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['auth.User'], null=True, on_delete=models.PROTECT)), ('enabled', self.gf('django.db.models.fields.BooleanField')(default=False)), ('icon_class', self.gf('django.db.models.fields.CharField')(default='fa-sign-in', max_length=50)), ('name', self.gf('django.db.models.fields.CharField')(max_length=50)), ('secondary', self.gf('django.db.models.fields.BooleanField')(default=False)), ('skip_registration_form', self.gf('django.db.models.fields.BooleanField')(default=False)), ('skip_email_verification', self.gf('django.db.models.fields.BooleanField')(default=False)), ('lti_consumer_key', self.gf('django.db.models.fields.CharField')(max_length=255)), ('lti_consumer_secret', self.gf('django.db.models.fields.CharField')(max_length=255)), ('lti_max_timestamp_age', self.gf('django.db.models.fields.IntegerField')(default=10)), )) db.send_create_signal('third_party_auth', ['LTIProviderConfig']) def backwards(self, orm): # Deleting model 'LTIProviderConfig' db.delete_table('third_party_auth_ltiproviderconfig') models = { 'auth.group': { 'Meta': {'object_name': 'Group'}, 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}), 'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}) }, 'auth.permission': { 'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'}, 'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}), 'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '50'}) }, 'auth.user': { 'Meta': {'object_name': 'User'}, 'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), 'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}), 'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}), 'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}), 'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), 'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}), 'password': ('django.db.models.fields.CharField', [], 
{'max_length': '128'}), 'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}), 'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'}) }, 'contenttypes.contenttype': { 'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"}, 'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}) }, 'third_party_auth.ltiproviderconfig': { 'Meta': {'object_name': 'LTIProviderConfig'}, 'change_date': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}), 'changed_by': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True', 'on_delete': 'models.PROTECT'}), 'enabled': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'icon_class': ('django.db.models.fields.CharField', [], {'default': "'fa-sign-in'", 'max_length': '50'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'lti_consumer_key': ('django.db.models.fields.CharField', [], {'max_length': '255'}), 'lti_consumer_secret': ('django.db.models.fields.CharField', [], {'max_length': '255'}), 'lti_max_timestamp_age': ('django.db.models.fields.IntegerField', [], {'default': '10'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '50'}), 'secondary': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'skip_email_verification': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'skip_registration_form': ('django.db.models.fields.BooleanField', [], {'default': 'False'}) }, 'third_party_auth.oauth2providerconfig': { 'Meta': {'object_name': 'OAuth2ProviderConfig'}, 'backend_name': ('django.db.models.fields.CharField', [], {'max_length': '50', 'db_index': 'True'}), 'change_date': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}), 'changed_by': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True', 'on_delete': 'models.PROTECT'}), 'enabled': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'icon_class': ('django.db.models.fields.CharField', [], {'default': "'fa-sign-in'", 'max_length': '50'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'key': ('django.db.models.fields.TextField', [], {'blank': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '50'}), 'other_settings': ('django.db.models.fields.TextField', [], {'blank': 'True'}), 'secondary': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'secret': ('django.db.models.fields.TextField', [], {'blank': 'True'}), 'skip_email_verification': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'skip_registration_form': ('django.db.models.fields.BooleanField', [], {'default': 'False'}) }, 'third_party_auth.samlconfiguration': { 'Meta': {'object_name': 'SAMLConfiguration'}, 'change_date': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}), 'changed_by': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True', 'on_delete': 'models.PROTECT'}), 
'enabled': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'entity_id': ('django.db.models.fields.CharField', [], {'default': "'http://saml.example.com'", 'max_length': '255'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'org_info_str': ('django.db.models.fields.TextField', [], {'default': '\'{"en-US": {"url": "http://www.example.com", "displayname": "Example Inc.", "name": "example"}}\''}), 'other_config_str': ('django.db.models.fields.TextField', [], {'default': '\'{\\n"SECURITY_CONFIG": {"metadataCacheDuration": 604800, "signMetadata": false}\\n}\''}), 'private_key': ('django.db.models.fields.TextField', [], {}), 'public_key': ('django.db.models.fields.TextField', [], {}) }, 'third_party_auth.samlproviderconfig': { 'Meta': {'object_name': 'SAMLProviderConfig'}, 'attr_email': ('django.db.models.fields.CharField', [], {'max_length': '128', 'blank': 'True'}), 'attr_first_name': ('django.db.models.fields.CharField', [], {'max_length': '128', 'blank': 'True'}), 'attr_full_name': ('django.db.models.fields.CharField', [], {'max_length': '128', 'blank': 'True'}), 'attr_last_name': ('django.db.models.fields.CharField', [], {'max_length': '128', 'blank': 'True'}), 'attr_user_permanent_id': ('django.db.models.fields.CharField', [], {'max_length': '128', 'blank': 'True'}), 'attr_username': ('django.db.models.fields.CharField', [], {'max_length': '128', 'blank': 'True'}), 'backend_name': ('django.db.models.fields.CharField', [], {'default': "'tpa-saml'", 'max_length': '50'}), 'change_date': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}), 'changed_by': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True', 'on_delete': 'models.PROTECT'}), 'enabled': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'entity_id': ('django.db.models.fields.CharField', [], {'max_length': '255'}), 'icon_class': ('django.db.models.fields.CharField', [], {'default': "'fa-sign-in'", 'max_length': '50'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'idp_slug': ('django.db.models.fields.SlugField', [], {'max_length': '30'}), 'metadata_source': ('django.db.models.fields.CharField', [], {'max_length': '255'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '50'}), 'other_settings': ('django.db.models.fields.TextField', [], {'blank': 'True'}), 'secondary': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'skip_email_verification': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'skip_registration_form': ('django.db.models.fields.BooleanField', [], {'default': 'False'}) }, 'third_party_auth.samlproviderdata': { 'Meta': {'ordering': "('-fetched_at',)", 'object_name': 'SAMLProviderData'}, 'entity_id': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}), 'expires_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'db_index': 'True'}), 'fetched_at': ('django.db.models.fields.DateTimeField', [], {'db_index': 'True'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'public_key': ('django.db.models.fields.TextField', [], {}), 'sso_url': ('django.db.models.fields.URLField', [], {'max_length': '200'}) } } complete_apps = ['third_party_auth']
agpl-3.0
mikeclement/SiK
Firmware/tools/mavtester.py
9
10626
#!/usr/bin/env python ''' test MAVLink performance between two radios ''' import sys, time, os, threading, Queue from optparse import OptionParser parser = OptionParser("mavtester.py [options]") parser.add_option("--baudrate", type='int', help="connection baud rate", default=57600) parser.add_option("--port1", default=None, help="serial port 1") parser.add_option("--port2", default=None, help="serial port 2") parser.add_option("--rate", default=4, type='float', help="initial stream rate") parser.add_option("--override-rate", default=1, type='float', help="RC_OVERRIDE rate") parser.add_option("--show", action='store_true', default=False, help="show messages") parser.add_option("--rtscts", action='store_true', default=False, help="enable RTSCTS hardware flow control") (opts, args) = parser.parse_args() from pymavlink import mavutil if opts.port1 is None or opts.port2 is None: print("You must specify two serial ports") sys.exit(1) # create GCS connection gcs = mavutil.mavlink_connection(opts.port1, baud=opts.baudrate, input=True) gcs.setup_logfile('gcs.tlog') vehicle = mavutil.mavlink_connection(opts.port2, baud=opts.baudrate, input=False) vehicle.setup_logfile('vehicle.tlog') if opts.rtscts: gcs.set_rtscts(True) vehicle.set_rtscts(True) # we use thread based receive to avoid problems with serial buffer overflow in the Linux kernel. def receive_thread(mav, q): '''continuously receive packets and put them in the queue''' while True: m = mav.recv_match(blocking=True) if m is not None: q.put(m) # start receive threads gcs_queue = Queue.Queue() gcs_thread = threading.Thread(target=receive_thread, args=(gcs, gcs_queue)) gcs_thread.daemon = True gcs_thread.start() vehicle_queue = Queue.Queue() vehicle_thread = threading.Thread(target=receive_thread, args=(vehicle, vehicle_queue)) vehicle_thread.daemon = True vehicle_thread.start() start_time = time.time() last_vehicle_send = time.time() last_gcs_send = time.time() last_override_send = time.time() vehicle_lat = 0 gcs_lat = 0 def send_telemetry(): ''' send telemetry packets from the vehicle to the GCS. This emulates the typical pattern of telemetry in ArduPlane 2.75 in AUTO mode ''' global last_vehicle_send, vehicle_lat now = time.time() # send at rate specified by user.
This doesn't do rate adjustment yet (APM does adjustment # based on RADIO packets) if now - last_vehicle_send < 1.0/opts.rate: return last_vehicle_send = now time_usec = int((now - start_time) * 1.0e6) time_ms = time_usec // 1000 vehicle.mav.heartbeat_send(1, 3, 217, 10, 4, 3) vehicle.mav.global_position_int_send(time_ms, vehicle_lat, 1491642131, 737900, 140830, 2008, -433, 224, 35616) vehicle.mav.rc_channels_scaled_send(time_boot_ms=time_ms, port=0, chan1_scaled=280, chan2_scaled=3278, chan3_scaled=-3023, chan4_scaled=0, chan5_scaled=0, chan6_scaled=0, chan7_scaled=0, chan8_scaled=0, rssi=0) vehicle.mav.servo_output_raw_send(time_usec=time_usec, port=0, servo1_raw=1470, servo2_raw=1628, servo3_raw=1479, servo4_raw=1506, servo5_raw=1500, servo6_raw=1556, servo7_raw=1500, servo8_raw=1500) vehicle.mav.rc_channels_raw_send(time_boot_ms=time_ms, port=0, chan1_raw=1470, chan2_raw=1618, chan3_raw=1440, chan4_raw=1509, chan5_raw=1168, chan6_raw=1556, chan7_raw=1224, chan8_raw=994, rssi=0) vehicle.mav.raw_imu_send(time_usec, 562, 382, -3917, -3330, 3445, 35, -24, 226, -523) vehicle.mav.scaled_pressure_send(time_boot_ms=time_ms, press_abs=950.770019531, press_diff=-0.0989062488079, temperature=463) vehicle.mav.sensor_offsets_send(mag_ofs_x=-68, mag_ofs_y=-143, mag_ofs_z=-34, mag_declination=0.206146687269, raw_press=95077, raw_temp=463, gyro_cal_x=-0.063114002347, gyro_cal_y=0.0479440018535, gyro_cal_z=0.0190890002996, accel_cal_x=0.418922990561, accel_cal_y=0.284875005484, accel_cal_z=-0.436598002911) vehicle.mav.sys_status_send(onboard_control_sensors_present=64559, onboard_control_sensors_enabled=64559, onboard_control_sensors_health=64559, load=82, voltage_battery=11877, current_battery=0, battery_remaining=100, drop_rate_comm=0, errors_comm=0, errors_count1=0, errors_count2=0, errors_count3=0, errors_count4=0) vehicle.mav.mission_current_send(seq=1) vehicle.mav.gps_raw_int_send(time_usec=time_usec, fix_type=3, lat=-353637616, lon=1491642012, alt=737900, eph=169, epv=65535, vel=2055, cog=34782, satellites_visible=9) vehicle.mav.nav_controller_output_send(nav_roll=0.0, nav_pitch=0.319999992847, nav_bearing=-18, target_bearing=343, wp_dist=383, alt_error=-37.0900001526, aspd_error=404.800537109, xtrack_error=1.52732038498) vehicle.mav.attitude_send(time_boot_ms=time_ms, roll=0.00283912196755, pitch=-0.0538846850395, yaw=-0.0708072632551, rollspeed=0.226980209351, pitchspeed=-0.00743395090103, yawspeed=-0.154820173979) vehicle.mav.vfr_hud_send(airspeed=21.9519939423, groundspeed=20.5499992371, heading=355, throttle=35, alt=737.900024414, climb=-0.784280121326) vehicle.mav.ahrs_send(omegaIx=0.000540865410585, omegaIy=-0.00631708558649, omegaIz=0.00380697473884, accel_weight=0.0, renorm_val=0.0, error_rp=0.094664350152, error_yaw=0.0121578350663) vehicle.mav.hwstatus_send(Vcc=0, I2Cerr=0) vehicle.mav.wind_send(direction=27.729429245, speed=5.35723495483, speed_z=-1.92264056206) vehicle_lat += 1 def send_GCS(): ''' send GCS heartbeat messages ''' global last_gcs_send now = time.time() if now - last_gcs_send < 1.0: return gcs.mav.heartbeat_send(1, 6, 0, 0, 0, 0) last_gcs_send = now def send_override(): ''' send RC_CHANNELS_OVERRIDE messages from GCS ''' global last_override_send now = time.time() if opts.override_rate == 0: return if now - last_override_send < 1.0/opts.override_rate: return time_ms = int((now - start_time) * 1.0e3) time_ms_low = time_ms % 65536 time_ms_high = (time_ms - time_ms_low) // 65536 gcs.mav.rc_channels_override_send(1, 2, time_ms_low, time_ms_high, 0, 0, 0, 0, 0, 0) 
last_override_send = now def process_override(m): ''' process an incoming RC_CHANNELS_OVERRIDE message, measuring latency ''' now = time.time() time_ms_sent = m.chan2_raw*65536 + m.chan1_raw time_ms = int((now - start_time) * 1.0e3) latency = time_ms - time_ms_sent stats.latency_count += 1 stats.latency_total += latency if stats.latency_min == 0 or latency < stats.latency_min: stats.latency_min = latency if latency > stats.latency_max: stats.latency_max = latency def recv_vehicle(): ''' receive packets in the vehicle ''' try: m = vehicle_queue.get(block=False) except Queue.Empty: return False if m.get_type() == 'BAD_DATA': stats.vehicle_bad_data += 1 return True if opts.show: print(m) stats.vehicle_received += 1 if m.get_type() in ['RADIO','RADIO_STATUS']: stats.vehicle_radio_received += 1 stats.vehicle_txbuf = m.txbuf stats.vehicle_fixed = m.fixed if m.get_type() == 'RC_CHANNELS_OVERRIDE': process_override(m) return True def recv_GCS(): ''' receive packets in the GCS ''' try: m = gcs_queue.get(block=False) except Queue.Empty: return False if m.get_type() == 'BAD_DATA': stats.gcs_bad_data += 1 return True if m.get_type() == 'GLOBAL_POSITION_INT': global gcs_lat if gcs_lat != m.lat: print("Lost %u GLOBAL_POSITION_INT messages" % (m.lat - gcs_lat)) gcs_lat = m.lat gcs_lat += 1 if opts.show: print(m) stats.gcs_received += 1 if m.get_type() in ['RADIO','RADIO_STATUS']: stats.gcs_radio_received += 1 stats.gcs_txbuf = m.txbuf stats.gcs_fixed = m.fixed return True class PacketStats(object): ''' class to hold statistics on the link ''' def __init__(self): self.gcs_sent = 0 self.vehicle_sent = 0 self.gcs_received = 0 self.vehicle_received = 0 self.gcs_radio_received = 0 self.vehicle_radio_received = 0 self.gcs_last_bytes_sent = 0 self.vehicle_last_bytes_sent = 0 self.latency_count = 0 self.latency_total = 0 self.latency_min = 0 self.latency_max = 0 self.vehicle_bad_data = 0 self.gcs_bad_data = 0 self.last_gcs_radio = None self.last_vehicle_radio = None self.vehicle_txbuf = 100 self.gcs_txbuf = 100 self.vehicle_fixed = 0 self.gcs_fixed = 0 def __str__(self): gcs_bytes_sent = gcs.mav.total_bytes_sent - self.gcs_last_bytes_sent vehicle_bytes_sent = vehicle.mav.total_bytes_sent - self.vehicle_last_bytes_sent self.gcs_last_bytes_sent = gcs.mav.total_bytes_sent self.vehicle_last_bytes_sent = vehicle.mav.total_bytes_sent avg_latency = 0 if stats.latency_count != 0: avg_latency = stats.latency_total / stats.latency_count return "Veh:%u/%u/%u GCS:%u/%u/%u pend:%u rates:%u/%u lat:%u/%u/%u bad:%u/%u txbuf:%u/%u loss:%u:%u%%/%u:%u%% fixed:%u/%u" % ( self.vehicle_sent, self.vehicle_received, self.vehicle_received - self.vehicle_radio_received, self.gcs_sent, self.gcs_received, self.gcs_received - self.gcs_radio_received, self.vehicle_sent - (self.gcs_received - self.gcs_radio_received), gcs_bytes_sent, vehicle_bytes_sent, stats.latency_min, stats.latency_max, avg_latency, self.vehicle_bad_data, self.gcs_bad_data, self.vehicle_txbuf, self.gcs_txbuf, gcs.mav_loss, gcs.packet_loss(), vehicle.mav_loss, vehicle.packet_loss(), stats.vehicle_fixed, stats.gcs_fixed) ''' main code ''' last_report = time.time() stats = PacketStats() while True: send_telemetry() stats.vehicle_sent = vehicle.mav.total_packets_sent send_GCS() send_override() stats.gcs_sent = gcs.mav.total_packets_sent while True: recv1 = recv_vehicle() recv2 = recv_GCS() if not recv1 and not recv2: break if time.time() - last_report >= 1.0: print(stats) last_report = time.time()
bsd-2-clause
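The RC_CHANNELS_OVERRIDE latency trick in mavtester.py above is worth isolating: the GCS packs a millisecond timestamp into the first two 16-bit override channels, and the vehicle side recovers it to measure round-trip latency. A minimal standalone sketch of that encode/decode pair (the start_time reference and function names here are illustrative, not from the script):

import time

start_time = time.time()  # reference epoch, as in mavtester.py

def encode_latency_timestamp(now):
    # Split elapsed milliseconds into two 16-bit channel values.
    time_ms = int((now - start_time) * 1.0e3)
    low = time_ms % 65536
    high = (time_ms - low) // 65536
    return low, high

def decode_latency_ms(chan1_raw, chan2_raw, now):
    # Recombine the two channels and subtract from current elapsed time.
    sent_ms = chan2_raw * 65536 + chan1_raw
    return int((now - start_time) * 1.0e3) - sent_ms

low, high = encode_latency_timestamp(time.time())
print(decode_latency_ms(low, high, time.time()))  # ~0 ms on a loopback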
tfar/RIOT
tests/lwip/tests/01-run.py
24
9890
#! /usr/bin/env python # -*- coding: utf-8 -*- # vim:fenc=utf-8 # # Copyright © 2016 Martine Lenders <mail@martine-lenders.eu> # # Distributed under terms of the MIT license. from __future__ import print_function import argparse import os, sys import random import pexpect import subprocess import time import types DEFAULT_TIMEOUT = 5 class Strategy(object): def __init__(self, func=None): if func != None: if sys.version_info < (3,): self.__class__.execute = types.MethodType(func, self, self.__class__) else: self.__class__.execute = types.MethodType(func, self) def execute(self, *args, **kwargs): raise NotImplementedError() class ApplicationStrategy(Strategy): def __init__(self, app_dir=os.getcwd(), func=None): super(ApplicationStrategy, self).__init__(func) self.app_dir = app_dir class BoardStrategy(Strategy): def __init__(self, board, func=None): super(BoardStrategy, self).__init__(func) self.board = board def __run_make(self, application, make_targets, env=None): env = os.environ.copy() if env != None: env.update(env) env.update(self.board.to_env()) cmd = ("make", "-C", application) + make_targets print(' '.join(cmd)) print(subprocess.check_output(cmd, env=env)) def execute(self, application): super(BoardStrategy, self).execute(application) class CleanStrategy(BoardStrategy): def execute(self, application, env=None): super(CleanStrategy, self).__run_make(application, ("-B", "clean"), env) class BuildStrategy(BoardStrategy): def execute(self, application, env=None): super(BuildStrategy, self).__run_make(application, ("all",), env) class FlashStrategy(BoardStrategy): def execute(self, application, env=None): super(FlashStrategy, self).__run_make(application, ("all",), env) class ResetStrategy(BoardStrategy): def execute(self, application, env=None): super(ResetStrategy, self).__run_make(application, ("reset",), env) class Board(object): def __init__(self, name, port=None, serial=None, clean=None, build=None, flash=None, reset=None, term=None): def _reset_native_execute(obj, application, env=None, *args, **kwargs): pass if (name == "native") and (reset == None): reset = _reset_native_execute self.name = name self.port = port self.serial = serial self.clean_strategy = CleanStrategy(self, clean) self.build_strategy = BuildStrategy(self, build) self.flash_strategy = FlashStrategy(self, flash) self.reset_strategy = ResetStrategy(self, reset) def __len__(self): return 1 def __iter__(self): return self def next(self): raise StopIteration() def __repr__(self): return ("<Board %s,port=%s,serial=%s>" % (repr(self.name), repr(self.port), repr(self.serial))) def to_env(self): env = {} if self.name: env['BOARD'] = self.name if self.port: env['PORT'] = self.port if self.serial: env['SERIAL'] = self.serial return env def clean(self, application=os.getcwd(), env=None): self.build_strategy.execute(application, env) def build(self, application=os.getcwd(), env=None): self.build_strategy.execute(application, env) def flash(self, application=os.getcwd(), env=None): self.flash_strategy.execute(application, env) def reset(self, application=os.getcwd(), env=None): self.reset_strategy.execute(application, env) class BoardGroup(object): def __init__(self, boards): self.boards = boards def __len__(self): return len(self.boards) def __iter__(self): return iter(self.boards) def __repr__(self): return str(self.boards) def clean(self, application=os.getcwd(), env=None): for board in self.boards: board.clean(application, env) def build(self, application=os.getcwd(), env=None): for board in self.boards: 
board.build(application, env) def flash(self, application=os.getcwd(), env=None): for board in self.boards: board.flash(application, env) def reset(self, application=os.getcwd(), env=None): for board in self.boards: board.reset(application, env) def default_test_case(board_group, application, env=None): for board in board_group: env = os.environ.copy() if env != None: env.update(env) env.update(board.to_env()) with pexpect.spawn("make", ["-C", application, "term"], env=env, timeout=DEFAULT_TIMEOUT, logfile=sys.stdout) as spawn: spawn.expect("TEST: SUCCESS") class TestStrategy(ApplicationStrategy): def execute(self, board_groups, test_cases=[default_test_case], timeout=DEFAULT_TIMEOUT, env=None): for board_group in board_groups: print("Testing for %s: " % board_group) for test_case in test_cases: board_group.reset() test_case(board_group, self.app_dir, env=None) sys.stdout.write('.') sys.stdout.flush() print() def get_ipv6_address(spawn): spawn.sendline(u"ifconfig") spawn.expect(u"[A-Za-z0-9]{2}[0-9]+: inet6 (fe80::[0-9a-f:]+)") return spawn.match.group(1) def test_ipv6_send(board_group, application, env=None): env_sender = os.environ.copy() if env != None: env_sender.update(env) env_sender.update(board_group.boards[0].to_env()) env_receiver = os.environ.copy() if env != None: env_receiver.update(env) env_receiver.update(board_group.boards[1].to_env()) with pexpect.spawn("make", ["-C", application, "term"], env=env_sender, timeout=DEFAULT_TIMEOUT) as sender, \ pexpect.spawn("make", ["-C", application, "term"], env=env_receiver, timeout=DEFAULT_TIMEOUT) as receiver: ipprot = random.randint(0x00, 0xff) receiver_ip = get_ipv6_address(receiver) receiver.sendline(u"ip server start %d" % ipprot) # wait for neighbor discovery to be done time.sleep(5) sender.sendline(u"ip send %s %d 01:23:45:67:89:ab:cd:ef" % (receiver_ip, ipprot)) sender.expect_exact(u"Success: send 8 byte to %s (next header: %d)" % (receiver_ip, ipprot)) receiver.expect(u"000000 60 00 00 00 00 08 %s ff fe 80 00 00 00 00 00 00" % hex(ipprot)[2:]) receiver.expect(u"000010( [0-9a-f]{2}){8} fe 80 00 00 00 00 00 00") receiver.expect(u"000020( [0-9a-f]{2}){8} 01 23 45 67 89 ab cd ef") def test_udpv6_send(board_group, application, env=None): env_sender = os.environ.copy() if env != None: env_sender.update(env) env_sender.update(board_group.boards[0].to_env()) env_receiver = os.environ.copy() if env != None: env_receiver.update(env) env_receiver.update(board_group.boards[1].to_env()) with pexpect.spawn("make", ["-C", application, "term"], env=env_sender, timeout=DEFAULT_TIMEOUT) as sender, \ pexpect.spawn("make", ["-C", application, "term"], env=env_receiver, timeout=DEFAULT_TIMEOUT) as receiver: port = random.randint(0x0000, 0xffff) receiver_ip = get_ipv6_address(receiver) receiver.sendline(u"udp server start %d" % port) # wait for neighbor discovery to be done time.sleep(5) sender.sendline(u"udp send %s %d ab:cd:ef" % (receiver_ip, port)) sender.expect_exact(u"Success: send 3 byte to [%s]:%d" % (receiver_ip, port)) receiver.expect(u"000000 ab cd ef") def test_dual_send(board_group, application, env=None): env_sender = os.environ.copy() if env != None: env_sender.update(env) env_sender.update(board_group.boards[0].to_env()) env_receiver = os.environ.copy() if env != None: env_receiver.update(env) env_receiver.update(board_group.boards[1].to_env()) with pexpect.spawn("make", ["-C", application, "term"], env=env_sender, timeout=DEFAULT_TIMEOUT) as sender, \ pexpect.spawn("make", ["-C", application, "term"], env=env_receiver, 
timeout=DEFAULT_TIMEOUT) as receiver: port = random.randint(0x0000, 0xffff) ipprot = random.randint(0x00, 0xff) receiver_ip = get_ipv6_address(receiver) receiver.sendline(u"ip server start %d" % ipprot) receiver.sendline(u"udp server start %d" % port) # wait for neighbor discovery to be done time.sleep(5) sender.sendline(u"udp send %s %d 01:23" % (receiver_ip, port)) sender.expect_exact(u"Success: send 2 byte to [%s]:%d" % (receiver_ip, port)) receiver.expect(u"000000 01 23") sender.sendline(u"ip send %s %d 01:02:03:04" % (receiver_ip, ipprot)) sender.expect_exact(u"Success: send 4 byte to %s (next header: %d)" % (receiver_ip, ipprot)) receiver.expect(u"000000 60 00 00 00 00 04 %s ff fe 80 00 00 00 00 00 00" % hex(ipprot)[2:]) receiver.expect(u"000010( [0-9a-f]{2}){8} fe 80 00 00 00 00 00 00") receiver.expect(u"000020( [0-9a-f]{2}){8} 01 02 03 04") if __name__ == "__main__": del os.environ['TERMFLAGS'] TestStrategy().execute([BoardGroup((Board("native", "tap0"), \ Board("native", "tap1")))], \ [test_ipv6_send, test_udpv6_send, test_dual_send])
lgpl-2.1
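One subtlety in the test runner above: both BoardStrategy.__run_make and default_test_case rebind their env parameter to os.environ.copy() before checking it, so env.update(env) is a no-op and a caller-supplied environment is silently ignored, and Board.clean delegates to build_strategy rather than clean_strategy. A corrected environment-merge helper might look like the sketch below (the name merged_env is mine, not from the file):

import os

def merged_env(extra=None, board_env=None):
    # Start from the current environment, then layer caller overrides
    # and board-specific variables (BOARD/PORT/SERIAL) on top.
    env = os.environ.copy()
    if extra is not None:
        env.update(extra)
    if board_env is not None:
        env.update(board_env)
    return env

# Usage: env = merged_env(extra={'QUIET': '1'}, board_env=board.to_env())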
akshatharaj/django
tests/distinct_on_fields/tests.py
263
5996
from __future__ import unicode_literals from django.db.models import Max from django.test import TestCase, skipUnlessDBFeature from django.test.utils import str_prefix from .models import Celebrity, Fan, Staff, StaffTag, Tag @skipUnlessDBFeature('can_distinct_on_fields') @skipUnlessDBFeature('supports_nullable_unique_constraints') class DistinctOnTests(TestCase): def setUp(self): t1 = Tag.objects.create(name='t1') Tag.objects.create(name='t2', parent=t1) t3 = Tag.objects.create(name='t3', parent=t1) Tag.objects.create(name='t4', parent=t3) Tag.objects.create(name='t5', parent=t3) self.p1_o1 = Staff.objects.create(id=1, name="p1", organisation="o1") self.p2_o1 = Staff.objects.create(id=2, name="p2", organisation="o1") self.p3_o1 = Staff.objects.create(id=3, name="p3", organisation="o1") self.p1_o2 = Staff.objects.create(id=4, name="p1", organisation="o2") self.p1_o1.coworkers.add(self.p2_o1, self.p3_o1) StaffTag.objects.create(staff=self.p1_o1, tag=t1) StaffTag.objects.create(staff=self.p1_o1, tag=t1) celeb1 = Celebrity.objects.create(name="c1") celeb2 = Celebrity.objects.create(name="c2") self.fan1 = Fan.objects.create(fan_of=celeb1) self.fan2 = Fan.objects.create(fan_of=celeb1) self.fan3 = Fan.objects.create(fan_of=celeb2) def test_basic_distinct_on(self): """QuerySet.distinct('field', ...) works""" # (qset, expected) tuples qsets = ( ( Staff.objects.distinct().order_by('name'), ['<Staff: p1>', '<Staff: p1>', '<Staff: p2>', '<Staff: p3>'], ), ( Staff.objects.distinct('name').order_by('name'), ['<Staff: p1>', '<Staff: p2>', '<Staff: p3>'], ), ( Staff.objects.distinct('organisation').order_by('organisation', 'name'), ['<Staff: p1>', '<Staff: p1>'], ), ( Staff.objects.distinct('name', 'organisation').order_by('name', 'organisation'), ['<Staff: p1>', '<Staff: p1>', '<Staff: p2>', '<Staff: p3>'], ), ( Celebrity.objects.filter(fan__in=[self.fan1, self.fan2, self.fan3]).distinct('name').order_by('name'), ['<Celebrity: c1>', '<Celebrity: c2>'], ), # Does combining querysets work? ( (Celebrity.objects.filter(fan__in=[self.fan1, self.fan2]). distinct('name').order_by('name') | Celebrity.objects.filter(fan__in=[self.fan3]). distinct('name').order_by('name')), ['<Celebrity: c1>', '<Celebrity: c2>'], ), ( StaffTag.objects.distinct('staff', 'tag'), ['<StaffTag: t1 -> p1>'], ), ( Tag.objects.order_by('parent__pk', 'pk').distinct('parent'), ['<Tag: t2>', '<Tag: t4>', '<Tag: t1>'], ), ( StaffTag.objects.select_related('staff').distinct('staff__name').order_by('staff__name'), ['<StaffTag: t1 -> p1>'], ), # Fetch the alphabetically first coworker for each worker ( (Staff.objects.distinct('id').order_by('id', 'coworkers__name'). values_list('id', 'coworkers__name')), [str_prefix("(1, %(_)s'p2')"), str_prefix("(2, %(_)s'p1')"), str_prefix("(3, %(_)s'p1')"), "(4, None)"] ), ) for qset, expected in qsets: self.assertQuerysetEqual(qset, expected) self.assertEqual(qset.count(), len(expected)) # Combining queries with different distinct_fields is not allowed. 
base_qs = Celebrity.objects.all() self.assertRaisesMessage( AssertionError, "Cannot combine queries with different distinct fields.", lambda: (base_qs.distinct('id') & base_qs.distinct('name')) ) # Test join unreffing c1 = Celebrity.objects.distinct('greatest_fan__id', 'greatest_fan__fan_of') self.assertIn('OUTER JOIN', str(c1.query)) c2 = c1.distinct('pk') self.assertNotIn('OUTER JOIN', str(c2.query)) def test_distinct_not_implemented_checks(self): # distinct + annotate not allowed with self.assertRaises(NotImplementedError): Celebrity.objects.annotate(Max('id')).distinct('id')[0] with self.assertRaises(NotImplementedError): Celebrity.objects.distinct('id').annotate(Max('id'))[0] # However this check is done only when the query executes, so you # can use distinct() to remove the fields before execution. Celebrity.objects.distinct('id').annotate(Max('id')).distinct()[0] # distinct + aggregate not allowed with self.assertRaises(NotImplementedError): Celebrity.objects.distinct('id').aggregate(Max('id')) def test_distinct_on_in_ordered_subquery(self): qs = Staff.objects.distinct('name').order_by('name', 'id') qs = Staff.objects.filter(pk__in=qs).order_by('name') self.assertQuerysetEqual( qs, [self.p1_o1, self.p2_o1, self.p3_o1], lambda x: x ) qs = Staff.objects.distinct('name').order_by('name', '-id') qs = Staff.objects.filter(pk__in=qs).order_by('name') self.assertQuerysetEqual( qs, [self.p1_o2, self.p2_o1, self.p3_o1], lambda x: x ) def test_distinct_on_get_ordering_preserved(self): """ Ordering shouldn't be cleared when distinct on fields are specified. refs #25081 """ staff = Staff.objects.distinct('name').order_by('name', '-organisation').get(name='p1') self.assertEqual(staff.organisation, 'o2')
bsd-3-clause
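The semantics these tests pin down map onto PostgreSQL's SELECT DISTINCT ON: rows are sorted, the distinct fields must lead the ordering, and the first row per distinct-field combination wins. That rule can be emulated in a few lines of plain Python; a sketch, not Django code, with sample rows that loosely mirror the Staff fixtures above:

def distinct_on(rows, fields, order_key):
    # Emulate QuerySet.distinct(*fields): sort rows, then keep the
    # first row seen for each distinct combination of the given fields.
    seen = set()
    out = []
    for row in sorted(rows, key=order_key):
        key = tuple(row[f] for f in fields)
        if key not in seen:
            seen.add(key)
            out.append(row)
    return out

staff = [
    {'id': 1, 'name': 'p1', 'organisation': 'o1'},
    {'id': 2, 'name': 'p2', 'organisation': 'o1'},
    {'id': 4, 'name': 'p1', 'organisation': 'o2'},
]
print(distinct_on(staff, ('name',), lambda r: r['name']))
# keeps p1 (id=1) and p2 (id=2); the second p1 is dropped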
bittner/django-allauth
allauth/socialaccount/providers/mailru/views.py
10
1290
import requests
from hashlib import md5

from allauth.socialaccount.providers.oauth2.views import (
    OAuth2Adapter,
    OAuth2CallbackView,
    OAuth2LoginView,
)

from .provider import MailRuProvider


class MailRuOAuth2Adapter(OAuth2Adapter):
    provider_id = MailRuProvider.id
    access_token_url = 'https://connect.mail.ru/oauth/token'
    authorize_url = 'https://connect.mail.ru/oauth/authorize'
    profile_url = 'http://www.appsmail.ru/platform/api'

    def complete_login(self, request, app, token, **kwargs):
        uid = kwargs['response']['x_mailru_vid']
        data = {'method': 'users.getInfo',
                'app_id': app.client_id,
                'secure': '1',
                'uids': uid}
        param_list = sorted(list(item + '=' + data[item] for item in data))
        data['sig'] = md5(
            (''.join(param_list) + app.secret).encode('utf-8')
        ).hexdigest()
        response = requests.get(self.profile_url, params=data)
        extra_data = response.json()[0]
        return self.get_provider().sociallogin_from_response(request,
                                                             extra_data)


oauth2_login = OAuth2LoginView.adapter_view(MailRuOAuth2Adapter)
oauth2_callback = OAuth2CallbackView.adapter_view(MailRuOAuth2Adapter)

mit
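The Mail.ru API signature computed in complete_login is simple enough to factor out: concatenate the sorted "key=value" pairs, append the application secret, and take an MD5 hex digest. A standalone sketch of that scheme (secret and parameter values are placeholders):

from hashlib import md5

def mailru_sig(params, secret):
    # Sort the "key=value" strings lexicographically, join them with no
    # separator, append the app secret, and hash the result.
    joined = ''.join(sorted('%s=%s' % (k, v) for k, v in params.items()))
    return md5((joined + secret).encode('utf-8')).hexdigest()

params = {'method': 'users.getInfo', 'app_id': 'APP', 'secure': '1', 'uids': '42'}
print(mailru_sig(params, 'app-secret'))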
AsgerPetersen/QGIS
python/plugins/processing/algs/grass7/ext/r_li_shannon_ascii.py
3
1385
# -*- coding: utf-8 -*-

"""
***************************************************************************
    r_li_shannon_ascii.py
    ---------------------
    Date                 : February 2016
    Copyright            : (C) 2016 by Médéric Ribreux
    Email                : medspx at medspx dot fr
***************************************************************************
*                                                                         *
*   This program is free software; you can redistribute it and/or modify  *
*   it under the terms of the GNU General Public License as published by  *
*   the Free Software Foundation; either version 2 of the License, or     *
*   (at your option) any later version.                                   *
*                                                                         *
***************************************************************************
"""

__author__ = 'Médéric Ribreux'
__date__ = 'February 2016'
__copyright__ = '(C) 2016, Médéric Ribreux'

# This will get replaced with a git SHA1 when you do a git archive
__revision__ = '$Format:%H$'

from r_li import checkMovingWindow, configFile, moveOutputTxtFile


def checkParameterValuesBeforeExecuting(alg):
    return checkMovingWindow(alg, True)


def processCommand(alg):
    configFile(alg, True)


def processOutputs(alg):
    moveOutputTxtFile(alg)
gpl-2.0
accraze/bitcoin
qa/rpc-tests/receivedby.py
18
7492
#!/usr/bin/env python2 # Copyright (c) 2014-2015 The Bitcoin Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. # Exercise the listreceivedbyaddress API from test_framework.test_framework import BitcoinTestFramework from test_framework.util import * def get_sub_array_from_array(object_array, to_match): ''' Finds and returns a sub array from an array of arrays. to_match should be a unique identifier of a sub array ''' for item in object_array: all_match = True for key,value in to_match.items(): if item[key] != value: all_match = False if not all_match: continue return item return [] def check_array_result(object_array, to_match, expected, should_not_find = False): """ Pass in array of JSON objects, a dictionary with key/value pairs to match against, and another dictionary with expected key/value pairs. If the should_not_find flag is true, to_match should not be found in object_array """ if should_not_find == True: expected = { } num_matched = 0 for item in object_array: all_match = True for key,value in to_match.items(): if item[key] != value: all_match = False if not all_match: continue for key,value in expected.items(): if item[key] != value: raise AssertionError("%s : expected %s=%s"%(str(item), str(key), str(value))) num_matched = num_matched+1 if num_matched == 0 and should_not_find != True: raise AssertionError("No objects matched %s"%(str(to_match))) if num_matched > 0 and should_not_find == True: raise AssertionError("Objects were matched %s"%(str(to_match))) class ReceivedByTest(BitcoinTestFramework): def setup_nodes(self): #This test requires mocktime enable_mocktime() return start_nodes(4, self.options.tmpdir) def run_test(self): ''' listreceivedbyaddress Test ''' # Send from node 0 to 1 addr = self.nodes[1].getnewaddress() txid = self.nodes[0].sendtoaddress(addr, 0.1) self.sync_all() #Check not listed in listreceivedbyaddress because has 0 confirmations check_array_result(self.nodes[1].listreceivedbyaddress(), {"address":addr}, { }, True) #Bury Tx under 10 blocks so it will be returned by listreceivedbyaddress self.nodes[1].generate(10) self.sync_all() check_array_result(self.nodes[1].listreceivedbyaddress(), {"address":addr}, {"address":addr, "account":"", "amount":Decimal("0.1"), "confirmations":10, "txids":[txid,]}) #With min confirmations < 10 check_array_result(self.nodes[1].listreceivedbyaddress(5), {"address":addr}, {"address":addr, "account":"", "amount":Decimal("0.1"), "confirmations":10, "txids":[txid,]}) #With min confirmations > 10, should not find Tx check_array_result(self.nodes[1].listreceivedbyaddress(11),{"address":addr},{ },True) #Empty Tx addr = self.nodes[1].getnewaddress() check_array_result(self.nodes[1].listreceivedbyaddress(0,True), {"address":addr}, {"address":addr, "account":"", "amount":0, "confirmations":0, "txids":[]}) ''' getreceivedbyaddress Test ''' # Send from node 0 to 1 addr = self.nodes[1].getnewaddress() txid = self.nodes[0].sendtoaddress(addr, 0.1) self.sync_all() #Check balance is 0 because of 0 confirmations balance = self.nodes[1].getreceivedbyaddress(addr) if balance != Decimal("0.0"): raise AssertionError("Wrong balance returned by getreceivedbyaddress, %0.2f"%(balance)) #Check balance is 0.1 balance = self.nodes[1].getreceivedbyaddress(addr,0) if balance != Decimal("0.1"): raise AssertionError("Wrong balance returned by getreceivedbyaddress, %0.2f"%(balance)) #Bury Tx under 10 blocks so it will be returned by the default
getreceivedbyaddress self.nodes[1].generate(10) self.sync_all() balance = self.nodes[1].getreceivedbyaddress(addr) if balance != Decimal("0.1"): raise AssertionError("Wrong balance returned by getreceivedbyaddress, %0.2f"%(balance)) ''' listreceivedbyaccount + getreceivedbyaccount Test ''' #set pre-state addrArr = self.nodes[1].getnewaddress() account = self.nodes[1].getaccount(addrArr) received_by_account_json = get_sub_array_from_array(self.nodes[1].listreceivedbyaccount(),{"account":account}) if len(received_by_account_json) == 0: raise AssertionError("No accounts found in node") balance_by_account = rec_by_accountArr = self.nodes[1].getreceivedbyaccount(account) txid = self.nodes[0].sendtoaddress(addr, 0.1) self.sync_all() # listreceivedbyaccount should return received_by_account_json because of 0 confirmations check_array_result(self.nodes[1].listreceivedbyaccount(), {"account":account}, received_by_account_json) # getreceivedbyaddress should return same balance because of 0 confirmations balance = self.nodes[1].getreceivedbyaccount(account) if balance != balance_by_account: raise AssertionError("Wrong balance returned by getreceivedbyaccount, %0.2f"%(balance)) self.nodes[1].generate(10) self.sync_all() # listreceivedbyaccount should return updated account balance check_array_result(self.nodes[1].listreceivedbyaccount(), {"account":account}, {"account":received_by_account_json["account"], "amount":(received_by_account_json["amount"] + Decimal("0.1"))}) # getreceivedbyaddress should return updates balance balance = self.nodes[1].getreceivedbyaccount(account) if balance != balance_by_account + Decimal("0.1"): raise AssertionError("Wrong balance returned by getreceivedbyaccount, %0.2f"%(balance)) #Create a new account named "mynewaccount" that has a 0 balance self.nodes[1].getaccountaddress("mynewaccount") received_by_account_json = get_sub_array_from_array(self.nodes[1].listreceivedbyaccount(0,True),{"account":"mynewaccount"}) if len(received_by_account_json) == 0: raise AssertionError("No accounts found in node") # Test includeempty of listreceivedbyaccount if received_by_account_json["amount"] != Decimal("0.0"): raise AssertionError("Wrong balance returned by listreceivedbyaccount, %0.2f"%(received_by_account_json["amount"])) # Test getreceivedbyaccount for 0 amount accounts balance = self.nodes[1].getreceivedbyaccount("mynewaccount") if balance != Decimal("0.0"): raise AssertionError("Wrong balance returned by getreceivedbyaccount, %0.2f"%(balance)) if __name__ == '__main__': ReceivedByTest().main()
mit
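check_array_result above is a generic subset matcher over JSON-RPC results: find every object whose fields include to_match, then assert the expected fields on each hit. The matching rule itself is compact enough to illustrate on plain dicts (sample data below is fabricated):

def matches(item, to_match):
    # An object matches when every key/value pair in to_match agrees.
    return all(item.get(k) == v for k, v in to_match.items())

received = [
    {'address': 'addrA', 'amount': 0.1, 'confirmations': 10},
    {'address': 'addrB', 'amount': 0.0, 'confirmations': 0},
]
hits = [item for item in received if matches(item, {'address': 'addrA'})]
assert hits and hits[0]['confirmations'] == 10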
stack-of-tasks/rbdlpy
tutorial/lib/python2.7/site-packages/OpenGL/GL/MESA/resize_buffers.py
9
1565
'''OpenGL extension MESA.resize_buffers

This module customises the behaviour of the
OpenGL.raw.GL.MESA.resize_buffers to provide a more
Python-friendly API

Overview (from the spec)

	Mesa is often used as a client library with no integration with the
	computer's window system (an X server, for example). And since Mesa
	does not have an event loop nor window system callbacks, it cannot
	properly respond to window system events. In particular, Mesa cannot
	automatically detect when a window has been resized.

	Mesa's glViewport command queries the current window size and
	updates its internal data structures accordingly. This normally
	works fine since most applications call glViewport in response to
	window size changes.

	In some situations, however, the application may not call glViewport
	when a window size changes but would still like Mesa to adjust to
	the new window size. This extension exports a new function to solve
	this problem.

The official definition of this extension is available here:
http://www.opengl.org/registry/specs/MESA/resize_buffers.txt
'''
from OpenGL import platform, constant, arrays
from OpenGL import extensions, wrapper
import ctypes
from OpenGL.raw.GL import _types, _glgets
from OpenGL.raw.GL.MESA.resize_buffers import *
from OpenGL.raw.GL.MESA.resize_buffers import _EXTENSION_NAME

def glInitResizeBuffersMESA():
    '''Return boolean indicating whether this extension is available'''
    from OpenGL import extensions
    return extensions.hasGLExtension( _EXTENSION_NAME )

### END AUTOGENERATED SECTION
lgpl-3.0
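Usage of the wrapper above follows the usual PyOpenGL extension pattern: check availability after a GL context exists, then call the entry point. A hedged sketch (it assumes a current context has already been created elsewhere, e.g. via GLUT, and that the raw module's glResizeBuffersMESA entry point is re-exported by the star import):

# Sketch only: both calls require a live GL context.
from OpenGL.GL.MESA.resize_buffers import (
    glInitResizeBuffersMESA, glResizeBuffersMESA)

if glInitResizeBuffersMESA():
    # Ask Mesa to re-query the window size after an external resize.
    glResizeBuffersMESA()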
KimLemon/AKL-Kernel
tools/perf/scripts/python/futex-contention.py
11261
1486
# futex contention
# (c) 2010, Arnaldo Carvalho de Melo <acme@redhat.com>
# Licensed under the terms of the GNU GPL License version 2
#
# Translation of:
#
# http://sourceware.org/systemtap/wiki/WSFutexContention
#
# to perf python scripting.
#
# Measures futex contention

import os, sys
sys.path.append(os.environ['PERF_EXEC_PATH'] +
	'/scripts/python/Perf-Trace-Util/lib/Perf/Trace')
from Util import *

thread_thislock = {}
thread_blocktime = {}

lock_waits = {}  # long-lived stats on (tid,lock) blockage elapsed time
process_names = {}  # long-lived pid-to-execname mapping

def syscalls__sys_enter_futex(event, ctxt, cpu, s, ns, tid, comm,
			      nr, uaddr, op, val, utime, uaddr2, val3):
	cmd = op & FUTEX_CMD_MASK
	if cmd != FUTEX_WAIT:
		return  # we don't care about originators of WAKE events

	process_names[tid] = comm
	thread_thislock[tid] = uaddr
	thread_blocktime[tid] = nsecs(s, ns)

def syscalls__sys_exit_futex(event, ctxt, cpu, s, ns, tid, comm,
			     nr, ret):
	if thread_blocktime.has_key(tid):
		elapsed = nsecs(s, ns) - thread_blocktime[tid]
		add_stats(lock_waits, (tid, thread_thislock[tid]), elapsed)
		del thread_blocktime[tid]
		del thread_thislock[tid]

def trace_begin():
	print "Press control+C to stop and show the summary"

def trace_end():
	for (tid, lock) in lock_waits:
		min, max, avg, count = lock_waits[tid, lock]
		print "%s[%d] lock %x contended %d times, %d avg ns" % \
		      (process_names[tid], tid, lock, count, avg)
gpl-2.0
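The op & FUTEX_CMD_MASK filter above strips the modifier bits before comparing against FUTEX_WAIT; the constants come from perf's Perf-Trace-Util helpers. For reference, a standalone sketch with the values as defined in linux/futex.h (treat the literals as an assumption and verify them against your kernel headers):

# Values mirrored from linux/futex.h (assumed, not imported from perf).
FUTEX_WAIT = 0
FUTEX_PRIVATE_FLAG = 128
FUTEX_CLOCK_REALTIME = 256
FUTEX_CMD_MASK = ~(FUTEX_PRIVATE_FLAG | FUTEX_CLOCK_REALTIME)

def is_wait_op(op):
    # Strip the private/clock modifier bits, then compare the command.
    return (op & FUTEX_CMD_MASK) == FUTEX_WAIT

assert is_wait_op(FUTEX_WAIT | FUTEX_PRIVATE_FLAG)
assert not is_wait_op(1 | FUTEX_PRIVATE_FLAG)  # FUTEX_WAKE | private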
christophlsa/odoo
addons/account/wizard/account_period_close.py
341
2646
# -*- coding: utf-8 -*-
##############################################################################
#
#    OpenERP, Open Source Management Solution
#    Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
#    This program is free software: you can redistribute it and/or modify
#    it under the terms of the GNU Affero General Public License as
#    published by the Free Software Foundation, either version 3 of the
#    License, or (at your option) any later version.
#
#    This program is distributed in the hope that it will be useful,
#    but WITHOUT ANY WARRANTY; without even the implied warranty of
#    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
#    GNU Affero General Public License for more details.
#
#    You should have received a copy of the GNU Affero General Public License
#    along with this program.  If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################

from openerp.osv import fields, osv
from openerp.tools.translate import _


class account_period_close(osv.osv_memory):
    """close period"""
    _name = "account.period.close"
    _description = "period close"
    _columns = {
        'sure': fields.boolean('Check this box'),
    }

    def data_save(self, cr, uid, ids, context=None):
        """
        This function closes the selected periods.
        @param cr: the current row, from the database cursor,
        @param uid: the current user’s ID for security checks,
        @param ids: account period close’s ID or list of IDs
        """
        journal_period_pool = self.pool.get('account.journal.period')
        period_pool = self.pool.get('account.period')
        account_move_obj = self.pool.get('account.move')

        mode = 'done'
        for form in self.read(cr, uid, ids, context=context):
            if form['sure']:
                for id in context['active_ids']:
                    account_move_ids = account_move_obj.search(cr, uid, [('period_id', '=', id), ('state', '=', "draft")], context=context)
                    if account_move_ids:
                        raise osv.except_osv(_('Invalid Action!'), _('In order to close a period, you must first post related journal entries.'))
                    cr.execute('update account_journal_period set state=%s where period_id=%s', (mode, id))
                    cr.execute('update account_period set state=%s where id=%s', (mode, id))
                    self.invalidate_cache(cr, uid, context=context)

        return {'type': 'ir.actions.act_window_close'}

# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
agpl-3.0
bob-the-hamster/commandergenius
project/jni/python/src/Tools/freeze/makeconfig.py
37
1668
import re
import sys  # needed by makeconfig() when a marker is never found

# Write the config.c file

never = ['marshal', '__main__', '__builtin__', 'sys', 'exceptions']

def makeconfig(infp, outfp, modules, with_ifdef=0):
    m1 = re.compile('-- ADDMODULE MARKER 1 --')
    m2 = re.compile('-- ADDMODULE MARKER 2 --')
    while 1:
        line = infp.readline()
        if not line: break
        outfp.write(line)
        if m1 and m1.search(line):
            m1 = None
            for mod in modules:
                if mod in never:
                    continue
                if with_ifdef:
                    outfp.write("#ifndef init%s\n"%mod)
                outfp.write('extern void init%s(void);\n' % mod)
                if with_ifdef:
                    outfp.write("#endif\n")
        elif m2 and m2.search(line):
            m2 = None
            for mod in modules:
                if mod in never:
                    continue
                outfp.write('\t{"%s", init%s},\n' % (mod, mod))
    if m1:
        sys.stderr.write('MARKER 1 never found\n')
    elif m2:
        sys.stderr.write('MARKER 2 never found\n')


# Test program.

def test():
    import sys
    if not sys.argv[3:]:
        print 'usage: python makeconfig.py config.c.in outputfile',
        print 'modulename ...'
        sys.exit(2)
    if sys.argv[1] == '-':
        infp = sys.stdin
    else:
        infp = open(sys.argv[1])
    if sys.argv[2] == '-':
        outfp = sys.stdout
    else:
        outfp = open(sys.argv[2], 'w')
    makeconfig(infp, outfp, sys.argv[3:])
    if outfp != sys.stdout:
        outfp.close()
    if infp != sys.stdin:
        infp.close()

if __name__ == '__main__':
    test()
lgpl-2.1
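A quick way to see the two markers in action is to drive makeconfig with in-memory files; the template lines below are illustrative stand-ins for the real config.c.in, and the sketch assumes makeconfig.py is importable (Python 2, to match the module above):

from StringIO import StringIO
from makeconfig import makeconfig

template = ("/* -- ADDMODULE MARKER 1 -- */\n"
            "/* -- ADDMODULE MARKER 2 -- */\n")
out = StringIO()
makeconfig(StringIO(template), out, ['spam'])
print(out.getvalue())
# marker 1 gains: extern void initspam(void);
# marker 2 gains: \t{"spam", initspam},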
atizo/pygobject
ltihooks.py
1
2327
# -*- Mode: Python; py-indent-offset: 4 -*-
# ltihooks.py: python import hooks that understand libtool libraries.
# Copyright (C) 2000 James Henstridge.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA

import os, ihooks

class LibtoolHooks(ihooks.Hooks):
    def get_suffixes(self):
        """Like normal get_suffixes, but adds .la suffixes to list"""
        ret = ihooks.Hooks.get_suffixes(self)
        ret.insert(0, ('module.la', 'rb', 3))
        ret.insert(0, ('.la', 'rb', 3))
        return ret

    def load_dynamic(self, name, filename, file=None):
        """Like normal load_dynamic, but treat .la files specially"""
        if len(filename) > 3 and filename[-3:] == '.la':
            fp = open(filename, 'r')
            dlname = ''
            installed = 1
            line = fp.readline()
            while line:
                if len(line) > 7 and line[:7] == 'dlname=':
                    dlname = line[8:-2]
                elif len(line) > 10 and line[:10] == 'installed=':
                    installed = line[10:-1] == 'yes'
                line = fp.readline()
            fp.close()
            if dlname:
                if installed:
                    filename = os.path.join(os.path.dirname(filename),
                                            dlname)
                else:
                    filename = os.path.join(os.path.dirname(filename),
                                            '.libs', dlname)
        return ihooks.Hooks.load_dynamic(self, name, filename, file)

importer = ihooks.ModuleImporter()
importer.set_hooks(LibtoolHooks())

def install():
    importer.install()

def uninstall():
    importer.uninstall()

install()
lgpl-2.1
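The .la handling above boils down to reading two keys out of the libtool archive, dlname and installed, and mapping them to the real shared-object path (installed libraries sit next to the .la file, uninstalled ones under .libs/). A standalone sketch of that lookup; the sample file content is fabricated:

import os

def resolve_la(path, text):
    # Mirror the lookup in LibtoolHooks.load_dynamic: pick the dlname
    # out of the .la text and decide between the installed location
    # and the in-tree .libs/ directory.
    dlname, installed = '', True
    for line in text.splitlines():
        if line.startswith('dlname='):
            dlname = line.split('=', 1)[1].strip("'")
        elif line.startswith('installed='):
            installed = line.split('=', 1)[1] == 'yes'
    subdir = '' if installed else '.libs'
    return os.path.join(os.path.dirname(path), subdir, dlname)

sample = "dlname='foo.so'\ninstalled=no\n"
print(resolve_la('/src/foo.la', sample))  # /src/.libs/foo.so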
metaron-uk/xbmc
lib/gtest/test/gtest_test_utils.py
408
10444
#!/usr/bin/env python # # Copyright 2006, Google Inc. # All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are # met: # # * Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # * Redistributions in binary form must reproduce the above # copyright notice, this list of conditions and the following disclaimer # in the documentation and/or other materials provided with the # distribution. # * Neither the name of Google Inc. nor the names of its # contributors may be used to endorse or promote products derived from # this software without specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. """Unit test utilities for Google C++ Testing Framework.""" __author__ = 'wan@google.com (Zhanyong Wan)' import atexit import os import shutil import sys import tempfile import unittest _test_module = unittest # Suppresses the 'Import not at the top of the file' lint complaint. # pylint: disable-msg=C6204 try: import subprocess _SUBPROCESS_MODULE_AVAILABLE = True except: import popen2 _SUBPROCESS_MODULE_AVAILABLE = False # pylint: enable-msg=C6204 GTEST_OUTPUT_VAR_NAME = 'GTEST_OUTPUT' IS_WINDOWS = os.name == 'nt' IS_CYGWIN = os.name == 'posix' and 'CYGWIN' in os.uname()[0] # Here we expose a class from a particular module, depending on the # environment. The comment suppresses the 'Invalid variable name' lint # complaint. TestCase = _test_module.TestCase # pylint: disable-msg=C6409 # Initially maps a flag to its default value. After # _ParseAndStripGTestFlags() is called, maps a flag to its actual value. _flag_map = {'source_dir': os.path.dirname(sys.argv[0]), 'build_dir': os.path.dirname(sys.argv[0])} _gtest_flags_are_parsed = False def _ParseAndStripGTestFlags(argv): """Parses and strips Google Test flags from argv. This is idempotent.""" # Suppresses the lint complaint about a global variable since we need it # here to maintain module-wide state. global _gtest_flags_are_parsed # pylint: disable-msg=W0603 if _gtest_flags_are_parsed: return _gtest_flags_are_parsed = True for flag in _flag_map: # The environment variable overrides the default value. if flag.upper() in os.environ: _flag_map[flag] = os.environ[flag.upper()] # The command line flag overrides the environment variable. i = 1 # Skips the program name. while i < len(argv): prefix = '--' + flag + '=' if argv[i].startswith(prefix): _flag_map[flag] = argv[i][len(prefix):] del argv[i] break else: # We don't increment i in case we just found a --gtest_* flag # and removed it from argv. 
i += 1 def GetFlag(flag): """Returns the value of the given flag.""" # In case GetFlag() is called before Main(), we always call # _ParseAndStripGTestFlags() here to make sure the --gtest_* flags # are parsed. _ParseAndStripGTestFlags(sys.argv) return _flag_map[flag] def GetSourceDir(): """Returns the absolute path of the directory where the .py files are.""" return os.path.abspath(GetFlag('source_dir')) def GetBuildDir(): """Returns the absolute path of the directory where the test binaries are.""" return os.path.abspath(GetFlag('build_dir')) _temp_dir = None def _RemoveTempDir(): if _temp_dir: shutil.rmtree(_temp_dir, ignore_errors=True) atexit.register(_RemoveTempDir) def GetTempDir(): """Returns a directory for temporary files.""" global _temp_dir if not _temp_dir: _temp_dir = tempfile.mkdtemp() return _temp_dir def GetTestExecutablePath(executable_name, build_dir=None): """Returns the absolute path of the test binary given its name. The function will print a message and abort the program if the resulting file doesn't exist. Args: executable_name: name of the test binary that the test script runs. build_dir: directory where to look for executables, by default the result of GetBuildDir(). Returns: The absolute path of the test binary. """ path = os.path.abspath(os.path.join(build_dir or GetBuildDir(), executable_name)) if (IS_WINDOWS or IS_CYGWIN) and not path.endswith('.exe'): path += '.exe' if not os.path.exists(path): message = ( 'Unable to find the test binary. Please make sure to provide path\n' 'to the binary via the --build_dir flag or the BUILD_DIR\n' 'environment variable.') print >> sys.stderr, message sys.exit(1) return path def GetExitStatus(exit_code): """Returns the argument to exit(), or -1 if exit() wasn't called. Args: exit_code: the result value of os.system(command). """ if os.name == 'nt': # On Windows, os.WEXITSTATUS() doesn't work and os.system() returns # the argument to exit() directly. return exit_code else: # On Unix, os.WEXITSTATUS() must be used to extract the exit status # from the result of os.system(). if os.WIFEXITED(exit_code): return os.WEXITSTATUS(exit_code) else: return -1 class Subprocess: def __init__(self, command, working_dir=None, capture_stderr=True, env=None): """Changes into a specified directory, if provided, and executes a command. Restores the old directory afterwards. Args: command: The command to run, in the form of sys.argv. working_dir: The directory to change into. capture_stderr: Determines whether to capture stderr in the output member or to discard it. env: Dictionary with environment to pass to the subprocess. Returns: An object that represents outcome of the executed process. It has the following attributes: terminated_by_signal True iff the child process has been terminated by a signal. signal Sygnal that terminated the child process. exited True iff the child process exited normally. exit_code The code with which the child process exited. output Child process's stdout and stderr output combined in a string. """ # The subprocess module is the preferrable way of running programs # since it is available and behaves consistently on all platforms, # including Windows. But it is only available starting in python 2.4. # In earlier python versions, we revert to the popen2 module, which is # available in python 2.0 and later but doesn't provide required # functionality (Popen4) under Windows. This allows us to support Mac # OS X 10.4 Tiger, which has python 2.3 installed. 
if _SUBPROCESS_MODULE_AVAILABLE: if capture_stderr: stderr = subprocess.STDOUT else: stderr = subprocess.PIPE p = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=stderr, cwd=working_dir, universal_newlines=True, env=env) # communicate returns a tuple with the file obect for the child's # output. self.output = p.communicate()[0] self._return_code = p.returncode else: old_dir = os.getcwd() def _ReplaceEnvDict(dest, src): # Changes made by os.environ.clear are not inheritable by child # processes until Python 2.6. To produce inheritable changes we have # to delete environment items with the del statement. for key in dest.keys(): del dest[key] dest.update(src) # When 'env' is not None, backup the environment variables and replace # them with the passed 'env'. When 'env' is None, we simply use the # current 'os.environ' for compatibility with the subprocess.Popen # semantics used above. if env is not None: old_environ = os.environ.copy() _ReplaceEnvDict(os.environ, env) try: if working_dir is not None: os.chdir(working_dir) if capture_stderr: p = popen2.Popen4(command) else: p = popen2.Popen3(command) p.tochild.close() self.output = p.fromchild.read() ret_code = p.wait() finally: os.chdir(old_dir) # Restore the old environment variables # if they were replaced. if env is not None: _ReplaceEnvDict(os.environ, old_environ) # Converts ret_code to match the semantics of # subprocess.Popen.returncode. if os.WIFSIGNALED(ret_code): self._return_code = -os.WTERMSIG(ret_code) else: # os.WIFEXITED(ret_code) should return True here. self._return_code = os.WEXITSTATUS(ret_code) if self._return_code < 0: self.terminated_by_signal = True self.exited = False self.signal = -self._return_code else: self.terminated_by_signal = False self.exited = True self.exit_code = self._return_code def Main(): """Runs the unit test.""" # We must call _ParseAndStripGTestFlags() before calling # unittest.main(). Otherwise the latter will be confused by the # --gtest_* flags. _ParseAndStripGTestFlags(sys.argv) # The tested binaries should not be writing XML output files unless the # script explicitly instructs them to. # TODO(vladl@google.com): Move this into Subprocess when we implement # passing environment into it as a parameter. if GTEST_OUTPUT_VAR_NAME in os.environ: del os.environ[GTEST_OUTPUT_VAR_NAME] _test_module.main()
gpl-2.0
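Typical use of the helpers above from a gtest Python test script: resolve the binary relative to the build directory, run it through Subprocess, then inspect the outcome attributes documented in its constructor. A hedged sketch; the binary name 'sample_test' is a placeholder:

import gtest_test_utils

def run_and_check():
    # Resolve 'sample_test' next to the other test binaries, run it,
    # and fail loudly unless it exited normally with status 0.
    path = gtest_test_utils.GetTestExecutablePath('sample_test')
    p = gtest_test_utils.Subprocess([path, '--gtest_color=no'])
    assert p.exited and p.exit_code == 0, p.output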
alon/servo
tests/wpt/web-platform-tests/tools/pywebsocket/src/example/internal_error_wsh.py
465
1738
# Copyright 2012, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
#     * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#     * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
#     * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.


from mod_pywebsocket import msgutil


def web_socket_do_extra_handshake(request):
    pass


def web_socket_transfer_data(request):
    raise msgutil.BadOperationException('Intentional')


# vi:sts=4 sw=4 et
mpl-2.0
837468220/python-for-android
python-modules/twisted/twisted/trial/test/mockdoctest.py
64
2671
# Copyright (c) 2001-2004 Twisted Matrix Laboratories.
# See LICENSE for details.

# this module is a trivial class with doctests and a __test__ attribute
# to test trial's doctest support with python2.4

class Counter(object):
    """a simple counter object for testing trial's doctest support

        >>> c = Counter()
        >>> c.value()
        0
        >>> c += 3
        >>> c.value()
        3
        >>> c.incr()
        >>> c.value() == 4
        True
        >>> c == 4
        True
        >>> c != 9
        True
    """
    _count = 0

    def __init__(self, initialValue=0, maxval=None):
        self._count = initialValue
        self.maxval = maxval

    def __iadd__(self, other):
        """add other to my value and return self

            >>> c = Counter(100)
            >>> c += 333
            >>> c == 433
            True
        """
        if self.maxval is not None and ((self._count + other) > self.maxval):
            raise ValueError, "sorry, counter got too big"
        else:
            self._count += other
        return self

    def __eq__(self, other):
        """equality operator, compare other to my value()

            >>> c = Counter()
            >>> c == 0
            True
            >>> c += 10
            >>> c.incr()
            >>> c == 10  # fail this test on purpose
            True
        """
        return self._count == other

    def __ne__(self, other):
        """inequality operator

            >>> c = Counter()
            >>> c != 10
            True
        """
        return not self.__eq__(other)

    def incr(self):
        """increment my value by 1

            >>> from twisted.trial.test.mockdoctest import Counter
            >>> c = Counter(10, 11)
            >>> c.incr()
            >>> c.value() == 11
            True
            >>> c.incr()
            Traceback (most recent call last):
              File "<stdin>", line 1, in ?
              File "twisted/trial/test/mockdoctest.py", line 51, in incr
                self.__iadd__(1)
              File "twisted/trial/test/mockdoctest.py", line 39, in __iadd__
                raise ValueError, "sorry, counter got too big"
            ValueError: sorry, counter got too big
        """
        self.__iadd__(1)

    def value(self):
        """return this counter's value

            >>> c = Counter(555)
            >>> c.value() == 555
            True
        """
        return self._count

    def unexpectedException(self):
        """i will raise an unexpected exception...
        ... *CAUSE THAT'S THE KINDA GUY I AM*

            >>> 1/0
        """
apache-2.0
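Outside of trial, the doctests in a module like the one above can be exercised directly with the standard library's doctest runner; note that one example is wrong on purpose ("fail this test on purpose"), so a reported failure is expected:

import doctest
from twisted.trial.test import mockdoctest

# Run every docstring example in the module; the deliberately wrong
# 'c == 10' example will show up as a failure.
results = doctest.testmod(mockdoctest, verbose=False)
print('attempted=%d failed=%d' % (results.attempted, results.failed))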
eBay/cronus-agent
agent/agent/lib/agent_thread/deactivate_manifest.py
1
5040
#pylint: disable=W0703,R0912,R0915,R0904,W0105 ''' Copyright 2014 eBay Software Foundation Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. ''' """ Thread to perform creation of a service """ import os import shutil import traceback from agent.lib.utils import islink from agent.lib.utils import readlink from agent.lib.errors import Errors from agent.lib.errors import AgentException from agent.controllers.service import ServiceController import logging from agent.lib.agent_thread.manifest_control import ManifestControl from agent.lib import manifestutil class DeactivateManifest(ManifestControl): """ This thread will attempt to activate a manifest This means going throuh each package call the stop call the deactivate delete the active link call the activate create the active link call start """ THREAD_NAME = 'deactivate_manifest' def __init__(self, threadMgr, service): """ Constructor """ ManifestControl.__init__(self, threadMgr, service, manifest = None, name = 'deactivate_manifest') self.setName(DeactivateManifest.THREAD_NAME) self.__LOG = manifestutil.getServiceLogger(self, logging.getLogger(__name__)) def doRun(self): """ Main body of the thread """ errorMsg = "" errorCode = None failed = False try: activePath = os.path.join(ServiceController.manifestPath(self._service), 'active') oldManifest = None # make sure that if the active path exists, it's a link # if not log that and delete the link if (os.path.exists(activePath) and not os.name == 'nt' and not islink(activePath)): self.__LOG.error('%s is not a link. Attempted to delete' % activePath) shutil.rmtree(activePath) if (os.path.exists(activePath)): oldManifest = os.path.basename(readlink(activePath)) else: raise AgentException(error = Errors.ACTIVEMANIFEST_MANIFEST_MISSING, errorMsg = 'No active manifest - cannot deactivate service') self.__deactivateManifest(self._service, oldManifest) self.__removeSymlink(self._service) except SystemExit as exc: failed = True if (len(exc.args) == 2): # ok we got {err code, err msg} errorCode = exc.args[0] errorMsg = exc.args[1] raise exc except AgentException as exc: failed = True errorMsg = 'Deactivate Manifest - Agent Exception - %s' % exc.getMsg() errorCode = exc.getCode() except Exception as exc: failed = True errorMsg = 'Deactivate Manifest - Unknown error - (%s) - %s - %s' \ % (self._service, str(exc), traceback.format_exc(5)) errorCode = Errors.UNKNOWN_ERROR finally: if failed: self.__LOG.warning(errorMsg) self._updateStatus(httpStatus = 500, error = errorCode, errorMsg = errorMsg) self.__LOG.debug('Done: activate manifest for (%s)' % (self._service)) self._updateProgress(100) def __deactivateManifest(self, service, manifest): """ deactive a manifest. 
This means calling stop then deactive on the manifest @param service - service of manifest to deactivate @param manifest - manifest to deactivate @param stack - stack for recovery """ self.__LOG.debug("Deactivate Manifest %s-%s" % (service, manifest)) if (manifest == None): return self._execPackages('shutdown', service, manifest, 11, 25, activateFlow = False) self._execPackages('deactivate', service, manifest, 26, 50, activateFlow = False) manifestutil.processControllerInPackage(service, manifest, activateFlow = False) def __removeSymlink(self, service): """ remove symlink """ #remove symlink activePath = self.__getSymlinkPath(service) if os.path.exists(activePath): if (os.path.islink(activePath)): # *nix os.remove(activePath) else: raise AgentException('Running platform seems to be neither win32 nor *nix with any (sym)link support. Can\'t proceed with link deletion') def __getSymlinkPath(self, service): """ return symlink path for a service """ return os.path.join(ServiceController.manifestPath(service), 'active')
apache-2.0
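The 'active' symlink convention used above (a link named active inside the service's manifest directory, pointing at the current manifest) is easy to demonstrate in isolation. A minimal sketch of reading and removing that link, with hypothetical paths and function names:

import os

def active_manifest(service_manifest_dir):
    # Resolve the 'active' link to a manifest name, tolerating absence.
    link = os.path.join(service_manifest_dir, 'active')
    if os.path.islink(link):
        return os.path.basename(os.readlink(link))
    return None

def drop_active_link(service_manifest_dir):
    # Remove the link itself, never the manifest directory it targets.
    link = os.path.join(service_manifest_dir, 'active')
    if os.path.islink(link):
        os.remove(link)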
martbhell/wasthereannhlgamelastnight
src/lib/oauthlib/oauth1/rfc5849/request_validator.py
6
31092
# -*- coding: utf-8 -*-
"""
oauthlib.oauth1.rfc5849
~~~~~~~~~~~~~~

This module is an implementation of various logic needed
for signing and checking OAuth 1.0 RFC 5849 requests.
"""
from __future__ import absolute_import, unicode_literals

import sys

from . import SIGNATURE_METHODS, utils


class RequestValidator(object):

    """A validator/datastore interaction base class for OAuth 1 providers.

    OAuth providers should inherit from RequestValidator and implement the
    methods and properties outlined below. Further details are provided in the
    documentation for each method and property.

    Methods used to check the format of input parameters. Common tests include
    length, character set, membership, range or pattern. These tests are
    referred to as `whitelisting or blacklisting`_. Whitelisting is better but
    blacklisting can be useful to spot malicious activity.
    The following methods have a default implementation:

    - check_client_key
    - check_request_token
    - check_access_token
    - check_nonce
    - check_verifier
    - check_realms

    The methods above default to whitelisting input parameters, checking that
    they are alphanumerical and between a minimum and maximum length. Rather
    than overloading the methods a few properties can be used to configure
    these methods.

    * @safe_characters -> (character set)
    * @client_key_length -> (min, max)
    * @request_token_length -> (min, max)
    * @access_token_length -> (min, max)
    * @nonce_length -> (min, max)
    * @verifier_length -> (min, max)
    * @realms -> [list, of, realms]

    Methods used to validate/invalidate input parameters. These checks usually
    hit either persistent or temporary storage such as databases or the
    filesystem. See each method's documentation for detailed usage.
    The following methods must be implemented:

    - validate_client_key
    - validate_request_token
    - validate_access_token
    - validate_timestamp_and_nonce
    - validate_redirect_uri
    - validate_requested_realms
    - validate_realms
    - validate_verifier
    - invalidate_request_token

    Methods used to retrieve sensitive information from storage.
    The following methods must be implemented:

    - get_client_secret
    - get_request_token_secret
    - get_access_token_secret
    - get_rsa_key
    - get_realms
    - get_default_realms
    - get_redirect_uri

    Methods used to save credentials.
    The following methods must be implemented:

    - save_request_token
    - save_verifier
    - save_access_token

    Methods used to verify input parameters. These methods are used while the
    user is authorizing the request token (AuthorizationEndpoint), to check
    that the parameters are valid. Since the request is not signed during
    token authorization, the 'validation' methods can not be used.
    The following methods must be implemented:

    - verify_realms
    - verify_request_token

    To prevent timing attacks it is necessary to not exit early even if the
    client key or resource owner key is invalid. Instead dummy values should
    be used during the remaining verification process. It is very important
    that the dummy client and token are valid input parameters to the methods
    get_client_secret, get_rsa_key and get_(access/request)_token_secret and
    that the running time of those methods when given a dummy value remain
    equivalent to the running time when given a valid client/resource owner.
    The following properties must be implemented:

    * @dummy_client
    * @dummy_request_token
    * @dummy_access_token

    Example implementations have been provided. Note that the database used is
    a simple dictionary and serves only an illustrative purpose. Use whichever
    database suits your project; how to access it is entirely up to you.

    The methods are introduced in an order which should make understanding
    their use more straightforward and as such it could be worth reading what
    follows in chronological order.

    .. _`whitelisting or blacklisting`: https://www.schneier.com/blog/archives/2011/01/whitelisting_vs.html
    """

    def __init__(self):
        pass

    @property
    def allowed_signature_methods(self):
        return SIGNATURE_METHODS

    @property
    def safe_characters(self):
        return set(utils.UNICODE_ASCII_CHARACTER_SET)

    @property
    def client_key_length(self):
        return 20, 30

    @property
    def request_token_length(self):
        return 20, 30

    @property
    def access_token_length(self):
        return 20, 30

    @property
    def timestamp_lifetime(self):
        return 600

    @property
    def nonce_length(self):
        return 20, 30

    @property
    def verifier_length(self):
        return 20, 30

    @property
    def realms(self):
        return []

    @property
    def enforce_ssl(self):
        return True

    def check_client_key(self, client_key):
        """Check that the client key only contains safe characters
        and is no shorter than lower and no longer than upper.
        """
        lower, upper = self.client_key_length
        return (set(client_key) <= self.safe_characters and
                lower <= len(client_key) <= upper)

    def check_request_token(self, request_token):
        """Checks that the request token contains only safe characters
        and is no shorter than lower and no longer than upper.
        """
        lower, upper = self.request_token_length
        return (set(request_token) <= self.safe_characters and
                lower <= len(request_token) <= upper)

    def check_access_token(self, request_token):
        """Checks that the token contains only safe characters
        and is no shorter than lower and no longer than upper.
        """
        lower, upper = self.access_token_length
        return (set(request_token) <= self.safe_characters and
                lower <= len(request_token) <= upper)

    def check_nonce(self, nonce):
        """Checks that the nonce contains only safe characters
        and is no shorter than lower and no longer than upper.
        """
        lower, upper = self.nonce_length
        return (set(nonce) <= self.safe_characters and
                lower <= len(nonce) <= upper)

    def check_verifier(self, verifier):
        """Checks that the verifier contains only safe characters
        and is no shorter than lower and no longer than upper.
        """
        lower, upper = self.verifier_length
        return (set(verifier) <= self.safe_characters and
                lower <= len(verifier) <= upper)

    def check_realms(self, realms):
        """Check that the realm is one of a set of allowed realms."""
        return all((r in self.realms for r in realms))

    def _subclass_must_implement(self, fn):
        """
        Returns a NotImplementedError for a function that should be implemented.
        :param fn: name of the function
        """
        m = "Missing function implementation in {}: {}".format(type(self), fn)
        return NotImplementedError(m)

    @property
    def dummy_client(self):
        """Dummy client used when an invalid client key is supplied.

        :returns: The dummy client key string.

        The dummy client should be associated with either a client secret,
        an RSA key or both depending on which signature methods are supported.
        Providers should make sure that

        get_client_secret(dummy_client)
        get_rsa_key(dummy_client)

        return a valid secret or key for the dummy client.

        This method is used by

        * AccessTokenEndpoint
        * RequestTokenEndpoint
        * ResourceEndpoint
        * SignatureOnlyEndpoint
        """
        raise self._subclass_must_implement("dummy_client")

    @property
    def dummy_request_token(self):
        """Dummy request token used when an invalid token was supplied.

        :returns: The dummy request token string.

        The dummy request token should be associated with a request token
        secret such that get_request_token_secret(.., dummy_request_token)
        returns a valid secret.

        This method is used by

        * AccessTokenEndpoint
        """
        raise self._subclass_must_implement("dummy_request_token")

    @property
    def dummy_access_token(self):
        """Dummy access token used when an invalid token was supplied.

        :returns: The dummy access token string.

        The dummy access token should be associated with an access token
        secret such that get_access_token_secret(.., dummy_access_token)
        returns a valid secret.

        This method is used by

        * ResourceEndpoint
        """
        raise self._subclass_must_implement("dummy_access_token")

    def get_client_secret(self, client_key, request):
        """Retrieves the client secret associated with the client key.

        :param client_key: The client/consumer key.
        :param request: OAuthlib request.
        :type request: oauthlib.common.Request
        :returns: The client secret as a string.

        This method must allow the use of a dummy client_key value.
        Fetching the secret using the dummy key must take the same amount of
        time as fetching a secret for a valid client::

            # Unlikely to be near constant time as it uses two database
            # lookups for a valid client, and only one for an invalid.
            from your_datastore import ClientSecret
            if ClientSecret.has(client_key):
                return ClientSecret.get(client_key)
            else:
                return 'dummy'

            # Aim to mimic number of latency inducing operations no matter
            # whether the client is valid or not.
            from your_datastore import ClientSecret
            return ClientSecret.get(client_key, 'dummy')

        Note that the returned key must be in plaintext.

        This method is used by

        * AccessTokenEndpoint
        * RequestTokenEndpoint
        * ResourceEndpoint
        * SignatureOnlyEndpoint
        """
        raise self._subclass_must_implement('get_client_secret')

    def get_request_token_secret(self, client_key, token, request):
        """Retrieves the shared secret associated with the request token.

        :param client_key: The client/consumer key.
        :param token: The request token string.
        :param request: OAuthlib request.
        :type request: oauthlib.common.Request
        :returns: The token secret as a string.

        This method must allow the use of dummy values and the running time
        must be roughly equivalent to the running time for valid values::

            # Unlikely to be near constant time as it uses two database
            # lookups for a valid client, and only one for an invalid.
            from your_datastore import RequestTokenSecret
            if RequestTokenSecret.has(client_key):
                return RequestTokenSecret.get((client_key, token))
            else:
                return 'dummy'

            # Aim to mimic number of latency inducing operations no matter
            # whether the client is valid or not.
            from your_datastore import RequestTokenSecret
            return RequestTokenSecret.get((client_key, token), 'dummy')

        Note that the returned key must be in plaintext.

        This method is used by

        * AccessTokenEndpoint
        """
        raise self._subclass_must_implement('get_request_token_secret')

    def get_access_token_secret(self, client_key, token, request):
        """Retrieves the shared secret associated with the access token.

        :param client_key: The client/consumer key.
        :param token: The access token string.
        :param request: OAuthlib request.
        :type request: oauthlib.common.Request
        :returns: The token secret as a string.

        This method must allow the use of dummy values and the running time
        must be roughly equivalent to the running time for valid values::

            # Unlikely to be near constant time as it uses two database
            # lookups for a valid client, and only one for an invalid.
            from your_datastore import AccessTokenSecret
            if AccessTokenSecret.has(client_key):
                return AccessTokenSecret.get((client_key, token))
            else:
                return 'dummy'

            # Aim to mimic number of latency inducing operations no matter
            # whether the client is valid or not.
            from your_datastore import AccessTokenSecret
            return AccessTokenSecret.get((client_key, token), 'dummy')

        Note that the returned key must be in plaintext.

        This method is used by

        * ResourceEndpoint
        """
        raise self._subclass_must_implement("get_access_token_secret")

    def get_default_realms(self, client_key, request):
        """Get the default realms for a client.

        :param client_key: The client/consumer key.
        :param request: OAuthlib request.
        :type request: oauthlib.common.Request
        :returns: The list of default realms associated with the client.

        The list of default realms will be set during client registration and
        is outside the scope of OAuthLib.

        This method is used by

        * RequestTokenEndpoint
        """
        raise self._subclass_must_implement("get_default_realms")

    def get_realms(self, token, request):
        """Get realms associated with a request token.

        :param token: The request token string.
        :param request: OAuthlib request.
        :type request: oauthlib.common.Request
        :returns: The list of realms associated with the request token.

        This method is used by

        * AuthorizationEndpoint
        * AccessTokenEndpoint
        """
        raise self._subclass_must_implement("get_realms")

    def get_redirect_uri(self, token, request):
        """Get the redirect URI associated with a request token.

        :param token: The request token string.
        :param request: OAuthlib request.
        :type request: oauthlib.common.Request
        :returns: The redirect URI associated with the request token.

        It may be desirable to return a custom URI if the redirect is set to
        "oob". In this case, the user will be redirected to the returned URI
        and at that endpoint the verifier can be displayed.

        This method is used by

        * AuthorizationEndpoint
        """
        raise self._subclass_must_implement("get_redirect_uri")

    def get_rsa_key(self, client_key, request):
        """Retrieves a previously stored client provided RSA key.

        :param client_key: The client/consumer key.
        :param request: OAuthlib request.
        :type request: oauthlib.common.Request
        :returns: The RSA public key as a string.

        This method must allow the use of a dummy client_key value. Fetching
        the RSA key using the dummy key must take the same amount of time as
        fetching a key for a valid client. The dummy key must also be of the
        same bit length as client keys.

        Note that the key must be returned in plaintext.

        This method is used by

        * AccessTokenEndpoint
        * RequestTokenEndpoint
        * ResourceEndpoint
        * SignatureOnlyEndpoint
        """
        raise self._subclass_must_implement("get_rsa_key")

    def invalidate_request_token(self, client_key, request_token, request):
        """Invalidates a used request token.

        :param client_key: The client/consumer key.
        :param request_token: The request token string.
        :param request: OAuthlib request.
        :type request: oauthlib.common.Request
        :returns: None

        Per `Section 2.3`_ of the spec:

        "The server MUST (...) ensure that the temporary
        credentials have not expired or been used before."

        .. _`Section 2.3`: https://tools.ietf.org/html/rfc5849#section-2.3

        This method should ensure that the provided token will no longer
        validate. This can be as simple as removing the RequestToken from
        storage or setting a specific flag that makes it invalid (note that
        such a flag should also be checked during request token validation).

        This method is used by

        * AccessTokenEndpoint
        """
        raise self._subclass_must_implement("invalidate_request_token")

    def validate_client_key(self, client_key, request):
        """Validates that supplied client key is a registered and valid client.

        :param client_key: The client/consumer key.
        :param request: OAuthlib request.
        :type request: oauthlib.common.Request
        :returns: True or False

        Note that if the dummy client is supplied it should validate in the
        same or nearly the same amount of time as a valid one. Ensure
        latency-inducing tasks are mimicked even for dummy clients.
        For example, use::

            from your_datastore import Client
            try:
                return Client.exists(client_key)
            except DoesNotExist:
                return False

        Rather than::

            from your_datastore import Client
            if client_key == self.dummy_client:
                return False
            else:
                return Client.exists(client_key)

        This method is used by

        * AccessTokenEndpoint
        * RequestTokenEndpoint
        * ResourceEndpoint
        * SignatureOnlyEndpoint
        """
        raise self._subclass_must_implement("validate_client_key")

    def validate_request_token(self, client_key, token, request):
        """Validates that supplied request token is registered and valid.

        :param client_key: The client/consumer key.
        :param token: The request token string.
        :param request: OAuthlib request.
        :type request: oauthlib.common.Request
        :returns: True or False

        Note that if the dummy request_token is supplied it should validate in
        the same or nearly the same amount of time as a valid one. Ensure
        latency-inducing tasks are mimicked even for dummy clients.
        For example, use::

            from your_datastore import RequestToken
            try:
                return RequestToken.exists(client_key, token)
            except DoesNotExist:
                return False

        Rather than::

            from your_datastore import RequestToken
            if token == self.dummy_request_token:
                return False
            else:
                return RequestToken.exists(client_key, token)

        This method is used by

        * AccessTokenEndpoint
        """
        raise self._subclass_must_implement("validate_request_token")

    def validate_access_token(self, client_key, token, request):
        """Validates that supplied access token is registered and valid.

        :param client_key: The client/consumer key.
        :param token: The access token string.
        :param request: OAuthlib request.
        :type request: oauthlib.common.Request
        :returns: True or False

        Note that if the dummy access token is supplied it should validate in
        the same or nearly the same amount of time as a valid one. Ensure
        latency-inducing tasks are mimicked even for dummy clients.
        For example, use::

            from your_datastore import AccessToken
            try:
                return AccessToken.exists(client_key, token)
            except DoesNotExist:
                return False

        Rather than::

            from your_datastore import AccessToken
            if token == self.dummy_access_token:
                return False
            else:
                return AccessToken.exists(client_key, token)

        This method is used by

        * ResourceEndpoint
        """
        raise self._subclass_must_implement("validate_access_token")

    def validate_timestamp_and_nonce(self, client_key, timestamp, nonce,
                                     request, request_token=None,
                                     access_token=None):
        """Validates that the nonce has not been used before.

        :param client_key: The client/consumer key.
        :param timestamp: The ``oauth_timestamp`` parameter.
        :param nonce: The ``oauth_nonce`` parameter.
        :param request_token: Request token string, if any.
        :param access_token: Access token string, if any.
        :param request: OAuthlib request.
        :type request: oauthlib.common.Request
        :returns: True or False

        Per `Section 3.3`_ of the spec.

        "A nonce is a random string, uniquely generated by the client to allow
        the server to verify that a request has never been made before and
        helps prevent replay attacks when requests are made over a non-secure
        channel. The nonce value MUST be unique across all requests with the
        same timestamp, client credentials, and token combinations."

        .. _`Section 3.3`: https://tools.ietf.org/html/rfc5849#section-3.3

        One of the first validation checks that will be made is for the
        validity of the nonce and timestamp, which are associated with a
        client key and possibly a token. If invalid then immediately fail the
        request by returning False. If the nonce/timestamp pair has been used
        before, you may just have detected a replay attack. Therefore it is
        an essential part of OAuth security that you not allow
        nonce/timestamp reuse. Note that this validation check is done before
        checking the validity of the client and token::

            nonces_and_timestamps_database = [
                (u'foo', 1234567890, u'rannoMstrInghere', u'bar')
            ]

            def validate_timestamp_and_nonce(self, client_key, timestamp,
                                             nonce, request,
                                             request_token=None,
                                             access_token=None):

                return ((client_key, timestamp, nonce,
                         request_token or access_token)
                        not in self.nonces_and_timestamps_database)

        This method is used by

        * AccessTokenEndpoint
        * RequestTokenEndpoint
        * ResourceEndpoint
        * SignatureOnlyEndpoint
        """
        raise self._subclass_must_implement("validate_timestamp_and_nonce")

    def validate_redirect_uri(self, client_key, redirect_uri, request):
        """Validates the client supplied redirection URI.

        :param client_key: The client/consumer key.
        :param redirect_uri: The URI the client wishes to redirect back to
                             after authorization is successful.
        :param request: OAuthlib request.
        :type request: oauthlib.common.Request
        :returns: True or False

        It is highly recommended that OAuth providers require their clients
        to register all redirection URIs prior to using them in requests and
        register them as absolute URIs. See `CWE-601`_ for more information
        about open redirection attacks.

        By requiring registration of all redirection URIs it should be
        straightforward for the provider to verify whether the supplied
        redirect_uri is valid or not.

        Alternatively per `Section 2.1`_ of the spec:

        "If the client is unable to receive callbacks or a callback URI has
        been established via other means, the parameter value MUST be set to
        "oob" (case sensitive), to indicate an out-of-band configuration."

        .. _`CWE-601`: http://cwe.mitre.org/top25/index.html#CWE-601
        .. _`Section 2.1`: https://tools.ietf.org/html/rfc5849#section-2.1

        This method is used by

        * RequestTokenEndpoint
        """
        raise self._subclass_must_implement("validate_redirect_uri")

    def validate_requested_realms(self, client_key, realms, request):
        """Validates that the client may request access to the realm.

        :param client_key: The client/consumer key.
        :param realms: The list of realms that client is requesting access to.
        :param request: OAuthlib request.
        :type request: oauthlib.common.Request
        :returns: True or False

        This method is invoked when obtaining a request token and should
        tie a realm to the request token and after user authorization
        this realm restriction should transfer to the access token.

        This method is used by

        * RequestTokenEndpoint
        """
        raise self._subclass_must_implement("validate_requested_realms")

    def validate_realms(self, client_key, token, request, uri=None,
                        realms=None):
        """Validates access to the request realm.

        :param client_key: The client/consumer key.
        :param token: A request token string.
        :param request: OAuthlib request.
        :type request: oauthlib.common.Request
        :param uri: The URI the realms are protecting.
        :param realms: A list of realms that must have been granted to
                       the access token.
        :returns: True or False

        How providers choose to use the realm parameter is outside the OAuth
        specification but it is commonly used to restrict access to a subset
        of protected resources such as "photos".

        realms is a convenience parameter which can be used to provide
        a per-view-method pre-defined list of allowed realms.

        Can be as simple as::

            from your_datastore import RequestToken
            request_token = RequestToken.get(token, None)
            if not request_token:
                return False
            return set(request_token.realms).issuperset(set(realms))

        This method is used by

        * ResourceEndpoint
        """
        raise self._subclass_must_implement("validate_realms")

    def validate_verifier(self, client_key, token, verifier, request):
        """Validates a verification code.

        :param client_key: The client/consumer key.
        :param token: A request token string.
        :param verifier: The authorization verifier string.
        :param request: OAuthlib request.
        :type request: oauthlib.common.Request
        :returns: True or False

        OAuth providers issue a verification code to clients after the
        resource owner authorizes access. This code is used by the client to
        obtain token credentials and the provider must verify that the
        verifier is valid and associated with the client as well as the
        resource owner.

        Verifier validation should be done in near constant time
        (to avoid verifier enumeration). To achieve this we need a
        constant time string comparison which is provided by OAuthLib
        in ``oauthlib.common.safe_string_equals``::

            from your_datastore import Verifier
            correct_verifier = Verifier.get(client_key, token)
            from oauthlib.common import safe_string_equals
            return safe_string_equals(verifier, correct_verifier)

        This method is used by

        * AccessTokenEndpoint
        """
        raise self._subclass_must_implement("validate_verifier")

    def verify_request_token(self, token, request):
        """Verify that the given OAuth1 request token is valid.

        :param token: A request token string.
        :param request: OAuthlib request.
        :type request: oauthlib.common.Request
        :returns: True or False

        This method is used only in AuthorizationEndpoint to check whether the
        oauth_token given in the authorization URL is valid or not. This
        request is not signed, and thus the similar ``validate_request_token``
        method can not be used.

        This method is used by

        * AuthorizationEndpoint
        """
        raise self._subclass_must_implement("verify_request_token")

    def verify_realms(self, token, realms, request):
        """Verify authorized realms to see if they match those given to token.

        :param token: An access token string.
        :param realms: A list of realms the client attempts to access.
        :param request: OAuthlib request.
        :type request: oauthlib.common.Request
        :returns: True or False

        This prevents the list of authorized realms sent by the client during
        the authorization step from being altered to include realms outside
        what was bound with the request token.

        Can be as simple as::

            valid_realms = self.get_realms(token)
            return all((r in valid_realms for r in realms))

        This method is used by

        * AuthorizationEndpoint
        """
        raise self._subclass_must_implement("verify_realms")

    def save_access_token(self, token, request):
        """Save an OAuth1 access token.

        :param token: A dict with token credentials.
        :param request: OAuthlib request.
        :type request: oauthlib.common.Request

        The token dictionary will at minimum include

        * ``oauth_token`` the access token string.
        * ``oauth_token_secret`` the token specific secret used in signing.
        * ``oauth_authorized_realms`` a space separated list of realms.

        Client key can be obtained from ``request.client_key``.

        The list of realms (not a joined string) can be obtained from
        ``request.realm``.

        This method is used by

        * AccessTokenEndpoint
        """
        raise self._subclass_must_implement("save_access_token")

    def save_request_token(self, token, request):
        """Save an OAuth1 request token.

        :param token: A dict with token credentials.
        :param request: OAuthlib request.
        :type request: oauthlib.common.Request

        The token dictionary will at minimum include

        * ``oauth_token`` the request token string.
        * ``oauth_token_secret`` the token specific secret used in signing.
        * ``oauth_callback_confirmed`` the string ``true``.

        Client key can be obtained from ``request.client_key``.

        This method is used by

        * RequestTokenEndpoint
        """
        raise self._subclass_must_implement("save_request_token")

    def save_verifier(self, token, verifier, request):
        """Associate an authorization verifier with a request token.

        :param token: A request token string.
        :param verifier: A dictionary containing the ``oauth_verifier`` and
                         ``oauth_token``.
        :param request: OAuthlib request.
        :type request: oauthlib.common.Request

        We need to associate verifiers with tokens for validation during the
        access token request. Note that unlike the save_*_token methods,
        token here is the ``oauth_token`` string from the previously saved
        request token.

        This method is used by

        * AuthorizationEndpoint
        """
        raise self._subclass_must_implement("save_verifier")
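A minimal sketch of how a provider might subclass RequestValidator, assuming a hypothetical in-memory datastore (the dict, key strings and dummy values below are made up for illustration and are not part of oauthlib):

# Illustrative sketch only: an in-memory validator built on the class above.
from oauthlib.oauth1 import RequestValidator


class InMemoryValidator(RequestValidator):

    # Hypothetical storage; a real provider would query a database.
    client_secrets = {'someclientkey12345678': 'someclientsecret1234'}

    @property
    def dummy_client(self):
        # A plausible key (within client_key_length) so the lookup below
        # takes roughly as long for invalid clients as for valid ones.
        return 'dummyclientkey123456'

    def validate_client_key(self, client_key, request):
        # Dict membership avoids an early exit on unknown keys.
        return client_key in self.client_secrets

    def get_client_secret(self, client_key, request):
        # dict.get with a default mimics the latency of a valid lookup,
        # as the docstrings above recommend.
        return self.client_secrets.get(client_key, 'dummysecret123456789')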
mit
slightstone/SickRage
sickbeard/notifiers/pytivo.py
13
3585
# Author: Nic Wolfe <nic@wolfeden.ca>
# URL: http://code.google.com/p/sickbeard/
#
# This file is part of SickRage.
#
# SickRage is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# SickRage is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with SickRage. If not, see <http://www.gnu.org/licenses/>.

import os

import sickbeard

from urllib import urlencode
from urllib2 import Request, urlopen, HTTPError

from sickbeard import logger
from sickbeard.exceptions import ex
from sickbeard import encodingKludge as ek


class pyTivoNotifier:

    def notify_snatch(self, ep_name):
        pass

    def notify_download(self, ep_name):
        pass

    def notify_subtitle_download(self, ep_name, lang):
        pass

    def notify_git_update(self, new_version):
        pass

    def update_library(self, ep_obj):

        # Values from config
        if not sickbeard.USE_PYTIVO:
            return False

        host = sickbeard.PYTIVO_HOST
        shareName = sickbeard.PYTIVO_SHARE_NAME
        tsn = sickbeard.PYTIVO_TIVO_NAME

        # There are two more values required, the container and file.
        #
        # container: The share name, show name and season
        #
        # file: The file name
        #
        # Some slicing and dicing of variables is required to get at these
        # values.
        #
        # There might be better ways to arrive at the values, but this is the
        # best I have been able to come up with.

        # Calculated values
        showPath = ep_obj.show.location
        showName = ep_obj.show.name
        rootShowAndSeason = ek.ek(os.path.dirname, ep_obj.location)
        absPath = ep_obj.location

        # Some show names have colons in them which are illegal in a path
        # location, so strip them out. (Are there other characters?)
        showName = showName.replace(":", "")

        root = showPath.replace(showName, "")
        showAndSeason = rootShowAndSeason.replace(root, "")

        container = shareName + "/" + showAndSeason
        file = "/" + absPath.replace(root, "")

        # Finally create the url and make request
        requestUrl = "http://" + host + "/TiVoConnect?" + urlencode(
            {'Command': 'Push', 'Container': container, 'File': file, 'tsn': tsn})

        logger.log(u"pyTivo notification: Requesting " + requestUrl, logger.DEBUG)

        request = Request(requestUrl)

        try:
            response = urlopen(request)  # @UnusedVariable
        except HTTPError, e:
            if hasattr(e, 'reason'):
                logger.log(u"pyTivo notification: Error, failed to reach a server - " + e.reason, logger.ERROR)
                return False
            elif hasattr(e, 'code'):
                # e.code is an int, so convert before concatenating
                logger.log(u"pyTivo notification: Error, the server couldn't fulfill the request - " + str(e.code), logger.ERROR)
                return False
        except Exception, e:
            logger.log(u"PYTIVO: Unknown exception: " + ex(e), logger.ERROR)
            return False
        else:
            logger.log(u"pyTivo notification: Successfully requested transfer of file")
            return True


notifier = pyTivoNotifier
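For context, the TiVoConnect ``Push`` request that update_library builds can be exercised on its own. A rough standalone sketch (the host, port, share, paths and tsn below are made-up values, not SickRage configuration):

# Standalone sketch of the same pyTivo "Push" request, with made-up values.
from urllib import urlencode
from urllib2 import Request, urlopen

params = {'Command': 'Push',
          'Container': 'MyShare/Show Name/Season 01',
          'File': '/Show Name/Season 01/episode.mp4',
          'tsn': 'MyTivo'}
url = "http://192.168.1.50:9032/TiVoConnect?" + urlencode(params)
response = urlopen(Request(url))  # raises HTTPError on failure
print response.read()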
gpl-3.0
prestona/qpid-proton
proton-c/bindings/python/proton/wrapper.py
4
3470
# # Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information # regarding copyright ownership. The ASF licenses this file # to you under the Apache License, Version 2.0 (the # "License"); you may not use this file except in compliance # with the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. # from cproton import * class EmptyAttrs: def __contains__(self, name): return False def __getitem__(self, name): raise KeyError(name) def __setitem__(self, name, value): raise TypeError("does not support item assignment") EMPTY_ATTRS = EmptyAttrs() class Wrapper(object): def __init__(self, impl_or_constructor, get_context=None): init = False if callable(impl_or_constructor): # we are constructing a new object impl = impl_or_constructor() init = True else: # we are wrapping an existing object impl = impl_or_constructor pn_incref(impl) if get_context: record = get_context(impl) attrs = pn_void2py(pn_record_get(record, PYCTX)) if attrs is None: attrs = {} pn_record_def(record, PYCTX, PN_PYREF) pn_record_set(record, PYCTX, pn_py2void(attrs)) init = True else: attrs = EMPTY_ATTRS init = False record = None self.__dict__["_impl"] = impl self.__dict__["_attrs"] = attrs self.__dict__["_record"] = record if init: self._init() def __getattr__(self, name): attrs = self.__dict__["_attrs"] if name in attrs: return attrs[name] else: raise AttributeError(name + " not in _attrs") def __setattr__(self, name, value): if hasattr(self.__class__, name): object.__setattr__(self, name, value) else: attrs = self.__dict__["_attrs"] attrs[name] = value def __delattr__(self, name): attrs = self.__dict__["_attrs"] if attrs: del attrs[name] def __hash__(self): return hash(addressof(self._impl)) def __eq__(self, other): if isinstance(other, Wrapper): return addressof(self._impl) == addressof(other._impl) return False def __ne__(self, other): if isinstance(other, Wrapper): return addressof(self._impl) != addressof(other._impl) return True def __del__(self): pn_decref(self._impl) def __repr__(self): return '<%s.%s 0x%x ~ 0x%x>' % (self.__class__.__module__, self.__class__.__name__, id(self), addressof(self._impl)) if pn_py2void(Wrapper) is Wrapper: PYCTX = Wrapper import java.lang.System addressof = java.lang.System.identityHashCode else: PYCTX = int(pn_py2void(Wrapper)) addressof = int
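The intended usage pattern is that binding classes derive from Wrapper, passing either a C constructor (new object) or an existing impl (wrap an existing object). A simplified sketch modeled on how the proton binding wraps connections (pn_connection and pn_connection_attachments come from cproton; the class shown is illustrative, not the binding's actual Connection):

# Sketch: how a binding class typically builds on Wrapper.
class Connection(Wrapper):

    def __init__(self, impl=pn_connection):
        # pn_connection creates a new C object; pn_connection_attachments
        # returns the record used to stash the Python-side attribute dict.
        Wrapper.__init__(self, impl, pn_connection_attachments)

    def _init(self):
        # Called once per underlying C object, the first time it is wrapped,
        # so Python-side state survives re-wrapping the same impl.
        self.offered = None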
apache-2.0
AustereCuriosity/astropy
astropy/modeling/tests/test_quantities_fitting.py
2
4268
# Licensed under a 3-clause BSD style license - see LICENSE.rst
"""
Tests that relate to fitting models with quantity parameters
"""
from __future__ import (absolute_import, unicode_literals, division,
                        print_function)

import numpy as np

from ..models import Gaussian1D
from ... import units as u
from ...units import UnitsError
from ...tests.helper import pytest, assert_quantity_allclose
from ...utils import NumpyRNGContext
from .. import fitting

try:
    from scipy import optimize
    HAS_SCIPY = True
except ImportError:
    HAS_SCIPY = False


# Fitting should be as intuitive as possible to the user. Essentially, models
# and fitting should work without units, but if one has units, the other
# should have units too, and the resulting fitted parameters will also have
# units.


def _fake_gaussian_data():

    # Generate fake data
    with NumpyRNGContext(12345):
        x = np.linspace(-5., 5., 2000)
        y = 3 * np.exp(-0.5 * (x - 1.3)**2 / 0.8**2)
        y += np.random.normal(0., 0.2, x.shape)

    # Attach units to data
    x = x * u.m
    y = y * u.Jy

    return x, y


@pytest.mark.skipif('not HAS_SCIPY')
def test_fitting_simple():

    x, y = _fake_gaussian_data()

    # Fit the data using a Gaussian with units
    g_init = Gaussian1D()
    fit_g = fitting.LevMarLSQFitter()
    g = fit_g(g_init, x, y)

    # TODO: update actual numerical results once implemented, but these should
    # be close to the values below.
    assert_quantity_allclose(g.amplitude, 3 * u.Jy, rtol=0.05)
    assert_quantity_allclose(g.mean, 1.3 * u.m, rtol=0.05)
    assert_quantity_allclose(g.stddev, 0.8 * u.m, rtol=0.05)


@pytest.mark.skipif('not HAS_SCIPY')
def test_fitting_with_initial_values():

    x, y = _fake_gaussian_data()

    # Fit the data using a Gaussian with units
    g_init = Gaussian1D(amplitude=1. * u.mJy, mean=3 * u.cm, stddev=2 * u.mm)
    fit_g = fitting.LevMarLSQFitter()
    g = fit_g(g_init, x, y)

    # TODO: update actual numerical results once implemented, but these should
    # be close to the values below.
    assert_quantity_allclose(g.amplitude, 3 * u.Jy, rtol=0.05)
    assert_quantity_allclose(g.mean, 1.3 * u.m, rtol=0.05)
    assert_quantity_allclose(g.stddev, 0.8 * u.m, rtol=0.05)


@pytest.mark.skipif('not HAS_SCIPY')
def test_fitting_missing_data_units():
    """
    Raise an error if the model has units but the data doesn't
    """
    g_init = Gaussian1D(amplitude=1. * u.mJy, mean=3 * u.cm, stddev=2 * u.mm)
    fit_g = fitting.LevMarLSQFitter()

    with pytest.raises(UnitsError) as exc:
        fit_g(g_init, [1, 2, 3], [4, 5, 6])
    assert exc.value.args[0] == ("'cm' (length) and '' (dimensionless) are "
                                 "not convertible")

    with pytest.raises(UnitsError) as exc:
        fit_g(g_init, [1, 2, 3] * u.m, [4, 5, 6])
    assert exc.value.args[0] == ("'mJy' (spectral flux density) and '' "
                                 "(dimensionless) are not convertible")


@pytest.mark.skipif('not HAS_SCIPY')
def test_fitting_missing_model_units():
    """
    Proceed if the data has units but the model doesn't
    """
    x, y = _fake_gaussian_data()

    g_init = Gaussian1D(amplitude=1., mean=3, stddev=2)
    fit_g = fitting.LevMarLSQFitter()
    g = fit_g(g_init, x, y)

    assert_quantity_allclose(g.amplitude, 3 * u.Jy, rtol=0.05)
    assert_quantity_allclose(g.mean, 1.3 * u.m, rtol=0.05)
    assert_quantity_allclose(g.stddev, 0.8 * u.m, rtol=0.05)

    g_init = Gaussian1D(amplitude=1., mean=3 * u.m, stddev=2 * u.m)
    fit_g = fitting.LevMarLSQFitter()
    g = fit_g(g_init, x, y)

    assert_quantity_allclose(g.amplitude, 3 * u.Jy, rtol=0.05)
    assert_quantity_allclose(g.mean, 1.3 * u.m, rtol=0.05)
    assert_quantity_allclose(g.stddev, 0.8 * u.m, rtol=0.05)


@pytest.mark.skipif('not HAS_SCIPY')
def test_fitting_incompatible_units():
    """
    Raise an error if the data and model have incompatible units
    """
    g_init = Gaussian1D(amplitude=1. * u.Jy, mean=3 * u.m, stddev=2 * u.cm)
    fit_g = fitting.LevMarLSQFitter()

    with pytest.raises(UnitsError) as exc:
        fit_g(g_init, [1, 2, 3] * u.Hz, [4, 5, 6] * u.Jy)
    assert exc.value.args[0] == ("'Hz' (frequency) and 'm' (length) are not "
                                 "convertible")
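The behaviour these tests exercise can be summarized in a short illustrative snippet, assuming the quantity-fitting support the TODOs above anticipate is in place (numbers are arbitrary):

# Illustrative usage: fitting a model with quantities attached.
import numpy as np
from astropy import units as u
from astropy.modeling import models, fitting

x = np.linspace(-5., 5., 200) * u.m
y = 3 * np.exp(-0.5 * (x.value - 1.3) ** 2 / 0.8 ** 2) * u.Jy

g_init = models.Gaussian1D(amplitude=1. * u.Jy, mean=0 * u.m, stddev=1 * u.m)
g = fitting.LevMarLSQFitter()(g_init, x, y)

print(g.mean)    # a Quantity in metres, close to 1.3 m
print(g.stddev)  # a Quantity in metres, close to 0.8 m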
bsd-3-clause
pbrunet/pythran
pythran/tests/openmp.legacy/omp_sections_reduction.py
4
9174
def omp_sections_reduction(): import math dt = 0.5 rounding_error = 1.E-9 sum = 7 dsum = 0 dt = 1. / 3. result = True product = 1 logic_and = 1 logic_or = 0 bit_and = 1 bit_or = 0 i = 0 exclusiv_bit_or = 0 known_sum = (1000 * 999) / 2 + 7 if 'omp parallel': if 'omp sections private(i) reduction(+:sum)': if 'omp section': for i in xrange(1,300): sum += i if 'omp section': for i in xrange(300,700): sum += i if 'omp section': for i in xrange(700,1000): sum += i if known_sum != sum: print "E: reduction(+:sum)" result = False diff = (1000 * 999) / 2 if 'omp parallel': if 'omp sections private(i) reduction(-:diff)': if 'omp section': for i in xrange(1,300): diff -= i if 'omp section': for i in xrange(300,700): diff -= i if 'omp section': for i in xrange(700,1000): diff -= i if diff != 0: print "E: reduction(-:diff)" result = False dsum = 0 dpt = 0 for i in xrange(0, 20): dpt *= dt dknown_sum = (1 - dpt) / (1 - dt) if 'omp parallel': if 'omp sections private(i) reduction(+:dsum)': if 'omp section': for i in xrange(0,7): dsum += math.pow(dt, i) if 'omp section': for i in xrange(7,14): dsum += math.pow(dt, i) if 'omp section': for i in xrange(14,20): dsum += math.pow(dt, i) if abs(dsum-dknown_sum) > rounding_error: print "E: reduction(+:dsum)" result = False dsum = 0 dpt = 0 for i in xrange(0, 20): dpt *= dt ddiff = (1 - dpt) / (1 - dt) if 'omp parallel': if 'omp sections private(i) reduction(-:ddiff)': if 'omp section': for i in xrange(0,6): ddiff -= math.pow(dt, i) if 'omp section': for i in xrange(6,12): ddiff -= math.pow(dt, i) if 'omp section': for i in xrange(12,20): ddiff -= math.pow(dt, i) if abs(ddiff) > rounding_error: print "E: reduction(-:ddiff)" result = False if 'omp parallel': if 'omp sections private(i) reduction(*:product)': if 'omp section': for i in xrange(1,3): product *= i if 'omp section': for i in xrange(3,6): product *= i if 'omp section': for i in xrange(6,11): product *= i known_product = 3628800 if known_product != product: print "E: reduction(*:product)" result = False logics = [1 for i in xrange(0,1000)] if 'omp parallel': if 'omp sections private(i) reduction(&&:logic_and)': if 'omp section': for i in xrange(0, 300): logic_and = (logic_and and logics[i]) if 'omp section': for i in xrange(300, 700): logic_and = (logic_and and logics[i]) if 'omp section': for i in xrange(700, 1000): logic_and = (logic_and and logics[i]) if not logic_and: print "E: reduction(&&:logic_and)" result = False logic_and = 1; logics[1000/2]=0 if 'omp parallel': if 'omp sections private(i) reduction(&&:logic_and)': if 'omp section': for i in xrange(0, 300): logic_and = (logic_and and logics[i]) if 'omp section': for i in xrange(300, 700): logic_and = (logic_and and logics[i]) if 'omp section': for i in xrange(700, 1000): logic_and = (logic_and and logics[i]) if logic_and: print "E: reduction(&&:logic_and) with logics[1000/2]=0" result = False logics = [0 for i in xrange(0,1000)] if 'omp parallel': if 'omp sections private(i) reduction(||:logic_or)': if 'omp section': for i in xrange(0, 300): logic_or = (logic_or or logics[i]) if 'omp section': for i in xrange(300, 700): logic_or = (logic_or or logics[i]) if 'omp section': for i in xrange(700, 1000): logic_or = (logic_or or logics[i]) if logic_or: print "E: reduction(||:logic_or)" result = False logic_or = 0; logics[1000/2]=1 if 'omp parallel': if 'omp sections private(i) reduction(||:logic_or)': if 'omp section': for i in xrange(0, 300): logic_or = (logic_or or logics[i]) if 'omp section': for i in xrange(300, 700): logic_or = (logic_or or 
logics[i]) if 'omp section': for i in xrange(700, 1000): logic_or = (logic_or or logics[i]) if not logic_or: print "E: reduction(||:logic_or) with logics[1000/2]=1" result = False logics = [1 for i in xrange(0,1000)] if 'omp parallel': if 'omp sections private(i) reduction(&:bit_and)': if 'omp section': for i in xrange(0, 300): bit_and = (bit_and & logics[i]) if 'omp section': for i in xrange(300, 700): bit_and = (bit_and & logics[i]) if 'omp section': for i in xrange(700, 1000): bit_and = (bit_and & logics[i]) if not bit_and: print "E: reduction(&:bit_and)" result = False bit_and = 1; logics[1000/2]=0 if 'omp parallel': if 'omp sections private(i) reduction(&:bit_and)': if 'omp section': for i in xrange(0, 300): bit_and = (bit_and & logics[i]) if 'omp section': for i in xrange(300, 700): bit_and = (bit_and & logics[i]) if 'omp section': for i in xrange(700, 1000): bit_and = (bit_and & logics[i]) if bit_and: print "E: reduction(&:bit_and) with logics[1000/2]=0" result = False logics = [0 for i in xrange(0,1000)] if 'omp parallel': if 'omp sections private(i) reduction(|:bit_or)': if 'omp section': for i in xrange(0, 300): bit_or = (bit_or | logics[i]) if 'omp section': for i in xrange(300, 700): bit_or = (bit_or | logics[i]) if 'omp section': for i in xrange(700, 1000): bit_or = (bit_or | logics[i]) if bit_or: print "E: reduction(|:bit_or)" result = False bit_or = 0; logics[1000/2]=1 if 'omp parallel': if 'omp sections private(i) reduction(|:bit_or)': if 'omp section': for i in xrange(0, 300): bit_or = (bit_or | logics[i]) if 'omp section': for i in xrange(300, 700): bit_or = (bit_or | logics[i]) if 'omp section': for i in xrange(700, 1000): bit_or = (bit_or | logics[i]) if not bit_or: print "E: reduction(|:bit_or) with logics[1000/2]=1" result = False logics = [0 for i in xrange(0,1000)] if 'omp parallel': if 'omp sections private(i) reduction(^:exclusiv_bit_or)': if 'omp section': for i in xrange(0, 300): exclusiv_bit_or = (exclusiv_bit_or ^ logics[i]) if 'omp section': for i in xrange(300, 700): exclusiv_bit_or = (exclusiv_bit_or ^ logics[i]) if 'omp section': for i in xrange(700, 1000): exclusiv_bit_or = (exclusiv_bit_or ^ logics[i]) if exclusiv_bit_or: print "E: reduction(^:exclusiv_bit_or)" result = False exclusiv_bit_or = 0; logics[1000/2]=1 if 'omp parallel': if 'omp sections private(i) reduction(^:exclusiv_bit_or)': if 'omp section': for i in xrange(0, 300): exclusiv_bit_or = (exclusiv_bit_or ^ logics[i]) if 'omp section': for i in xrange(300, 700): exclusiv_bit_or = (exclusiv_bit_or ^ logics[i]) if 'omp section': for i in xrange(700, 1000): exclusiv_bit_or = (exclusiv_bit_or ^ logics[i]) if not exclusiv_bit_or: print "E: reduction(^:exclusiv_bit_or) with logics[1000/2]=1" result = False return result
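Pythran recognizes these string-literal 'omp ...' annotations and turns them into OpenMP pragmas when the module is compiled to native code. A minimal sketch of the idiom (the export name and the build command are illustrative):

#pythran export parallel_sum()
def parallel_sum():
    total = 0
    i = 0
    if 'omp parallel':
        if 'omp sections private(i) reduction(+:total)':
            if 'omp section':
                for i in xrange(0, 500):
                    total += i
            if 'omp section':
                for i in xrange(500, 1000):
                    total += i
    # With OpenMP enabled, the two sections run on different threads and
    # their partial sums are combined by the reduction clause.
    return total

# Compiled with something like: pythran -fopenmp parallel_sum.py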
bsd-3-clause
thonkify/thonkify
src/lib/Crypto/Protocol/KDF.py
123
5071
#
#  KDF.py : a collection of Key Derivation Functions
#
# Part of the Python Cryptography Toolkit
#
# ===================================================================
# The contents of this file are dedicated to the public domain.  To
# the extent that dedication to the public domain is not available,
# everyone is granted a worldwide, perpetual, royalty-free,
# non-exclusive license to exercise all rights associated with the
# contents of this file for any purpose whatsoever.
# No rights are reserved.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
# ===================================================================

"""This file contains a collection of standard key derivation functions.

A key derivation function derives one or more secondary secret keys from
one primary secret (a master key or a pass phrase).

This is typically done to insulate the secondary keys from each other,
so that leakage of a secondary key does not compromise the security of the
master key, and to thwart attacks on pass phrases (e.g. via rainbow tables).

:undocumented: __revision__
"""

__revision__ = "$Id$"

import math
import struct

from Crypto.Util.py3compat import *
from Crypto.Hash import SHA as SHA1, HMAC
from Crypto.Util.strxor import strxor


def PBKDF1(password, salt, dkLen, count=1000, hashAlgo=None):
    """Derive one key from a password (or passphrase).

    This function performs key derivation according to an old version of
    the PKCS#5 standard (v1.5).

    This algorithm is called ``PBKDF1``. Even though it is still described
    in the latest version of the PKCS#5 standard (version 2, or RFC2898),
    newer applications should use the more secure and versatile `PBKDF2`
    instead.

    :Parameters:
     password : string
        The secret password or pass phrase to generate the key from.
     salt : byte string
        An 8 byte string to use for better protection from dictionary attacks.
        This value does not need to be kept secret, but it should be randomly
        chosen for each derivation.
     dkLen : integer
        The length of the desired key. It cannot be longer than the digest
        size of the chosen hash algorithm.
     count : integer
        The number of iterations to carry out. It's recommended to use at
        least 1000.
     hashAlgo : module
        The hash algorithm to use, as a module or an object from the
        `Crypto.Hash` package. The digest length must be no shorter than
        ``dkLen``. The default algorithm is `SHA1`.

    :Return: A byte string of length `dkLen` that can be used as key.
    """
    if not hashAlgo:
        hashAlgo = SHA1
    password = tobytes(password)
    pHash = hashAlgo.new(password+salt)
    digest = pHash.digest_size
    if dkLen>digest:
        raise ValueError("Selected hash algorithm has a too short digest (%d bytes)." % digest)
    if len(salt)!=8:
        raise ValueError("Salt is not 8 bytes long.")
    for i in xrange(count-1):
        pHash = pHash.new(pHash.digest())
    return pHash.digest()[:dkLen]


def PBKDF2(password, salt, dkLen=16, count=1000, prf=None):
    """Derive one or more keys from a password (or passphrase).

    This performs key derivation according to the PKCS#5 standard (v2.0),
    by means of the ``PBKDF2`` algorithm.

    :Parameters:
     password : string
        The secret password or pass phrase to generate the key from.
     salt : string
        A string to use for better protection from dictionary attacks.
        This value does not need to be kept secret, but it should be randomly
        chosen for each derivation. It is recommended to be at least 8 bytes
        long.
     dkLen : integer
        The cumulative length of the desired keys. Default is 16 bytes,
        suitable for instance for `Crypto.Cipher.AES`.
     count : integer
        The number of iterations to carry out. It's recommended to use at
        least 1000.
     prf : callable
        A pseudorandom function. It must be a function that returns a
        pseudorandom string from two parameters: a secret and a salt.
        If not specified, HMAC-SHA1 is used.

    :Return: A byte string of length `dkLen` that can be used as key material.
        If you want multiple keys, just break up this string into segments of
        the desired length.
    """
    password = tobytes(password)
    if prf is None:
        prf = lambda p,s: HMAC.new(p,s,SHA1).digest()
    key = b('')
    i = 1
    while len(key)<dkLen:
        U = previousU = prf(password,salt+struct.pack(">I", i))
        for j in xrange(count-1):
            previousU = t = prf(password,previousU)
            U = strxor(U,t)
        key += U
        i = i + 1
    return key[:dkLen]
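A brief usage sketch for PBKDF2 (the passphrase, salt handling and parameter choices are illustrative):

# Illustrative: derive two 16-byte keys from one passphrase with PBKDF2.
from Crypto.Protocol.KDF import PBKDF2
from Crypto.Hash import HMAC, SHA256
from Crypto.Random import get_random_bytes

salt = get_random_bytes(16)        # store alongside the derived keys
keys = PBKDF2("my passphrase", salt, dkLen=32, count=5000,
              prf=lambda p, s: HMAC.new(p, s, SHA256).digest())
key1, key2 = keys[:16], keys[16:]  # split the material into two keys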
mit
pchaigno/grreat
lib/time_utils.py
4
2416
#!/usr/bin/env python """This file contains various utilities for datetime handling.""" import calendar import datetime import re import time # Special Windows value for 'the beginning of time' NULL_FILETIME = datetime.datetime(1601, 1, 1, 0, 0, 0) # Regex for times in windows wmi converted format 20080726084622.375000+120 TIME_WMI_RE = re.compile(r"(?P<date>\d{14})\." # date then . r"(?P<subsecond>\d{6})[+-]" # secs then + or - r"(?P<tzoffset>\d{3})") # minute timezone offset def DatetimeToWmiTime(dt): """Take a datetime tuple and return it as yyyymmddHHMMSS.mmmmmm+UUU string. Args: dt: A datetime object. Returns: A string in CMI_DATETIME format. http://www.dmtf.org/sites/default/files/standards/documents/DSP0004_2.5.0.pdf """ td = dt.utcoffset() if td: offset = (td.seconds + (td.days * 60 * 60 * 24)) / 60 if offset >= 0: str_offset = "+%03d" % offset else: str_offset = "%03d" % offset else: str_offset = "+000" return u"%04d%02d%02d%02d%02d%02d.%06d%s" % (dt.year, dt.month, dt.day, dt.hour, dt.minute, dt.second, dt.microsecond, str_offset) def WmiTimeToEpoch(cimdatetime_str): """Convert a CIM_DATETIME string to microseconds since epoch. Args: cimdatetime_str: A string in WMI format Returns: Microseconds since epoch as int or 0 on failure. http://www.dmtf.org/sites/default/files/standards/documents/DSP0004_2.5.0.pdf """ re_match = TIME_WMI_RE.match(cimdatetime_str) try: t_dict = re_match.groupdict() flt_time = time.strptime(t_dict["date"], "%Y%m%d%H%M%S") epoch_time = int(calendar.timegm(flt_time)) * 1000000 # Note that the tzoffset value is ignored, CIM_DATETIME stores in UTC epoch_time += int(t_dict["subsecond"]) return epoch_time except (KeyError, AttributeError): return 0 def WinFileTimeToDateTime(filetime): """Take a Windows FILETIME as integer and convert to DateTime.""" return NULL_FILETIME + datetime.timedelta(microseconds=filetime/10) def AmericanDateToEpoch(date_str): """Take a US format date and return epoch. Used for some broken WMI calls.""" try: epoch = time.strptime(date_str, "%m/%d/%Y") return int(calendar.timegm(epoch)) * 1000000 except ValueError: return 0
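A quick round-trip sketch using the helpers above (the date value is arbitrary):

# Round-trip sketch: datetime -> CIM_DATETIME string -> epoch microseconds.
import datetime

dt = datetime.datetime(2008, 7, 26, 8, 46, 22, 375000)
wmi_str = DatetimeToWmiTime(dt)     # '20080726084622.375000+000' (naive dt)
epoch_us = WmiTimeToEpoch(wmi_str)  # microseconds since epoch, as int
print epoch_us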
apache-2.0
Parisson/Telemeta
telemeta/south_migrations/0009_auto__chg_field_mediaitem_language_iso__del_field_userprofile_departem.py
2
45229
# -*- coding: utf-8 -*- import datetime from south.db import db from south.v2 import SchemaMigration from django.db import models class Migration(SchemaMigration): def forwards(self, orm): # Changing field 'MediaItem.language_iso' db.alter_column('media_items', 'language_iso_id', self.gf('telemeta.models.core.ForeignKey')(on_delete=models.SET_NULL, to=orm['telemeta.Language'], null=True)) # Deleting field 'UserProfile.departement' db.delete_column('profiles', 'departement') # Adding field 'UserProfile.department' db.add_column('profiles', 'department', self.gf('telemeta.models.core.CharField')(default='', max_length=250, blank=True), keep_default=False) def backwards(self, orm): # Changing field 'MediaItem.language_iso' db.alter_column('media_items', 'language_iso_id', self.gf('telemeta.models.core.ForeignKey')(null=True, to=orm['telemeta.Language'])) # Adding field 'UserProfile.departement' db.add_column('profiles', 'departement', self.gf('telemeta.models.core.CharField')(default='', max_length=250, blank=True), keep_default=False) # Deleting field 'UserProfile.department' db.delete_column('profiles', 'department') models = { 'auth.group': { 'Meta': {'object_name': 'Group'}, 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}), 'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}) }, 'auth.permission': { 'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'}, 'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}), 'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '50'}) }, 'auth.user': { 'Meta': {'object_name': 'User'}, 'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), 'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}), 'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}), 'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}), 'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), 'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}), 'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}), 'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}), 'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'}) }, 'contenttypes.contenttype': { 'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"}, 'app_label': 
('django.db.models.fields.CharField', [], {'max_length': '100'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}) }, 'telemeta.acquisitionmode': { 'Meta': {'ordering': "['value']", 'object_name': 'AcquisitionMode', 'db_table': "'acquisition_modes'"}, 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'value': ('telemeta.models.core.CharField', [], {'unique': 'True', 'max_length': '250'}) }, 'telemeta.adconversion': { 'Meta': {'ordering': "['value']", 'object_name': 'AdConversion', 'db_table': "'ad_conversions'"}, 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'value': ('telemeta.models.core.CharField', [], {'unique': 'True', 'max_length': '250'}) }, 'telemeta.contextkeyword': { 'Meta': {'ordering': "['value']", 'object_name': 'ContextKeyword', 'db_table': "'context_keywords'"}, 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'value': ('telemeta.models.core.CharField', [], {'unique': 'True', 'max_length': '250'}) }, 'telemeta.criteria': { 'Meta': {'object_name': 'Criteria', 'db_table': "'search_criteria'"}, 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'key': ('telemeta.models.core.CharField', [], {'max_length': '250'}), 'value': ('telemeta.models.core.CharField', [], {'max_length': '250'}) }, 'telemeta.ethnicgroup': { 'Meta': {'ordering': "['value']", 'object_name': 'EthnicGroup', 'db_table': "'ethnic_groups'"}, 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'value': ('telemeta.models.core.CharField', [], {'unique': 'True', 'max_length': '250'}) }, 'telemeta.ethnicgroupalias': { 'Meta': {'ordering': "['ethnic_group__value']", 'unique_together': "(('ethnic_group', 'value'),)", 'object_name': 'EthnicGroupAlias', 'db_table': "'ethnic_group_aliases'"}, 'ethnic_group': ('telemeta.models.core.ForeignKey', [], {'related_name': "'aliases'", 'to': "orm['telemeta.EthnicGroup']"}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'value': ('telemeta.models.core.CharField', [], {'max_length': '250'}) }, 'telemeta.format': { 'Meta': {'object_name': 'Format', 'db_table': "'media_formats'"}, 'comments': ('telemeta.models.core.TextField', [], {'default': "''", 'blank': 'True'}), 'conservation_state': ('telemeta.models.core.CharField', [], {'default': "''", 'max_length': '250', 'blank': 'True'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'item': ('telemeta.models.core.ForeignKey', [], {'related_name': "'formats'", 'to': "orm['telemeta.MediaItem']"}), 'original_code': ('telemeta.models.core.CharField', [], {'max_length': '250'}), 'status': ('telemeta.models.core.CharField', [], {'default': "''", 'max_length': '250', 'blank': 'True'}), 'tape_diameter': ('telemeta.models.core.CharField', [], {'default': "''", 'max_length': '250', 'blank': 'True'}), 'tape_length': ('telemeta.models.core.WeakForeignKey', [], {'default': 'None', 'related_name': "'formats'", 'null': 'True', 'blank': 'True', 'to': "orm['telemeta.TapeLength']"}), 'tape_number': ('telemeta.models.core.CharField', [], {'default': "''", 'max_length': '250', 'blank': 'True'}), 'tape_reference': ('telemeta.models.core.CharField', [], {'default': "''", 'max_length': '250', 'blank': 'True'}), 'tape_speed': ('telemeta.models.core.WeakForeignKey', [], {'default': 'None', 'related_name': "'formats'", 
'null': 'True', 'blank': 'True', 'to': "orm['telemeta.TapeSpeed']"}), 'tape_thickness': ('telemeta.models.core.CharField', [], {'default': "''", 'max_length': '250', 'blank': 'True'}), 'tape_vendor': ('telemeta.models.core.WeakForeignKey', [], {'default': 'None', 'related_name': "'formats'", 'null': 'True', 'blank': 'True', 'to': "orm['telemeta.TapeVendor']"}), 'tape_width': ('telemeta.models.core.WeakForeignKey', [], {'default': 'None', 'related_name': "'formats'", 'null': 'True', 'blank': 'True', 'to': "orm['telemeta.TapeWidth']"}) }, 'telemeta.genericstyle': { 'Meta': {'ordering': "['value']", 'object_name': 'GenericStyle', 'db_table': "'generic_styles'"}, 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'value': ('telemeta.models.core.CharField', [], {'unique': 'True', 'max_length': '250'}) }, 'telemeta.instrument': { 'Meta': {'object_name': 'Instrument', 'db_table': "'instruments'"}, 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'name': ('telemeta.models.core.CharField', [], {'max_length': '250'}) }, 'telemeta.instrumentalias': { 'Meta': {'object_name': 'InstrumentAlias', 'db_table': "'instrument_aliases'"}, 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'name': ('telemeta.models.core.CharField', [], {'max_length': '250'}) }, 'telemeta.instrumentaliasrelation': { 'Meta': {'unique_together': "(('alias', 'instrument'),)", 'object_name': 'InstrumentAliasRelation', 'db_table': "'instrument_alias_relations'"}, 'alias': ('telemeta.models.core.ForeignKey', [], {'related_name': "'other_name'", 'to': "orm['telemeta.InstrumentAlias']"}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'instrument': ('telemeta.models.core.ForeignKey', [], {'related_name': "'relation'", 'to': "orm['telemeta.InstrumentAlias']"}) }, 'telemeta.instrumentrelation': { 'Meta': {'unique_together': "(('instrument', 'parent_instrument'),)", 'object_name': 'InstrumentRelation', 'db_table': "'instrument_relations'"}, 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'instrument': ('telemeta.models.core.ForeignKey', [], {'related_name': "'parent_relation'", 'to': "orm['telemeta.Instrument']"}), 'parent_instrument': ('telemeta.models.core.ForeignKey', [], {'related_name': "'child_relation'", 'to': "orm['telemeta.Instrument']"}) }, 'telemeta.language': { 'Meta': {'ordering': "['name']", 'object_name': 'Language', 'db_table': "'languages'"}, 'comment': ('telemeta.models.core.TextField', [], {'default': "''", 'blank': 'True'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'identifier': ('telemeta.models.core.CharField', [], {'default': "''", 'max_length': '3', 'blank': 'True'}), 'name': ('telemeta.models.core.CharField', [], {'default': "''", 'max_length': '250', 'blank': 'True'}), 'part1': ('telemeta.models.core.CharField', [], {'default': "''", 'max_length': '1', 'blank': 'True'}), 'part2B': ('telemeta.models.core.CharField', [], {'default': "''", 'max_length': '3', 'blank': 'True'}), 'part2T': ('telemeta.models.core.CharField', [], {'default': "''", 'max_length': '3', 'blank': 'True'}), 'scope': ('telemeta.models.core.CharField', [], {'default': "''", 'max_length': '1', 'blank': 'True'}), 'type': ('telemeta.models.core.CharField', [], {'default': "''", 'max_length': '1', 'blank': 'True'}) }, 'telemeta.legalright': { 'Meta': {'ordering': "['value']", 'object_name': 'LegalRight', 'db_table': "'legal_rights'"}, 'id': ('django.db.models.fields.AutoField', [], 
{'primary_key': 'True'}), 'value': ('telemeta.models.core.CharField', [], {'unique': 'True', 'max_length': '250'}) }, 'telemeta.location': { 'Meta': {'ordering': "['name']", 'object_name': 'Location', 'db_table': "'locations'"}, 'complete_type': ('telemeta.models.core.ForeignKey', [], {'related_name': "'locations'", 'to': "orm['telemeta.LocationType']"}), 'current_location': ('telemeta.models.core.WeakForeignKey', [], {'default': 'None', 'related_name': "'past_names'", 'null': 'True', 'blank': 'True', 'to': "orm['telemeta.Location']"}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'is_authoritative': ('telemeta.models.core.BooleanField', [], {'default': 'False'}), 'latitude': ('telemeta.models.core.FloatField', [], {'default': 'None', 'null': 'True', 'blank': 'True'}), 'longitude': ('telemeta.models.core.FloatField', [], {'default': 'None', 'null': 'True', 'blank': 'True'}), 'name': ('telemeta.models.core.CharField', [], {'unique': 'True', 'max_length': '150'}), 'type': ('telemeta.models.core.IntegerField', [], {'default': '0', 'db_index': 'True', 'blank': 'True'}) }, 'telemeta.locationalias': { 'Meta': {'ordering': "['alias']", 'unique_together': "(('location', 'alias'),)", 'object_name': 'LocationAlias', 'db_table': "'location_aliases'"}, 'alias': ('telemeta.models.core.CharField', [], {'max_length': '150'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'is_authoritative': ('telemeta.models.core.BooleanField', [], {'default': 'False'}), 'location': ('telemeta.models.core.ForeignKey', [], {'related_name': "'aliases'", 'to': "orm['telemeta.Location']"}) }, 'telemeta.locationrelation': { 'Meta': {'ordering': "['ancestor_location__name']", 'unique_together': "(('location', 'ancestor_location'),)", 'object_name': 'LocationRelation', 'db_table': "'location_relations'"}, 'ancestor_location': ('telemeta.models.core.ForeignKey', [], {'related_name': "'descendant_relations'", 'to': "orm['telemeta.Location']"}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'is_authoritative': ('telemeta.models.core.BooleanField', [], {'default': 'False'}), 'is_direct': ('telemeta.models.core.BooleanField', [], {'default': 'False', 'db_index': 'True'}), 'location': ('telemeta.models.core.ForeignKey', [], {'related_name': "'ancestor_relations'", 'to': "orm['telemeta.Location']"}) }, 'telemeta.locationtype': { 'Meta': {'ordering': "['name']", 'object_name': 'LocationType', 'db_table': "'location_types'"}, 'code': ('telemeta.models.core.CharField', [], {'unique': 'True', 'max_length': '64'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'name': ('telemeta.models.core.CharField', [], {'max_length': '150'}) }, 'telemeta.mediacollection': { 'Meta': {'ordering': "['code']", 'object_name': 'MediaCollection', 'db_table': "'media_collections'"}, 'a_informer_07_03': ('telemeta.models.core.CharField', [], {'default': "''", 'max_length': '250', 'blank': 'True'}), 'acquisition_mode': ('telemeta.models.core.WeakForeignKey', [], {'default': 'None', 'related_name': "'collections'", 'null': 'True', 'blank': 'True', 'to': "orm['telemeta.AcquisitionMode']"}), 'ad_conversion': ('telemeta.models.core.WeakForeignKey', [], {'default': 'None', 'related_name': "'collections'", 'null': 'True', 'blank': 'True', 'to': "orm['telemeta.AdConversion']"}), 'alt_ids': ('telemeta.models.core.CharField', [], {'default': "''", 'max_length': '250', 'blank': 'True'}), 'alt_title': ('telemeta.models.core.CharField', [], {'default': "''", 
'max_length': '250', 'blank': 'True'}), 'approx_duration': ('telemeta.models.core.DurationField', [], {'default': "'0'", 'blank': 'True'}), 'booklet_author': ('telemeta.models.core.CharField', [], {'default': "''", 'max_length': '250', 'blank': 'True'}), 'booklet_description': ('telemeta.models.core.TextField', [], {'default': "''", 'blank': 'True'}), 'cnrs_contributor': ('telemeta.models.core.CharField', [], {'default': "''", 'max_length': '250', 'blank': 'True'}), 'code': ('telemeta.models.core.CharField', [], {'unique': 'True', 'max_length': '250'}), 'collector': ('telemeta.models.core.CharField', [], {'default': "''", 'max_length': '250', 'blank': 'True'}), 'collector_is_creator': ('telemeta.models.core.BooleanField', [], {'default': 'False'}), 'comment': ('telemeta.models.core.TextField', [], {'default': "''", 'blank': 'True'}), 'conservation_site': ('telemeta.models.core.CharField', [], {'default': "''", 'max_length': '250', 'blank': 'True'}), 'creator': ('telemeta.models.core.CharField', [], {'default': "''", 'max_length': '250', 'blank': 'True'}), 'doctype_code': ('telemeta.models.core.IntegerField', [], {'default': '0', 'blank': 'True'}), 'external_references': ('telemeta.models.core.TextField', [], {'default': "''", 'blank': 'True'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'is_published': ('telemeta.models.core.BooleanField', [], {'default': 'False'}), 'items_done': ('telemeta.models.core.CharField', [], {'default': "''", 'max_length': '250', 'blank': 'True'}), 'legal_rights': ('telemeta.models.core.WeakForeignKey', [], {'default': 'None', 'related_name': "'collections'", 'null': 'True', 'blank': 'True', 'to': "orm['telemeta.LegalRight']"}), 'metadata_author': ('telemeta.models.core.WeakForeignKey', [], {'default': 'None', 'related_name': "'collections'", 'null': 'True', 'blank': 'True', 'to': "orm['telemeta.MetadataAuthor']"}), 'metadata_writer': ('telemeta.models.core.WeakForeignKey', [], {'default': 'None', 'related_name': "'collections'", 'null': 'True', 'blank': 'True', 'to': "orm['telemeta.MetadataWriter']"}), 'old_code': ('telemeta.models.core.CharField', [], {'default': 'None', 'max_length': '250', 'null': 'True', 'blank': 'True'}), 'physical_format': ('telemeta.models.core.WeakForeignKey', [], {'default': 'None', 'related_name': "'collections'", 'null': 'True', 'blank': 'True', 'to': "orm['telemeta.PhysicalFormat']"}), 'physical_items_num': ('telemeta.models.core.IntegerField', [], {'default': '0', 'blank': 'True'}), 'public_access': ('telemeta.models.core.CharField', [], {'default': "'metadata'", 'max_length': '16', 'blank': 'True'}), 'publisher': ('telemeta.models.core.WeakForeignKey', [], {'default': 'None', 'related_name': "'collections'", 'null': 'True', 'blank': 'True', 'to': "orm['telemeta.Publisher']"}), 'publisher_collection': ('telemeta.models.core.WeakForeignKey', [], {'default': 'None', 'related_name': "'collections'", 'null': 'True', 'blank': 'True', 'to': "orm['telemeta.PublisherCollection']"}), 'publisher_serial': ('telemeta.models.core.CharField', [], {'default': "''", 'max_length': '250', 'blank': 'True'}), 'publishing_status': ('telemeta.models.core.WeakForeignKey', [], {'default': 'None', 'related_name': "'collections'", 'null': 'True', 'blank': 'True', 'to': "orm['telemeta.PublishingStatus']"}), 'recorded_from_year': ('telemeta.models.core.IntegerField', [], {'default': '0', 'blank': 'True'}), 'recorded_to_year': ('telemeta.models.core.IntegerField', [], {'default': '0', 'blank': 'True'}), 'recording_context': 
('telemeta.models.core.WeakForeignKey', [], {'default': 'None', 'related_name': "'collections'", 'null': 'True', 'blank': 'True', 'to': "orm['telemeta.RecordingContext']"}), 'reference': ('telemeta.models.core.CharField', [], {'default': 'None', 'max_length': '250', 'unique': 'True', 'null': 'True', 'blank': 'True'}), 'state': ('telemeta.models.core.TextField', [], {'default': "''", 'blank': 'True'}), 'title': ('telemeta.models.core.CharField', [], {'max_length': '250'}), 'travail': ('telemeta.models.core.CharField', [], {'default': "''", 'max_length': '250', 'blank': 'True'}), 'year_published': ('telemeta.models.core.IntegerField', [], {'default': '0', 'blank': 'True'}) }, 'telemeta.mediacollectionrelated': { 'Meta': {'object_name': 'MediaCollectionRelated', 'db_table': "'media_collection_related'"}, 'collection': ('telemeta.models.core.ForeignKey', [], {'related_name': "'related'", 'to': "orm['telemeta.MediaCollection']"}), 'credits': ('telemeta.models.core.CharField', [], {'default': "''", 'max_length': '250', 'blank': 'True'}), 'date': ('telemeta.models.core.DateTimeField', [], {'default': 'None', 'auto_now': 'True', 'null': 'True', 'blank': 'True'}), 'description': ('telemeta.models.core.TextField', [], {'default': "''", 'blank': 'True'}), 'file': ('telemeta.models.core.FileField', [], {'default': "''", 'max_length': '100', 'db_column': "'filename'", 'blank': 'True'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'mime_type': ('telemeta.models.core.CharField', [], {'default': 'None', 'max_length': '250', 'null': 'True', 'blank': 'True'}), 'title': ('telemeta.models.core.CharField', [], {'default': "''", 'max_length': '250', 'blank': 'True'}), 'url': ('telemeta.models.core.CharField', [], {'default': "''", 'max_length': '500', 'blank': 'True'}) }, 'telemeta.mediacorpus': { 'Meta': {'object_name': 'MediaCorpus', 'db_table': "'media_corpus'"}, 'children': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'corpus'", 'null': 'True', 'symmetrical': 'False', 'to': "orm['telemeta.MediaCollection']"}), 'code': ('telemeta.models.core.CharField', [], {'unique': 'True', 'max_length': '250'}), 'description': ('telemeta.models.core.CharField', [], {'default': "''", 'max_length': '250', 'blank': 'True'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'public_access': ('telemeta.models.core.CharField', [], {'default': "'metadata'", 'max_length': '16', 'blank': 'True'}), 'recorded_from_year': ('telemeta.models.core.IntegerField', [], {'default': '0', 'blank': 'True'}), 'recorded_to_year': ('telemeta.models.core.IntegerField', [], {'default': '0', 'blank': 'True'}), 'reference': ('telemeta.models.core.CharField', [], {'default': 'None', 'max_length': '250', 'unique': 'True', 'null': 'True', 'blank': 'True'}), 'title': ('telemeta.models.core.CharField', [], {'max_length': '250'}) }, 'telemeta.mediacorpusrelated': { 'Meta': {'object_name': 'MediaCorpusRelated', 'db_table': "'media_corpus_related'"}, 'credits': ('telemeta.models.core.CharField', [], {'default': "''", 'max_length': '250', 'blank': 'True'}), 'date': ('telemeta.models.core.DateTimeField', [], {'default': 'None', 'auto_now': 'True', 'null': 'True', 'blank': 'True'}), 'description': ('telemeta.models.core.TextField', [], {'default': "''", 'blank': 'True'}), 'file': ('telemeta.models.core.FileField', [], {'default': "''", 'max_length': '100', 'db_column': "'filename'", 'blank': 'True'}), 'id': ('django.db.models.fields.AutoField', [], 
{'primary_key': 'True'}), 'mime_type': ('telemeta.models.core.CharField', [], {'default': 'None', 'max_length': '250', 'null': 'True', 'blank': 'True'}), 'resource': ('telemeta.models.core.ForeignKey', [], {'related_name': "'related'", 'to': "orm['telemeta.MediaCorpus']"}), 'title': ('telemeta.models.core.CharField', [], {'default': "''", 'max_length': '250', 'blank': 'True'}), 'url': ('telemeta.models.core.CharField', [], {'default': "''", 'max_length': '500', 'blank': 'True'}) }, 'telemeta.mediafonds': { 'Meta': {'object_name': 'MediaFonds', 'db_table': "'media_fonds'"}, 'children': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'fonds'", 'null': 'True', 'symmetrical': 'False', 'to': "orm['telemeta.MediaCorpus']"}), 'code': ('telemeta.models.core.CharField', [], {'unique': 'True', 'max_length': '250'}), 'description': ('telemeta.models.core.CharField', [], {'default': "''", 'max_length': '250', 'blank': 'True'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'public_access': ('telemeta.models.core.CharField', [], {'default': "'metadata'", 'max_length': '16', 'blank': 'True'}), 'reference': ('telemeta.models.core.CharField', [], {'default': 'None', 'max_length': '250', 'unique': 'True', 'null': 'True', 'blank': 'True'}), 'title': ('telemeta.models.core.CharField', [], {'max_length': '250'}) }, 'telemeta.mediafondsrelated': { 'Meta': {'object_name': 'MediaFondsRelated', 'db_table': "'media_fonds_related'"}, 'credits': ('telemeta.models.core.CharField', [], {'default': "''", 'max_length': '250', 'blank': 'True'}), 'date': ('telemeta.models.core.DateTimeField', [], {'default': 'None', 'auto_now': 'True', 'null': 'True', 'blank': 'True'}), 'description': ('telemeta.models.core.TextField', [], {'default': "''", 'blank': 'True'}), 'file': ('telemeta.models.core.FileField', [], {'default': "''", 'max_length': '100', 'db_column': "'filename'", 'blank': 'True'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'mime_type': ('telemeta.models.core.CharField', [], {'default': 'None', 'max_length': '250', 'null': 'True', 'blank': 'True'}), 'resource': ('telemeta.models.core.ForeignKey', [], {'related_name': "'related'", 'to': "orm['telemeta.MediaFonds']"}), 'title': ('telemeta.models.core.CharField', [], {'default': "''", 'max_length': '250', 'blank': 'True'}), 'url': ('telemeta.models.core.CharField', [], {'default': "''", 'max_length': '500', 'blank': 'True'}) }, 'telemeta.mediaitem': { 'Meta': {'object_name': 'MediaItem', 'db_table': "'media_items'"}, 'alt_title': ('telemeta.models.core.CharField', [], {'default': "''", 'max_length': '250', 'blank': 'True'}), 'approx_duration': ('telemeta.models.core.DurationField', [], {'default': "'0'", 'blank': 'True'}), 'author': ('telemeta.models.core.CharField', [], {'default': "''", 'max_length': '250', 'blank': 'True'}), 'code': ('telemeta.models.core.CharField', [], {'default': "''", 'unique': 'True', 'max_length': '250', 'blank': 'True'}), 'collection': ('telemeta.models.core.ForeignKey', [], {'related_name': "'items'", 'to': "orm['telemeta.MediaCollection']"}), 'collector': ('telemeta.models.core.CharField', [], {'default': "''", 'max_length': '250', 'blank': 'True'}), 'collector_from_collection': ('telemeta.models.core.BooleanField', [], {'default': 'False'}), 'collector_selection': ('telemeta.models.core.CharField', [], {'default': "''", 'max_length': '250', 'blank': 'True'}), 'comment': ('telemeta.models.core.TextField', [], {'default': "''", 'blank': 
'True'}), 'context_comment': ('telemeta.models.core.TextField', [], {'default': "''", 'blank': 'True'}), 'copied_from_item': ('telemeta.models.core.WeakForeignKey', [], {'default': 'None', 'related_name': "'copies'", 'null': 'True', 'blank': 'True', 'to': "orm['telemeta.MediaItem']"}), 'creator_reference': ('telemeta.models.core.CharField', [], {'default': "''", 'max_length': '250', 'blank': 'True'}), 'cultural_area': ('telemeta.models.core.CharField', [], {'default': "''", 'max_length': '250', 'blank': 'True'}), 'ethnic_group': ('telemeta.models.core.WeakForeignKey', [], {'default': 'None', 'related_name': "'items'", 'null': 'True', 'blank': 'True', 'to': "orm['telemeta.EthnicGroup']"}), 'external_references': ('telemeta.models.core.TextField', [], {'default': "''", 'blank': 'True'}), 'file': ('telemeta.models.core.FileField', [], {'default': "''", 'max_length': '100', 'db_column': "'filename'", 'blank': 'True'}), 'generic_style': ('telemeta.models.core.WeakForeignKey', [], {'default': 'None', 'related_name': "'items'", 'null': 'True', 'blank': 'True', 'to': "orm['telemeta.GenericStyle']"}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'language': ('telemeta.models.core.CharField', [], {'default': "''", 'max_length': '250', 'blank': 'True'}), 'language_iso': ('telemeta.models.core.ForeignKey', [], {'related_name': "'items'", 'on_delete': 'models.SET_NULL', 'default': 'None', 'to': "orm['telemeta.Language']", 'blank': 'True', 'null': 'True'}), 'location': ('telemeta.models.core.WeakForeignKey', [], {'default': 'None', 'to': "orm['telemeta.Location']", 'null': 'True', 'blank': 'True'}), 'location_comment': ('telemeta.models.core.CharField', [], {'default': "''", 'max_length': '250', 'blank': 'True'}), 'moda_execut': ('telemeta.models.core.CharField', [], {'default': "''", 'max_length': '250', 'blank': 'True'}), 'old_code': ('telemeta.models.core.CharField', [], {'default': "''", 'max_length': '250', 'blank': 'True'}), 'public_access': ('telemeta.models.core.CharField', [], {'default': "'metadata'", 'max_length': '16', 'blank': 'True'}), 'recorded_from_date': ('telemeta.models.core.DateField', [], {'default': 'None', 'null': 'True', 'blank': 'True'}), 'recorded_to_date': ('telemeta.models.core.DateField', [], {'default': 'None', 'null': 'True', 'blank': 'True'}), 'title': ('telemeta.models.core.CharField', [], {'default': "''", 'max_length': '250', 'blank': 'True'}), 'track': ('telemeta.models.core.CharField', [], {'default': "''", 'max_length': '250', 'blank': 'True'}), 'vernacular_style': ('telemeta.models.core.WeakForeignKey', [], {'default': 'None', 'related_name': "'items'", 'null': 'True', 'blank': 'True', 'to': "orm['telemeta.VernacularStyle']"}) }, 'telemeta.mediaitemanalysis': { 'Meta': {'ordering': "['name']", 'object_name': 'MediaItemAnalysis', 'db_table': "'media_analysis'"}, 'analyzer_id': ('telemeta.models.core.CharField', [], {'max_length': '250'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'item': ('telemeta.models.core.ForeignKey', [], {'related_name': "'analysis'", 'to': "orm['telemeta.MediaItem']"}), 'name': ('telemeta.models.core.CharField', [], {'default': "''", 'max_length': '250', 'blank': 'True'}), 'unit': ('telemeta.models.core.CharField', [], {'default': "''", 'max_length': '250', 'blank': 'True'}), 'value': ('telemeta.models.core.CharField', [], {'default': "''", 'max_length': '250', 'blank': 'True'}) }, 'telemeta.mediaitemkeyword': { 'Meta': {'unique_together': "(('item', 'keyword'),)", 'object_name': 
'MediaItemKeyword', 'db_table': "'media_item_keywords'"}, 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'item': ('telemeta.models.core.ForeignKey', [], {'related_name': "'keyword_relations'", 'to': "orm['telemeta.MediaItem']"}), 'keyword': ('telemeta.models.core.ForeignKey', [], {'related_name': "'item_relations'", 'to': "orm['telemeta.ContextKeyword']"}) }, 'telemeta.mediaitemmarker': { 'Meta': {'object_name': 'MediaItemMarker', 'db_table': "'media_markers'"}, 'author': ('telemeta.models.core.ForeignKey', [], {'related_name': "'markers'", 'to': "orm['auth.User']"}), 'date': ('telemeta.models.core.DateTimeField', [], {'default': 'None', 'auto_now': 'True', 'null': 'True', 'blank': 'True'}), 'description': ('telemeta.models.core.TextField', [], {'default': "''", 'blank': 'True'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'item': ('telemeta.models.core.ForeignKey', [], {'related_name': "'markers'", 'to': "orm['telemeta.MediaItem']"}), 'public_id': ('telemeta.models.core.CharField', [], {'max_length': '250'}), 'time': ('telemeta.models.core.FloatField', [], {'default': '0', 'blank': 'True'}), 'title': ('telemeta.models.core.CharField', [], {'default': "''", 'max_length': '250', 'blank': 'True'}) }, 'telemeta.mediaitemperformance': { 'Meta': {'object_name': 'MediaItemPerformance', 'db_table': "'media_item_performances'"}, 'alias': ('telemeta.models.core.WeakForeignKey', [], {'default': 'None', 'related_name': "'performances'", 'null': 'True', 'blank': 'True', 'to': "orm['telemeta.InstrumentAlias']"}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'instrument': ('telemeta.models.core.WeakForeignKey', [], {'default': 'None', 'related_name': "'performances'", 'null': 'True', 'blank': 'True', 'to': "orm['telemeta.Instrument']"}), 'instruments_num': ('telemeta.models.core.CharField', [], {'default': "''", 'max_length': '250', 'blank': 'True'}), 'media_item': ('telemeta.models.core.ForeignKey', [], {'related_name': "'performances'", 'to': "orm['telemeta.MediaItem']"}), 'musicians': ('telemeta.models.core.CharField', [], {'default': "''", 'max_length': '250', 'blank': 'True'}) }, 'telemeta.mediaitemrelated': { 'Meta': {'object_name': 'MediaItemRelated', 'db_table': "'media_item_related'"}, 'credits': ('telemeta.models.core.CharField', [], {'default': "''", 'max_length': '250', 'blank': 'True'}), 'date': ('telemeta.models.core.DateTimeField', [], {'default': 'None', 'auto_now': 'True', 'null': 'True', 'blank': 'True'}), 'description': ('telemeta.models.core.TextField', [], {'default': "''", 'blank': 'True'}), 'file': ('telemeta.models.core.FileField', [], {'default': "''", 'max_length': '100', 'db_column': "'filename'", 'blank': 'True'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'item': ('telemeta.models.core.ForeignKey', [], {'related_name': "'related'", 'to': "orm['telemeta.MediaItem']"}), 'mime_type': ('telemeta.models.core.CharField', [], {'default': 'None', 'max_length': '250', 'null': 'True', 'blank': 'True'}), 'title': ('telemeta.models.core.CharField', [], {'default': "''", 'max_length': '250', 'blank': 'True'}), 'url': ('telemeta.models.core.CharField', [], {'default': "''", 'max_length': '500', 'blank': 'True'}) }, 'telemeta.mediaitemtranscodingflag': { 'Meta': {'object_name': 'MediaItemTranscodingFlag', 'db_table': "'media_transcoding'"}, 'date': ('telemeta.models.core.DateTimeField', [], {'default': 'None', 'auto_now': 'True', 'null': 'True', 'blank': 'True'}), 'id': 
('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'item': ('telemeta.models.core.ForeignKey', [], {'related_name': "'transcoding'", 'to': "orm['telemeta.MediaItem']"}), 'mime_type': ('telemeta.models.core.CharField', [], {'max_length': '250'}), 'value': ('telemeta.models.core.BooleanField', [], {'default': 'False'}) }, 'telemeta.mediapart': { 'Meta': {'object_name': 'MediaPart', 'db_table': "'media_parts'"}, 'end': ('telemeta.models.core.FloatField', [], {}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'item': ('telemeta.models.core.ForeignKey', [], {'related_name': "'parts'", 'to': "orm['telemeta.MediaItem']"}), 'start': ('telemeta.models.core.FloatField', [], {}), 'title': ('telemeta.models.core.CharField', [], {'max_length': '250'}) }, 'telemeta.metadataauthor': { 'Meta': {'ordering': "['value']", 'object_name': 'MetadataAuthor', 'db_table': "'metadata_authors'"}, 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'value': ('telemeta.models.core.CharField', [], {'unique': 'True', 'max_length': '250'}) }, 'telemeta.metadatawriter': { 'Meta': {'ordering': "['value']", 'object_name': 'MetadataWriter', 'db_table': "'metadata_writers'"}, 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'value': ('telemeta.models.core.CharField', [], {'unique': 'True', 'max_length': '250'}) }, 'telemeta.physicalformat': { 'Meta': {'ordering': "['value']", 'object_name': 'PhysicalFormat', 'db_table': "'physical_formats'"}, 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'value': ('telemeta.models.core.CharField', [], {'unique': 'True', 'max_length': '250'}) }, 'telemeta.playlist': { 'Meta': {'object_name': 'Playlist', 'db_table': "'playlists'"}, 'author': ('telemeta.models.core.ForeignKey', [], {'related_name': "'playlists'", 'db_column': "'author'", 'to': "orm['auth.User']"}), 'description': ('telemeta.models.core.TextField', [], {'default': "''", 'blank': 'True'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'public_id': ('telemeta.models.core.CharField', [], {'max_length': '250'}), 'title': ('telemeta.models.core.CharField', [], {'max_length': '250'}) }, 'telemeta.playlistresource': { 'Meta': {'object_name': 'PlaylistResource', 'db_table': "'playlist_resources'"}, 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'playlist': ('telemeta.models.core.ForeignKey', [], {'related_name': "'resources'", 'to': "orm['telemeta.Playlist']"}), 'public_id': ('telemeta.models.core.CharField', [], {'max_length': '250'}), 'resource_id': ('telemeta.models.core.CharField', [], {'max_length': '250'}), 'resource_type': ('telemeta.models.core.CharField', [], {'max_length': '250'}) }, 'telemeta.publisher': { 'Meta': {'ordering': "['value']", 'object_name': 'Publisher', 'db_table': "'publishers'"}, 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'value': ('telemeta.models.core.CharField', [], {'unique': 'True', 'max_length': '250'}) }, 'telemeta.publishercollection': { 'Meta': {'ordering': "['value']", 'object_name': 'PublisherCollection', 'db_table': "'publisher_collections'"}, 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'publisher': ('telemeta.models.core.ForeignKey', [], {'related_name': "'publisher_collections'", 'to': "orm['telemeta.Publisher']"}), 'value': ('telemeta.models.core.CharField', [], {'max_length': '250'}) }, 'telemeta.publishingstatus': { 'Meta': {'ordering': "['value']", 
'object_name': 'PublishingStatus', 'db_table': "'publishing_status'"}, 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'value': ('telemeta.models.core.CharField', [], {'unique': 'True', 'max_length': '250'}) }, 'telemeta.recordingcontext': { 'Meta': {'ordering': "['value']", 'object_name': 'RecordingContext', 'db_table': "'recording_contexts'"}, 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'value': ('telemeta.models.core.CharField', [], {'unique': 'True', 'max_length': '250'}) }, 'telemeta.revision': { 'Meta': {'object_name': 'Revision', 'db_table': "'revisions'"}, 'change_type': ('telemeta.models.core.CharField', [], {'max_length': '16'}), 'element_id': ('telemeta.models.core.IntegerField', [], {}), 'element_type': ('telemeta.models.core.CharField', [], {'max_length': '16'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'time': ('telemeta.models.core.DateTimeField', [], {'default': 'None', 'auto_now_add': 'True', 'null': 'True', 'blank': 'True'}), 'user': ('telemeta.models.core.ForeignKey', [], {'related_name': "'revisions'", 'db_column': "'username'", 'to': "orm['auth.User']"}) }, 'telemeta.search': { 'Meta': {'ordering': "['-date']", 'object_name': 'Search', 'db_table': "'searches'"}, 'criteria': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'search'", 'null': 'True', 'symmetrical': 'False', 'to': "orm['telemeta.Criteria']"}), 'date': ('telemeta.models.core.DateTimeField', [], {'default': 'None', 'auto_now_add': 'True', 'null': 'True', 'blank': 'True'}), 'description': ('telemeta.models.core.CharField', [], {'default': "''", 'max_length': '250', 'blank': 'True'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'username': ('telemeta.models.core.ForeignKey', [], {'related_name': "'searches'", 'db_column': "'username'", 'to': "orm['auth.User']"}) }, 'telemeta.tapelength': { 'Meta': {'ordering': "['value']", 'object_name': 'TapeLength', 'db_table': "'tape_length'"}, 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'value': ('telemeta.models.core.CharField', [], {'unique': 'True', 'max_length': '250'}) }, 'telemeta.tapespeed': { 'Meta': {'ordering': "['value']", 'object_name': 'TapeSpeed', 'db_table': "'tape_speed'"}, 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'value': ('telemeta.models.core.CharField', [], {'unique': 'True', 'max_length': '250'}) }, 'telemeta.tapevendor': { 'Meta': {'ordering': "['value']", 'object_name': 'TapeVendor', 'db_table': "'tape_vendor'"}, 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'value': ('telemeta.models.core.CharField', [], {'unique': 'True', 'max_length': '250'}) }, 'telemeta.tapewidth': { 'Meta': {'ordering': "['value']", 'object_name': 'TapeWidth', 'db_table': "'tape_width'"}, 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'value': ('telemeta.models.core.CharField', [], {'unique': 'True', 'max_length': '250'}) }, 'telemeta.userprofile': { 'Meta': {'object_name': 'UserProfile', 'db_table': "'profiles'"}, 'address': ('telemeta.models.core.TextField', [], {'default': "''", 'blank': 'True'}), 'attachment': ('telemeta.models.core.CharField', [], {'default': "''", 'max_length': '250', 'blank': 'True'}), 'department': ('telemeta.models.core.CharField', [], {'default': "''", 'max_length': '250', 'blank': 'True'}), 'expiration_date': ('telemeta.models.core.DateField', [], {'default': 'None', 'null': 
'True', 'blank': 'True'}), 'function': ('telemeta.models.core.CharField', [], {'default': "''", 'max_length': '250', 'blank': 'True'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'institution': ('telemeta.models.core.CharField', [], {'default': "''", 'max_length': '250', 'blank': 'True'}), 'telephone': ('telemeta.models.core.CharField', [], {'default': "''", 'max_length': '250', 'blank': 'True'}), 'user': ('telemeta.models.core.ForeignKey', [], {'to': "orm['auth.User']", 'unique': 'True'}) }, 'telemeta.vernacularstyle': { 'Meta': {'ordering': "['value']", 'object_name': 'VernacularStyle', 'db_table': "'vernacular_styles'"}, 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'value': ('telemeta.models.core.CharField', [], {'unique': 'True', 'max_length': '250'}) } } complete_apps = ['telemeta']
agpl-3.0
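The frozen `models` dict above is South's serialized snapshot of the ORM: each field maps to a triple of (field class path, positional args, keyword args), with every value stored as a string. Below is a minimal sketch of the plain-Django model that the 'telemeta.genericstyle' entry describes; it is an approximation, since the real project uses the telemeta.models.core field wrappers rather than django.db.models directly, and it assumes a configured Django app.

# Sketch of the model behind the frozen 'telemeta.genericstyle' entry;
# approximate, not the project's actual code.
from django.db import models

class GenericStyle(models.Model):
    # 'value': ('CharField', [], {'unique': 'True', 'max_length': '250'})
    value = models.CharField(unique=True, max_length=250)

    class Meta:
        ordering = ['value']          # Meta 'ordering' from the frozen dict
        db_table = 'generic_styles'   # Meta 'db_table' from the frozen dict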
cizixs/tftp
tftp/tftp_client.py
1
7600
import sys
import struct
import binascii
import argparse

import tftp
from tftp import SocketBase
from tftp import get_opcode
from tftp import default_port
from tftp import make_data_packet
from tftp import make_ack_packet


class State(object):
    START, DATA = range(2)


# Make packet functions.
def make_request_packet(opcode, filename, mode='octet'):
    values = (opcode, filename, 0, mode, 0)
    s = struct.Struct('! H {}s B {}s B'.format(len(filename), len(mode)))
    return s.pack(*values)

def make_rrq_packet(filename):
    return make_request_packet(tftp.RRQ, filename)

def make_wrq_packet(filename):
    return make_request_packet(tftp.WRQ, filename)


class TftpClient(SocketBase):
    def __init__(self, host='127.0.0.1', port='', filename=None, **argv):
        self.host = host
        self.orig_port = self.port = port or default_port()
        self.block_num = 1
        self.is_done = False
        self.status = State.START
        self.action = argv.get('action', 'get')
        self.debug = argv.get('debug', False)
        self.block_size = argv.get('block_size', tftp.DEFAULT_BLOCK_SIZE)
        self.filename = filename
        self.setup_file()
        self.setup_connect()

    def reset(self):
        self.block_num = 1
        self.is_done = False
        self.status = State.START
        self.port = self.orig_port or 69
        self.setup_file()
        self.setup_connect()

    @property
    def server_addr(self):
        return (self.host, self.port)

    def setup_file(self):
        if self.filename:
            if self.action == 'get':
                self.fd = open(self.filename, 'wb')
            elif self.action == 'put':
                self.fd = open(self.filename, 'rb')
            else:
                raise Exception('unsupported action %s' % self.action)

    def handle_packet(self, packet, addr):
        """Handle a packet from the remote end.

        If it's a wrong packet, not from the expected host/port, discard it;
        if it's a data packet, send an ACK packet back;
        if it's an error packet, print the error and exit;
        if it's an ACK packet, send a data packet back.
        """
        host, port = addr
        if host != self.host:
            # ignore packet from wrong address.
            return
        packet_len = len(packet)
        opcode = get_opcode(packet)
        if opcode == tftp.ERROR:
            err_code = struct.unpack('!H', packet[2:4])[0]
            err_msg = packet[4:packet_len-1]
            print "Error %s: %s" % (err_code, err_msg)
            sys.exit(err_code)
        elif opcode == tftp.DATA:
            # This is a data packet received from server, save data to file.
            # update port
            if self.port != port:
                self.port = port
            block_num = struct.unpack('!H', packet[2:4])[0]
            if block_num != self.block_num:
                # skip data packet with an unexpected block number
                print 'unexpected block num %d' % block_num
                return
            data = packet[4:]
            self.fd.write(data)
            if len(packet) < self.block_size + 2:
                self.is_done = True
                self.fd.close()
                file_len = self.block_size * (self.block_num - 1) + len(data)
                print '%d bytes received.' % file_len
            self.block_num += 1
        elif opcode == tftp.ACK:
            # This is a write request ACK.
            # Send the next block_size bytes of data to the server.
            if self.port != port:
                self.port = port
            block_num = struct.unpack('!H', packet[2:4])[0]
            self.verbose('received ack for %d' % block_num)
            self.block_num += 1
        else:
            raise Exception('unrecognized packet: %s' % str(opcode))

    def get_next_packet(self):
        if self.status == State.START:
            opcode = tftp.RRQ if self.action == 'get' else tftp.WRQ
            self.verbose('about to send packet %d' % opcode)
            packet = make_request_packet(opcode, self.filename)
            self.status = State.DATA
        elif self.status == State.DATA:
            if self.action == 'get':
                self.verbose('about to send ack for %d' % (self.block_num - 1))
                packet = make_ack_packet(self.block_num - 1)
            elif self.action == 'put':
                self.verbose('about to send data for %d' % (self.block_num - 1))
                data = self.fd.read(self.block_size)
                if len(data) < self.block_size:
                    self.is_done = True
                packet = make_data_packet(self.block_num - 1, data)
        return packet

    def handle(self):
        """Main loop function for tftp.

        The main loop works like the following:
        1. get the next-to-send packet
        2. send the packet to the server
        3. receive a packet from the server
        4. handle the received packet, then back to step 1.
        """
        while not self.is_done:
            packet = self.get_next_packet()
            if packet:
                self.send_packet(packet)
            (packet, addr) = self.recv_packet()
            self.handle_packet(packet, addr)


def main():
    menu = """Tftp client help menu:
    Supported commands:
    connect     connect to a server
    get         get file
    put         put file
    quit        exit
    ?           print this menu
    """

    def command_parse(line):
        if not line:
            return (None, None)
        line = line.split()
        command = line[0]
        options = line[1:]
        return command, options

    tftp_client = TftpClient()

    def connect(*args):
        tftp_client.host = args[0]
        if len(args) > 1:
            tftp_client.port = int(args[1])

    def get(*args):
        print args[0]
        tftp_client.action = 'get'
        tftp_client.filename = args[0]
        tftp_client.reset()
        tftp_client.handle()

    def put(*args):
        tftp_client.filename = args[0]
        tftp_client.action = 'put'
        tftp_client.reset()
        tftp_client.handle()

    def quit(*args):
        print 'Bye!'

    def print_help(*args):
        print menu

    command_map = {
        'connect': connect,
        'get': get,
        'put': put,
        'quit': quit,
    }

    print 'Welcome to python tftpclient.'
    while True:
        line = raw_input('tftp> ').strip().lower()
        command, options = command_parse(line)
        command_map.get(command, print_help)(*options)
        if command == 'quit':
            break


if __name__ == "__main__":
    parser = argparse.ArgumentParser(description='Tftp client in pure python.')
    parser.add_argument('--host', '-s', action='store', dest='host',
                        default='127.0.0.1', help='Server hostname')
    parser.add_argument('--port', '-p', action='store', dest='port', type=int,
                        default=69, help='Server port')
    parser.add_argument('--file', '-f', action='store', dest='filename',
                        help='File to get from server')
    parser.add_argument('--debug', '-d', action='store_true', default=False,
                        help='Debug mode: print more information (debug: False)')
    parser.add_argument('action', metavar='action', nargs='*',
                        help='Action to conduct: put or get (default: get)')
    args = parser.parse_args()
    print args

    if not args.filename or not args.action:
        main()
    else:
        tftp_client = TftpClient(args.host, args.port, args.filename,
                                 action=args.action[0], debug=args.debug)
        tftp_client.handle()
mit
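For reference, the `make_request_packet` helper above emits a standard RFC 1350 read/write request: a 2-byte network-order opcode followed by the NUL-terminated filename and transfer mode. A standalone sketch of the same layout, with the opcode constant assumed to equal tftp.RRQ:

import struct

RRQ = 1  # assumed to match tftp.RRQ

def make_request_packet(opcode, filename, mode=b'octet'):
    # 2-byte network-order opcode, then two NUL-terminated byte strings
    fmt = '! H {}s B {}s B'.format(len(filename), len(mode))
    return struct.Struct(fmt).pack(opcode, filename, 0, mode, 0)

packet = make_request_packet(RRQ, b'hello.txt')
print(repr(packet))   # \x00\x01hello.txt\x00octet\x00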
binjitsu/tutorial
walkthrough/remote-network-connection/exploit.py
2
1146
from pwn import *

# Vortex Level 0 -> Level 1
#
# Level Goal
#
# Your goal is to connect to port 5842 on vortex.labs.overthewire.org and read
# in 4 unsigned integers in host byte order. Add these integers together and
# send back the results to get a username and password for vortex1.
#
# This information can be used to log in using SSH.
#
# Note: vortex is a 32-bit x86 machine (meaning, a little-endian architecture)

io = remote('vortex.labs.overthewire.org', 5842)

# You can receive data manually. We want exactly four bytes.
data = io.recvn(4)

# Now let's unpack them as a 32-bit little-endian integer
value = unpack(data, bits=32, endian='little')

# By default, pwntools sets everything to i386, which is 32-bit little endian.
# Because of this, there is no need to specify the extra arguments.
#
# The above line could instead just read:
value = unpack(data)

# There's also a helper available directly on the tube itself.
# Let's read the other integers
value += io.unpack()
value += io.unpack()
value += io.unpack()

# Now let's send it back
io.pack(value)

# Receive all data until the connection closes
log.info(io.recvall())
mit
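The same exchange can be reproduced without pwntools. The sketch below uses only the standard library; the host and port are taken from the exploit above, and truncating the sum to 32 bits is an assumption about what the server expects.

# Dependency-free sketch of the same exchange, standard library only.
import socket
import struct

s = socket.create_connection(('vortex.labs.overthewire.org', 5842))

total = 0
for _ in range(4):
    data = b''
    while len(data) < 4:                  # recv until we have exactly 4 bytes
        chunk = s.recv(4 - len(data))
        if not chunk:
            raise EOFError('connection closed early')
        data += chunk
    total += struct.unpack('<I', data)[0]  # 32-bit little-endian unsigned

# Send the sum back, truncated to 32 bits (assumed server expectation)
s.sendall(struct.pack('<I', total & 0xFFFFFFFF))
print(s.recv(4096))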
baberthal/CouchPotatoServer
libs/html5lib/serializer/htmlserializer.py
235
12897
from __future__ import absolute_import, division, unicode_literals from six import text_type import gettext _ = gettext.gettext try: from functools import reduce except ImportError: pass from ..constants import voidElements, booleanAttributes, spaceCharacters from ..constants import rcdataElements, entities, xmlEntities from .. import utils from xml.sax.saxutils import escape spaceCharacters = "".join(spaceCharacters) try: from codecs import register_error, xmlcharrefreplace_errors except ImportError: unicode_encode_errors = "strict" else: unicode_encode_errors = "htmlentityreplace" encode_entity_map = {} is_ucs4 = len("\U0010FFFF") == 1 for k, v in list(entities.items()): # skip multi-character entities if ((is_ucs4 and len(v) > 1) or (not is_ucs4 and len(v) > 2)): continue if v != "&": if len(v) == 2: v = utils.surrogatePairToCodepoint(v) else: v = ord(v) if not v in encode_entity_map or k.islower(): # prefer &lt; over &LT; and similarly for &amp;, &gt;, etc. encode_entity_map[v] = k def htmlentityreplace_errors(exc): if isinstance(exc, (UnicodeEncodeError, UnicodeTranslateError)): res = [] codepoints = [] skip = False for i, c in enumerate(exc.object[exc.start:exc.end]): if skip: skip = False continue index = i + exc.start if utils.isSurrogatePair(exc.object[index:min([exc.end, index + 2])]): codepoint = utils.surrogatePairToCodepoint(exc.object[index:index + 2]) skip = True else: codepoint = ord(c) codepoints.append(codepoint) for cp in codepoints: e = encode_entity_map.get(cp) if e: res.append("&") res.append(e) if not e.endswith(";"): res.append(";") else: res.append("&#x%s;" % (hex(cp)[2:])) return ("".join(res), exc.end) else: return xmlcharrefreplace_errors(exc) register_error(unicode_encode_errors, htmlentityreplace_errors) del register_error class HTMLSerializer(object): # attribute quoting options quote_attr_values = False quote_char = '"' use_best_quote_char = True # tag syntax options omit_optional_tags = True minimize_boolean_attributes = True use_trailing_solidus = False space_before_trailing_solidus = True # escaping options escape_lt_in_attrs = False escape_rcdata = False resolve_entities = True # miscellaneous options alphabetical_attributes = False inject_meta_charset = True strip_whitespace = False sanitize = False options = ("quote_attr_values", "quote_char", "use_best_quote_char", "omit_optional_tags", "minimize_boolean_attributes", "use_trailing_solidus", "space_before_trailing_solidus", "escape_lt_in_attrs", "escape_rcdata", "resolve_entities", "alphabetical_attributes", "inject_meta_charset", "strip_whitespace", "sanitize") def __init__(self, **kwargs): """Initialize HTMLSerializer. Keyword options (default given first unless specified) include: inject_meta_charset=True|False Whether it insert a meta element to define the character set of the document. quote_attr_values=True|False Whether to quote attribute values that don't require quoting per HTML5 parsing rules. quote_char=u'"'|u"'" Use given quote character for attribute quoting. Default is to use double quote unless attribute value contains a double quote, in which case single quotes are used instead. escape_lt_in_attrs=False|True Whether to escape < in attribute values. escape_rcdata=False|True Whether to escape characters that need to be escaped within normal elements within rcdata elements such as style. resolve_entities=True|False Whether to resolve named character entities that appear in the source tree. The XML predefined entities &lt; &gt; &amp; &quot; &apos; are unaffected by this setting. 
strip_whitespace=False|True Whether to remove semantically meaningless whitespace. (This compresses all whitespace to a single space except within pre.) minimize_boolean_attributes=True|False Shortens boolean attributes to give just the attribute value, for example <input disabled="disabled"> becomes <input disabled>. use_trailing_solidus=False|True Includes a close-tag slash at the end of the start tag of void elements (empty elements whose end tag is forbidden). E.g. <hr/>. space_before_trailing_solidus=True|False Places a space immediately before the closing slash in a tag using a trailing solidus. E.g. <hr />. Requires use_trailing_solidus. sanitize=False|True Strip all unsafe or unknown constructs from output. See `html5lib user documentation`_ omit_optional_tags=True|False Omit start/end tags that are optional. alphabetical_attributes=False|True Reorder attributes to be in alphabetical order. .. _html5lib user documentation: http://code.google.com/p/html5lib/wiki/UserDocumentation """ if 'quote_char' in kwargs: self.use_best_quote_char = False for attr in self.options: setattr(self, attr, kwargs.get(attr, getattr(self, attr))) self.errors = [] self.strict = False def encode(self, string): assert(isinstance(string, text_type)) if self.encoding: return string.encode(self.encoding, unicode_encode_errors) else: return string def encodeStrict(self, string): assert(isinstance(string, text_type)) if self.encoding: return string.encode(self.encoding, "strict") else: return string def serialize(self, treewalker, encoding=None): self.encoding = encoding in_cdata = False self.errors = [] if encoding and self.inject_meta_charset: from ..filters.inject_meta_charset import Filter treewalker = Filter(treewalker, encoding) # WhitespaceFilter should be used before OptionalTagFilter # for maximum efficiently of this latter filter if self.strip_whitespace: from ..filters.whitespace import Filter treewalker = Filter(treewalker) if self.sanitize: from ..filters.sanitizer import Filter treewalker = Filter(treewalker) if self.omit_optional_tags: from ..filters.optionaltags import Filter treewalker = Filter(treewalker) # Alphabetical attributes must be last, as other filters # could add attributes and alter the order if self.alphabetical_attributes: from ..filters.alphabeticalattributes import Filter treewalker = Filter(treewalker) for token in treewalker: type = token["type"] if type == "Doctype": doctype = "<!DOCTYPE %s" % token["name"] if token["publicId"]: doctype += ' PUBLIC "%s"' % token["publicId"] elif token["systemId"]: doctype += " SYSTEM" if token["systemId"]: if token["systemId"].find('"') >= 0: if token["systemId"].find("'") >= 0: self.serializeError(_("System identifer contains both single and double quote characters")) quote_char = "'" else: quote_char = '"' doctype += " %s%s%s" % (quote_char, token["systemId"], quote_char) doctype += ">" yield self.encodeStrict(doctype) elif type in ("Characters", "SpaceCharacters"): if type == "SpaceCharacters" or in_cdata: if in_cdata and token["data"].find("</") >= 0: self.serializeError(_("Unexpected </ in CDATA")) yield self.encode(token["data"]) else: yield self.encode(escape(token["data"])) elif type in ("StartTag", "EmptyTag"): name = token["name"] yield self.encodeStrict("<%s" % name) if name in rcdataElements and not self.escape_rcdata: in_cdata = True elif in_cdata: self.serializeError(_("Unexpected child element of a CDATA element")) for (attr_namespace, attr_name), attr_value in token["data"].items(): # TODO: Add namespace support here k = 
attr_name v = attr_value yield self.encodeStrict(' ') yield self.encodeStrict(k) if not self.minimize_boolean_attributes or \ (k not in booleanAttributes.get(name, tuple()) and k not in booleanAttributes.get("", tuple())): yield self.encodeStrict("=") if self.quote_attr_values or not v: quote_attr = True else: quote_attr = reduce(lambda x, y: x or (y in v), spaceCharacters + ">\"'=", False) v = v.replace("&", "&amp;") if self.escape_lt_in_attrs: v = v.replace("<", "&lt;") if quote_attr: quote_char = self.quote_char if self.use_best_quote_char: if "'" in v and '"' not in v: quote_char = '"' elif '"' in v and "'" not in v: quote_char = "'" if quote_char == "'": v = v.replace("'", "&#39;") else: v = v.replace('"', "&quot;") yield self.encodeStrict(quote_char) yield self.encode(v) yield self.encodeStrict(quote_char) else: yield self.encode(v) if name in voidElements and self.use_trailing_solidus: if self.space_before_trailing_solidus: yield self.encodeStrict(" /") else: yield self.encodeStrict("/") yield self.encode(">") elif type == "EndTag": name = token["name"] if name in rcdataElements: in_cdata = False elif in_cdata: self.serializeError(_("Unexpected child element of a CDATA element")) yield self.encodeStrict("</%s>" % name) elif type == "Comment": data = token["data"] if data.find("--") >= 0: self.serializeError(_("Comment contains --")) yield self.encodeStrict("<!--%s-->" % token["data"]) elif type == "Entity": name = token["name"] key = name + ";" if not key in entities: self.serializeError(_("Entity %s not recognized" % name)) if self.resolve_entities and key not in xmlEntities: data = entities[key] else: data = "&%s;" % name yield self.encodeStrict(data) else: self.serializeError(token["data"]) def render(self, treewalker, encoding=None): if encoding: return b"".join(list(self.serialize(treewalker, encoding))) else: return "".join(list(self.serialize(treewalker))) def serializeError(self, data="XXX ERROR MESSAGE NEEDED"): # XXX The idea is to make data mandatory. self.errors.append(data) if self.strict: raise SerializeError def SerializeError(Exception): """Error in serialized tree""" pass
gpl-3.0
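The serializer's "htmlentityreplace" handler above hinges on Python's codecs error-handler hook: a registered callable receives the UnicodeEncodeError and returns a replacement string plus the position at which encoding should resume. A minimal, self-contained sketch of that mechanism; 'entityreplace' is a hypothetical handler name, not the one the serializer registers:

import codecs

def entity_errors(exc):
    # Replace each unencodable character with a numeric character reference.
    if isinstance(exc, UnicodeEncodeError):
        replacement = ''.join('&#x%X;' % ord(c)
                              for c in exc.object[exc.start:exc.end])
        return (replacement, exc.end)
    raise exc

codecs.register_error('entityreplace', entity_errors)

print(u'caf\xe9'.encode('ascii', 'entityreplace'))  # -> caf&#xE9;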
nwokeo/supysonic
venv/lib/python2.7/site-packages/pip/_vendor/requests/packages/chardet/eucjpprober.py
2919
3678
######################## BEGIN LICENSE BLOCK ########################
# The Original Code is mozilla.org code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
#   Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301  USA
######################### END LICENSE BLOCK #########################

import sys
from . import constants
from .mbcharsetprober import MultiByteCharSetProber
from .codingstatemachine import CodingStateMachine
from .chardistribution import EUCJPDistributionAnalysis
from .jpcntx import EUCJPContextAnalysis
from .mbcssm import EUCJPSMModel


class EUCJPProber(MultiByteCharSetProber):
    def __init__(self):
        MultiByteCharSetProber.__init__(self)
        self._mCodingSM = CodingStateMachine(EUCJPSMModel)
        self._mDistributionAnalyzer = EUCJPDistributionAnalysis()
        self._mContextAnalyzer = EUCJPContextAnalysis()
        self.reset()

    def reset(self):
        MultiByteCharSetProber.reset(self)
        self._mContextAnalyzer.reset()

    def get_charset_name(self):
        return "EUC-JP"

    def feed(self, aBuf):
        aLen = len(aBuf)
        for i in range(0, aLen):
            # PY3K: aBuf is a byte array, so aBuf[i] is an int, not a byte
            codingState = self._mCodingSM.next_state(aBuf[i])
            if codingState == constants.eError:
                if constants._debug:
                    sys.stderr.write(self.get_charset_name()
                                     + ' prober hit error at byte '
                                     + str(i) + '\n')
                self._mState = constants.eNotMe
                break
            elif codingState == constants.eItsMe:
                self._mState = constants.eFoundIt
                break
            elif codingState == constants.eStart:
                charLen = self._mCodingSM.get_current_charlen()
                if i == 0:
                    self._mLastChar[1] = aBuf[0]
                    self._mContextAnalyzer.feed(self._mLastChar, charLen)
                    self._mDistributionAnalyzer.feed(self._mLastChar, charLen)
                else:
                    self._mContextAnalyzer.feed(aBuf[i - 1:i + 1], charLen)
                    self._mDistributionAnalyzer.feed(aBuf[i - 1:i + 1],
                                                     charLen)

        self._mLastChar[0] = aBuf[aLen - 1]

        if self.get_state() == constants.eDetecting:
            if (self._mContextAnalyzer.got_enough_data() and
                    (self.get_confidence() > constants.SHORTCUT_THRESHOLD)):
                self._mState = constants.eFoundIt

        return self.get_state()

    def get_confidence(self):
        contxtCf = self._mContextAnalyzer.get_confidence()
        distribCf = self._mDistributionAnalyzer.get_confidence()
        return max(contxtCf, distribCf)
agpl-3.0
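In practice `EUCJPProber` is rarely driven directly: chardet feeds bytes to a group of probers (this one among them) from its public detector and reports the winner's charset and confidence. A usage sketch, with 'sample.txt' as a placeholder input file:

from chardet.universaldetector import UniversalDetector

detector = UniversalDetector()
with open('sample.txt', 'rb') as f:          # 'sample.txt' is a placeholder
    for chunk in iter(lambda: f.read(4096), b''):
        detector.feed(chunk)
        if detector.done:                    # the detector may stop early
            break
detector.close()
print(detector.result)   # e.g. {'encoding': 'EUC-JP', 'confidence': 0.99}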
mtconley/turntable
test/lib/python2.7/site-packages/scipy/stats/tests/test_morestats.py
7
38719
# Author: Travis Oliphant, 2002 # # Further enhancements and tests added by numerous SciPy developers. # from __future__ import division, print_function, absolute_import import warnings import numpy as np from numpy.random import RandomState from numpy.testing import (TestCase, run_module_suite, assert_array_equal, assert_almost_equal, assert_array_less, assert_array_almost_equal, assert_raises, assert_, assert_allclose, assert_equal, dec, assert_warns) from scipy import stats # Matplotlib is not a scipy dependency but is optionally used in probplot, so # check if it's available try: import matplotlib.pyplot as plt have_matplotlib = True except: have_matplotlib = False g1 = [1.006, 0.996, 0.998, 1.000, 0.992, 0.993, 1.002, 0.999, 0.994, 1.000] g2 = [0.998, 1.006, 1.000, 1.002, 0.997, 0.998, 0.996, 1.000, 1.006, 0.988] g3 = [0.991, 0.987, 0.997, 0.999, 0.995, 0.994, 1.000, 0.999, 0.996, 0.996] g4 = [1.005, 1.002, 0.994, 1.000, 0.995, 0.994, 0.998, 0.996, 1.002, 0.996] g5 = [0.998, 0.998, 0.982, 0.990, 1.002, 0.984, 0.996, 0.993, 0.980, 0.996] g6 = [1.009, 1.013, 1.009, 0.997, 0.988, 1.002, 0.995, 0.998, 0.981, 0.996] g7 = [0.990, 1.004, 0.996, 1.001, 0.998, 1.000, 1.018, 1.010, 0.996, 1.002] g8 = [0.998, 1.000, 1.006, 1.000, 1.002, 0.996, 0.998, 0.996, 1.002, 1.006] g9 = [1.002, 0.998, 0.996, 0.995, 0.996, 1.004, 1.004, 0.998, 0.999, 0.991] g10 = [0.991, 0.995, 0.984, 0.994, 0.997, 0.997, 0.991, 0.998, 1.004, 0.997] class TestShapiro(TestCase): def test_basic(self): x1 = [0.11,7.87,4.61,10.14,7.95,3.14,0.46, 4.43,0.21,4.75,0.71,1.52,3.24, 0.93,0.42,4.97,9.53,4.55,0.47,6.66] w,pw = stats.shapiro(x1) assert_almost_equal(w,0.90047299861907959,6) assert_almost_equal(pw,0.042089745402336121,6) x2 = [1.36,1.14,2.92,2.55,1.46,1.06,5.27,-1.11, 3.48,1.10,0.88,-0.51,1.46,0.52,6.20,1.69, 0.08,3.67,2.81,3.49] w,pw = stats.shapiro(x2) assert_almost_equal(w,0.9590270,6) assert_almost_equal(pw,0.52460,3) def test_bad_arg(self): # Length of x is less than 3. x = [1] assert_raises(ValueError, stats.shapiro, x) class TestAnderson(TestCase): def test_normal(self): rs = RandomState(1234567890) x1 = rs.standard_exponential(size=50) x2 = rs.standard_normal(size=50) A,crit,sig = stats.anderson(x1) assert_array_less(crit[:-1], A) A,crit,sig = stats.anderson(x2) assert_array_less(A, crit[-2:]) def test_expon(self): rs = RandomState(1234567890) x1 = rs.standard_exponential(size=50) x2 = rs.standard_normal(size=50) A,crit,sig = stats.anderson(x1,'expon') assert_array_less(A, crit[-2:]) olderr = np.seterr(all='ignore') try: A,crit,sig = stats.anderson(x2,'expon') finally: np.seterr(**olderr) assert_(A > crit[-1]) def test_bad_arg(self): assert_raises(ValueError, stats.anderson, [1], dist='plate_of_shrimp') class TestAndersonKSamp(TestCase): def test_example1a(self): # Example data from Scholz & Stephens (1987), originally # published in Lehmann (1995, Nonparametrics, Statistical # Methods Based on Ranks, p. 
309) # Pass a mixture of lists and arrays t1 = [38.7, 41.5, 43.8, 44.5, 45.5, 46.0, 47.7, 58.0] t2 = np.array([39.2, 39.3, 39.7, 41.4, 41.8, 42.9, 43.3, 45.8]) t3 = np.array([34.0, 35.0, 39.0, 40.0, 43.0, 43.0, 44.0, 45.0]) t4 = np.array([34.0, 34.8, 34.8, 35.4, 37.2, 37.8, 41.2, 42.8]) assert_warns(UserWarning, stats.anderson_ksamp, (t1, t2, t3, t4), midrank=False) with warnings.catch_warnings(): warnings.filterwarnings('ignore', message='approximate p-value') Tk, tm, p = stats.anderson_ksamp((t1, t2, t3, t4), midrank=False) assert_almost_equal(Tk, 4.449, 3) assert_array_almost_equal([0.4985, 1.3237, 1.9158, 2.4930, 3.2459], tm, 4) assert_almost_equal(p, 0.0021, 4) def test_example1b(self): # Example data from Scholz & Stephens (1987), originally # published in Lehmann (1995, Nonparametrics, Statistical # Methods Based on Ranks, p. 309) # Pass arrays t1 = np.array([38.7, 41.5, 43.8, 44.5, 45.5, 46.0, 47.7, 58.0]) t2 = np.array([39.2, 39.3, 39.7, 41.4, 41.8, 42.9, 43.3, 45.8]) t3 = np.array([34.0, 35.0, 39.0, 40.0, 43.0, 43.0, 44.0, 45.0]) t4 = np.array([34.0, 34.8, 34.8, 35.4, 37.2, 37.8, 41.2, 42.8]) with warnings.catch_warnings(): warnings.filterwarnings('ignore', message='approximate p-value') Tk, tm, p = stats.anderson_ksamp((t1, t2, t3, t4), midrank=True) assert_almost_equal(Tk, 4.480, 3) assert_array_almost_equal([0.4985, 1.3237, 1.9158, 2.4930, 3.2459], tm, 4) assert_almost_equal(p, 0.0020, 4) def test_example2a(self): # Example data taken from an earlier technical report of # Scholz and Stephens # Pass lists instead of arrays t1 = [194, 15, 41, 29, 33, 181] t2 = [413, 14, 58, 37, 100, 65, 9, 169, 447, 184, 36, 201, 118] t3 = [34, 31, 18, 18, 67, 57, 62, 7, 22, 34] t4 = [90, 10, 60, 186, 61, 49, 14, 24, 56, 20, 79, 84, 44, 59, 29, 118, 25, 156, 310, 76, 26, 44, 23, 62] t5 = [130, 208, 70, 101, 208] t6 = [74, 57, 48, 29, 502, 12, 70, 21, 29, 386, 59, 27] t7 = [55, 320, 56, 104, 220, 239, 47, 246, 176, 182, 33] t8 = [23, 261, 87, 7, 120, 14, 62, 47, 225, 71, 246, 21, 42, 20, 5, 12, 120, 11, 3, 14, 71, 11, 14, 11, 16, 90, 1, 16, 52, 95] t9 = [97, 51, 11, 4, 141, 18, 142, 68, 77, 80, 1, 16, 106, 206, 82, 54, 31, 216, 46, 111, 39, 63, 18, 191, 18, 163, 24] t10 = [50, 44, 102, 72, 22, 39, 3, 15, 197, 188, 79, 88, 46, 5, 5, 36, 22, 139, 210, 97, 30, 23, 13, 14] t11 = [359, 9, 12, 270, 603, 3, 104, 2, 438] t12 = [50, 254, 5, 283, 35, 12] t13 = [487, 18, 100, 7, 98, 5, 85, 91, 43, 230, 3, 130] t14 = [102, 209, 14, 57, 54, 32, 67, 59, 134, 152, 27, 14, 230, 66, 61, 34] with warnings.catch_warnings(): warnings.filterwarnings('ignore', message='approximate p-value') Tk, tm, p = stats.anderson_ksamp((t1, t2, t3, t4, t5, t6, t7, t8, t9, t10, t11, t12, t13, t14), midrank=False) assert_almost_equal(Tk, 3.288, 3) assert_array_almost_equal([0.5990, 1.3269, 1.8052, 2.2486, 2.8009], tm, 4) assert_almost_equal(p, 0.0041, 4) def test_example2b(self): # Example data taken from an earlier technical report of # Scholz and Stephens t1 = [194, 15, 41, 29, 33, 181] t2 = [413, 14, 58, 37, 100, 65, 9, 169, 447, 184, 36, 201, 118] t3 = [34, 31, 18, 18, 67, 57, 62, 7, 22, 34] t4 = [90, 10, 60, 186, 61, 49, 14, 24, 56, 20, 79, 84, 44, 59, 29, 118, 25, 156, 310, 76, 26, 44, 23, 62] t5 = [130, 208, 70, 101, 208] t6 = [74, 57, 48, 29, 502, 12, 70, 21, 29, 386, 59, 27] t7 = [55, 320, 56, 104, 220, 239, 47, 246, 176, 182, 33] t8 = [23, 261, 87, 7, 120, 14, 62, 47, 225, 71, 246, 21, 42, 20, 5, 12, 120, 11, 3, 14, 71, 11, 14, 11, 16, 90, 1, 16, 52, 95] t9 = [97, 51, 11, 4, 141, 18, 142, 68, 77, 80, 1, 16, 106, 206, 82, 
54, 31, 216, 46, 111, 39, 63, 18, 191, 18, 163, 24] t10 = [50, 44, 102, 72, 22, 39, 3, 15, 197, 188, 79, 88, 46, 5, 5, 36, 22, 139, 210, 97, 30, 23, 13, 14] t11 = [359, 9, 12, 270, 603, 3, 104, 2, 438] t12 = [50, 254, 5, 283, 35, 12] t13 = [487, 18, 100, 7, 98, 5, 85, 91, 43, 230, 3, 130] t14 = [102, 209, 14, 57, 54, 32, 67, 59, 134, 152, 27, 14, 230, 66, 61, 34] with warnings.catch_warnings(): warnings.filterwarnings('ignore', message='approximate p-value') Tk, tm, p = stats.anderson_ksamp((t1, t2, t3, t4, t5, t6, t7, t8, t9, t10, t11, t12, t13, t14), midrank=True) assert_almost_equal(Tk, 3.294, 3) assert_array_almost_equal([0.5990, 1.3269, 1.8052, 2.2486, 2.8009], tm, 4) assert_almost_equal(p, 0.0041, 4) def test_not_enough_samples(self): assert_raises(ValueError, stats.anderson_ksamp, np.ones(5)) def test_no_distinct_observations(self): assert_raises(ValueError, stats.anderson_ksamp, (np.ones(5), np.ones(5))) def test_empty_sample(self): assert_raises(ValueError, stats.anderson_ksamp, (np.ones(5), [])) class TestAnsari(TestCase): def test_small(self): x = [1,2,3,3,4] y = [3,2,6,1,6,1,4,1] W, pval = stats.ansari(x,y) assert_almost_equal(W,23.5,11) assert_almost_equal(pval,0.13499256881897437,11) def test_approx(self): ramsay = np.array((111, 107, 100, 99, 102, 106, 109, 108, 104, 99, 101, 96, 97, 102, 107, 113, 116, 113, 110, 98)) parekh = np.array((107, 108, 106, 98, 105, 103, 110, 105, 104, 100, 96, 108, 103, 104, 114, 114, 113, 108, 106, 99)) with warnings.catch_warnings(): warnings.filterwarnings('ignore', message="Ties preclude use of exact statistic.") W, pval = stats.ansari(ramsay, parekh) assert_almost_equal(W,185.5,11) assert_almost_equal(pval,0.18145819972867083,11) def test_exact(self): W,pval = stats.ansari([1,2,3,4],[15,5,20,8,10,12]) assert_almost_equal(W,10.0,11) assert_almost_equal(pval,0.533333333333333333,7) def test_bad_arg(self): assert_raises(ValueError, stats.ansari, [], [1]) assert_raises(ValueError, stats.ansari, [1], []) class TestBartlett(TestCase): def test_data(self): args = [g1, g2, g3, g4, g5, g6, g7, g8, g9, g10] T, pval = stats.bartlett(*args) assert_almost_equal(T,20.78587342806484,7) assert_almost_equal(pval,0.0136358632781,7) def test_bad_arg(self): # Too few args raises ValueError. assert_raises(ValueError, stats.bartlett, [1]) class TestLevene(TestCase): def test_data(self): args = [g1, g2, g3, g4, g5, g6, g7, g8, g9, g10] W, pval = stats.levene(*args) assert_almost_equal(W,1.7059176930008939,7) assert_almost_equal(pval,0.0990829755522,7) def test_trimmed1(self): # Test that center='trimmed' gives the same result as center='mean' # when proportiontocut=0. W1, pval1 = stats.levene(g1, g2, g3, center='mean') W2, pval2 = stats.levene(g1, g2, g3, center='trimmed', proportiontocut=0.0) assert_almost_equal(W1, W2) assert_almost_equal(pval1, pval2) def test_trimmed2(self): x = [1.2, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 100.0] y = [0.0, 3.0, 3.5, 4.0, 4.5, 5.0, 5.5, 200.0] np.random.seed(1234) x2 = np.random.permutation(x) # Use center='trimmed' W0, pval0 = stats.levene(x, y, center='trimmed', proportiontocut=0.125) W1, pval1 = stats.levene(x2, y, center='trimmed', proportiontocut=0.125) # Trim the data here, and use center='mean' W2, pval2 = stats.levene(x[1:-1], y[1:-1], center='mean') # Result should be the same. 
assert_almost_equal(W0, W2) assert_almost_equal(W1, W2) assert_almost_equal(pval1, pval2) def test_equal_mean_median(self): x = np.linspace(-1,1,21) np.random.seed(1234) x2 = np.random.permutation(x) y = x**3 W1, pval1 = stats.levene(x, y, center='mean') W2, pval2 = stats.levene(x2, y, center='median') assert_almost_equal(W1, W2) assert_almost_equal(pval1, pval2) def test_bad_keyword(self): x = np.linspace(-1,1,21) assert_raises(TypeError, stats.levene, x, x, portiontocut=0.1) def test_bad_center_value(self): x = np.linspace(-1,1,21) assert_raises(ValueError, stats.levene, x, x, center='trim') def test_too_few_args(self): assert_raises(ValueError, stats.levene, [1]) class TestBinomP(TestCase): def test_data(self): pval = stats.binom_test(100,250) assert_almost_equal(pval,0.0018833009350757682,11) pval = stats.binom_test(201,405) assert_almost_equal(pval,0.92085205962670713,11) pval = stats.binom_test([682,243],p=3.0/4) assert_almost_equal(pval,0.38249155957481695,11) def test_bad_len_x(self): # Length of x must be 1 or 2. assert_raises(ValueError, stats.binom_test, [1,2,3]) def test_bad_n(self): # len(x) is 1, but n is invalid. # Missing n assert_raises(ValueError, stats.binom_test, [100]) # n less than x[0] assert_raises(ValueError, stats.binom_test, [100], n=50) def test_bad_p(self): assert_raises(ValueError, stats.binom_test, [50, 50], p=2.0) class TestFindRepeats(TestCase): def test_basic(self): a = [1,2,3,4,1,2,3,4,1,2,5] res,nums = stats.find_repeats(a) assert_array_equal(res,[1,2,3,4]) assert_array_equal(nums,[3,3,2,2]) def test_empty_result(self): # Check that empty arrays are returned when there are no repeats. a = [10, 20, 50, 30, 40] repeated, counts = stats.find_repeats(a) assert_array_equal(repeated, []) assert_array_equal(counts, []) class TestFligner(TestCase): def test_data(self): # numbers from R: fligner.test in package stats x1 = np.arange(5) assert_array_almost_equal(stats.fligner(x1,x1**2), (3.2282229927203536, 0.072379187848207877), 11) def test_trimmed1(self): # Test that center='trimmed' gives the same result as center='mean' # when proportiontocut=0. Xsq1, pval1 = stats.fligner(g1, g2, g3, center='mean') Xsq2, pval2 = stats.fligner(g1, g2, g3, center='trimmed', proportiontocut=0.0) assert_almost_equal(Xsq1, Xsq2) assert_almost_equal(pval1, pval2) def test_trimmed2(self): x = [1.2, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 100.0] y = [0.0, 3.0, 3.5, 4.0, 4.5, 5.0, 5.5, 200.0] # Use center='trimmed' Xsq1, pval1 = stats.fligner(x, y, center='trimmed', proportiontocut=0.125) # Trim the data here, and use center='mean' Xsq2, pval2 = stats.fligner(x[1:-1], y[1:-1], center='mean') # Result should be the same. assert_almost_equal(Xsq1, Xsq2) assert_almost_equal(pval1, pval2) # The following test looks reasonable at first, but fligner() uses the # function stats.rankdata(), and in one of the cases in this test, # there are ties, while in the other (because of normal rounding # errors) there are not. This difference leads to differences in the # third significant digit of W. 
# #def test_equal_mean_median(self): # x = np.linspace(-1,1,21) # y = x**3 # W1, pval1 = stats.fligner(x, y, center='mean') # W2, pval2 = stats.fligner(x, y, center='median') # assert_almost_equal(W1, W2) # assert_almost_equal(pval1, pval2) def test_bad_keyword(self): x = np.linspace(-1,1,21) assert_raises(TypeError, stats.fligner, x, x, portiontocut=0.1) def test_bad_center_value(self): x = np.linspace(-1,1,21) assert_raises(ValueError, stats.fligner, x, x, center='trim') def test_bad_num_args(self): # Too few args raises ValueError. assert_raises(ValueError, stats.fligner, [1]) class TestMood(TestCase): def test_mood(self): # numbers from R: mood.test in package stats x1 = np.arange(5) assert_array_almost_equal(stats.mood(x1, x1**2), (-1.3830857299399906, 0.16663858066771478), 11) def test_mood_order_of_args(self): # z should change sign when the order of arguments changes, pvalue # should not change np.random.seed(1234) x1 = np.random.randn(10, 1) x2 = np.random.randn(15, 1) z1, p1 = stats.mood(x1, x2) z2, p2 = stats.mood(x2, x1) assert_array_almost_equal([z1, p1], [-z2, p2]) def test_mood_with_axis_none(self): #Test with axis = None, compare with results from R x1 = [-0.626453810742332, 0.183643324222082, -0.835628612410047, 1.59528080213779, 0.329507771815361, -0.820468384118015, 0.487429052428485, 0.738324705129217, 0.575781351653492, -0.305388387156356, 1.51178116845085, 0.389843236411431, -0.621240580541804, -2.2146998871775, 1.12493091814311, -0.0449336090152309, -0.0161902630989461, 0.943836210685299, 0.821221195098089, 0.593901321217509] x2 = [-0.896914546624981, 0.184849184646742, 1.58784533120882, -1.13037567424629, -0.0802517565509893, 0.132420284381094, 0.707954729271733, -0.23969802417184, 1.98447393665293, -0.138787012119665, 0.417650750792556, 0.981752777463662, -0.392695355503813, -1.03966897694891, 1.78222896030858, -2.31106908460517, 0.878604580921265, 0.035806718015226, 1.01282869212708, 0.432265154539617, 2.09081920524915, -1.19992581964387, 1.58963820029007, 1.95465164222325, 0.00493777682814261, -2.45170638784613, 0.477237302613617, -0.596558168631403, 0.792203270299649, 0.289636710177348] x1 = np.array(x1) x2 = np.array(x2) x1.shape = (10, 2) x2.shape = (15, 2) assert_array_almost_equal(stats.mood(x1, x2, axis=None), [-1.31716607555, 0.18778296257]) def test_mood_2d(self): # Test if the results of mood test in 2-D case are consistent with the # R result for the same inputs. Numbers from R mood.test(). 
ny = 5 np.random.seed(1234) x1 = np.random.randn(10, ny) x2 = np.random.randn(15, ny) z_vectest, pval_vectest = stats.mood(x1, x2) for j in range(ny): assert_array_almost_equal([z_vectest[j], pval_vectest[j]], stats.mood(x1[:, j], x2[:, j])) # inverse order of dimensions x1 = x1.transpose() x2 = x2.transpose() z_vectest, pval_vectest = stats.mood(x1, x2, axis=1) for i in range(ny): # check axis handling is self consistent assert_array_almost_equal([z_vectest[i], pval_vectest[i]], stats.mood(x1[i, :], x2[i, :])) def test_mood_3d(self): shape = (10, 5, 6) np.random.seed(1234) x1 = np.random.randn(*shape) x2 = np.random.randn(*shape) for axis in range(3): z_vectest, pval_vectest = stats.mood(x1, x2, axis=axis) # Tests that result for 3-D arrays is equal to that for the # same calculation on a set of 1-D arrays taken from the # 3-D array axes_idx = ([1, 2], [0, 2], [0, 1]) # the two axes != axis for i in range(shape[axes_idx[axis][0]]): for j in range(shape[axes_idx[axis][1]]): if axis == 0: slice1 = x1[:, i, j] slice2 = x2[:, i, j] elif axis == 1: slice1 = x1[i, :, j] slice2 = x2[i, :, j] else: slice1 = x1[i, j, :] slice2 = x2[i, j, :] assert_array_almost_equal([z_vectest[i, j], pval_vectest[i, j]], stats.mood(slice1, slice2)) def test_mood_bad_arg(self): # Raise ValueError when the sum of the lengths of the args is less than 3 assert_raises(ValueError, stats.mood, [1], []) class TestProbplot(TestCase): def test_basic(self): np.random.seed(12345) x = stats.norm.rvs(size=20) osm, osr = stats.probplot(x, fit=False) osm_expected = [-1.8241636, -1.38768012, -1.11829229, -0.91222575, -0.73908135, -0.5857176, -0.44506467, -0.31273668, -0.18568928, -0.06158146, 0.06158146, 0.18568928, 0.31273668, 0.44506467, 0.5857176, 0.73908135, 0.91222575, 1.11829229, 1.38768012, 1.8241636] assert_allclose(osr, np.sort(x)) assert_allclose(osm, osm_expected) res, res_fit = stats.probplot(x, fit=True) res_fit_expected = [1.05361841, 0.31297795, 0.98741609] assert_allclose(res_fit, res_fit_expected) def test_sparams_keyword(self): np.random.seed(123456) x = stats.norm.rvs(size=100) # Check that None, () and 0 (loc=0, for normal distribution) all work # and give the same results osm1, osr1 = stats.probplot(x, sparams=None, fit=False) osm2, osr2 = stats.probplot(x, sparams=0, fit=False) osm3, osr3 = stats.probplot(x, sparams=(), fit=False) assert_allclose(osm1, osm2) assert_allclose(osm1, osm3) assert_allclose(osr1, osr2) assert_allclose(osr1, osr3) # Check giving (loc, scale) params for normal distribution osm, osr = stats.probplot(x, sparams=(), fit=False) def test_dist_keyword(self): np.random.seed(12345) x = stats.norm.rvs(size=20) osm1, osr1 = stats.probplot(x, fit=False, dist='t', sparams=(3,)) osm2, osr2 = stats.probplot(x, fit=False, dist=stats.t, sparams=(3,)) assert_allclose(osm1, osm2) assert_allclose(osr1, osr2) assert_raises(ValueError, stats.probplot, x, dist='wrong-dist-name') assert_raises(AttributeError, stats.probplot, x, dist=[]) class custom_dist(object): """Some class that looks just enough like a distribution.""" def ppf(self, q): return stats.norm.ppf(q, loc=2) osm1, osr1 = stats.probplot(x, sparams=(2,), fit=False) osm2, osr2 = stats.probplot(x, dist=custom_dist(), fit=False) assert_allclose(osm1, osm2) assert_allclose(osr1, osr2) @dec.skipif(not have_matplotlib) def test_plot_kwarg(self): np.random.seed(7654321) fig = plt.figure() fig.add_subplot(111) x = stats.t.rvs(3, size=100) res1, fitres1 = stats.probplot(x, plot=plt) plt.close() res2, fitres2 = stats.probplot(x, plot=None) res3 = 
stats.probplot(x, fit=False, plot=plt) plt.close() res4 = stats.probplot(x, fit=False, plot=None) # Check that results are consistent between combinations of `fit` and # `plot` keywords. assert_(len(res1) == len(res2) == len(res3) == len(res4) == 2) assert_allclose(res1, res2) assert_allclose(res1, res3) assert_allclose(res1, res4) assert_allclose(fitres1, fitres2) # Check that a Matplotlib Axes object is accepted fig = plt.figure() ax = fig.add_subplot(111) stats.probplot(x, fit=False, plot=ax) plt.close() def test_probplot_bad_args(self): # Raise ValueError when given an invalid distribution. assert_raises(ValueError, stats.probplot, [1], dist="plate_of_shrimp") def test_wilcoxon_bad_arg(): # Raise ValueError when two args of different lengths are given or # zero_method is unknown. assert_raises(ValueError, stats.wilcoxon, [1], [1,2]) assert_raises(ValueError, stats.wilcoxon, [1,2], [1,2], "dummy") def test_mvsdist_bad_arg(): # Raise ValueError if fewer than two data points are given. data = [1] assert_raises(ValueError, stats.mvsdist, data) def test_kstat_bad_arg(): # Raise ValueError if n > 4 or n < 1. data = [1] n = 10 assert_raises(ValueError, stats.kstat, data, n=n) def test_kstatvar_bad_arg(): # Raise ValueError if n is not 1 or 2. data = [1] n = 10 assert_raises(ValueError, stats.kstatvar, data, n=n) def test_ppcc_max_bad_arg(): # Raise ValueError when given an invalid distribution. data = [1] assert_raises(ValueError, stats.ppcc_max, data, dist="plate_of_shrimp") class TestBoxcox_llf(TestCase): def test_basic(self): np.random.seed(54321) x = stats.norm.rvs(size=10000, loc=10) lmbda = 1 llf = stats.boxcox_llf(lmbda, x) llf_expected = -x.size / 2. * np.log(np.sum(x.std()**2)) assert_allclose(llf, llf_expected) def test_array_like(self): np.random.seed(54321) x = stats.norm.rvs(size=100, loc=10) lmbda = 1 llf = stats.boxcox_llf(lmbda, x) llf2 = stats.boxcox_llf(lmbda, list(x)) assert_allclose(llf, llf2, rtol=1e-12) def test_2d_input(self): # Note: boxcox_llf() was already working with 2-D input (sort of), so # keep it like that. boxcox() doesn't work with 2-D input though, due # to brent() returning a scalar. np.random.seed(54321) x = stats.norm.rvs(size=100, loc=10) lmbda = 1 llf = stats.boxcox_llf(lmbda, x) llf2 = stats.boxcox_llf(lmbda, np.vstack([x, x]).T) assert_allclose([llf, llf], llf2, rtol=1e-12) def test_empty(self): assert_(np.isnan(stats.boxcox_llf(1, []))) class TestBoxcox(TestCase): def test_fixed_lmbda(self): np.random.seed(12345) x = stats.loggamma.rvs(5, size=50) + 5 xt = stats.boxcox(x, lmbda=1) assert_allclose(xt, x - 1) xt = stats.boxcox(x, lmbda=-1) assert_allclose(xt, 1 - 1/x) xt = stats.boxcox(x, lmbda=0) assert_allclose(xt, np.log(x)) # Also test that array_like input works xt = stats.boxcox(list(x), lmbda=0) assert_allclose(xt, np.log(x)) def test_lmbda_None(self): np.random.seed(1234567) # Start from normal rv's, do inverse transform to check that # optimization function gets close to the right answer.
np.random.seed(1245) lmbda = 2.5 x = stats.norm.rvs(loc=10, size=50000) x_inv = (x * lmbda + 1)**(-lmbda) xt, maxlog = stats.boxcox(x_inv) assert_almost_equal(maxlog, -1 / lmbda, decimal=2) def test_alpha(self): np.random.seed(1234) x = stats.loggamma.rvs(5, size=50) + 5 # Some regular values for alpha, on a small sample size _, _, interval = stats.boxcox(x, alpha=0.75) assert_allclose(interval, [4.004485780226041, 5.138756355035744]) _, _, interval = stats.boxcox(x, alpha=0.05) assert_allclose(interval, [1.2138178554857557, 8.209033272375663]) # Try some extreme values, see we don't hit the N=500 limit x = stats.loggamma.rvs(7, size=500) + 15 _, _, interval = stats.boxcox(x, alpha=0.001) assert_allclose(interval, [0.3988867, 11.40553131]) _, _, interval = stats.boxcox(x, alpha=0.999) assert_allclose(interval, [5.83316246, 5.83735292]) def test_boxcox_bad_arg(self): # Raise ValueError if any data value is negative. x = np.array([-1]) assert_raises(ValueError, stats.boxcox, x) def test_empty(self): assert_(stats.boxcox([]).shape == (0,)) class TestBoxcoxNormmax(TestCase): def setUp(self): np.random.seed(12345) self.x = stats.loggamma.rvs(5, size=50) + 5 def test_pearsonr(self): maxlog = stats.boxcox_normmax(self.x) assert_allclose(maxlog, 1.804465, rtol=1e-6) def test_mle(self): maxlog = stats.boxcox_normmax(self.x, method='mle') assert_allclose(maxlog, 1.758101, rtol=1e-6) # Check that boxcox() uses 'mle' _, maxlog_boxcox = stats.boxcox(self.x) assert_allclose(maxlog_boxcox, maxlog) def test_all(self): maxlog_all = stats.boxcox_normmax(self.x, method='all') assert_allclose(maxlog_all, [1.804465, 1.758101], rtol=1e-6) class TestBoxcoxNormplot(TestCase): def setUp(self): np.random.seed(7654321) self.x = stats.loggamma.rvs(5, size=500) + 5 def test_basic(self): N = 5 lmbdas, ppcc = stats.boxcox_normplot(self.x, -10, 10, N=N) ppcc_expected = [0.57783375, 0.83610988, 0.97524311, 0.99756057, 0.95843297] assert_allclose(lmbdas, np.linspace(-10, 10, num=N)) assert_allclose(ppcc, ppcc_expected) @dec.skipif(not have_matplotlib) def test_plot_kwarg(self): # Check with the matplotlib.pyplot module fig = plt.figure() fig.add_subplot(111) stats.boxcox_normplot(self.x, -20, 20, plot=plt) plt.close() # Check that a Matplotlib Axes object is accepted fig = plt.figure() ax = fig.add_subplot(111) stats.boxcox_normplot(self.x, -20, 20, plot=ax) plt.close() def test_invalid_inputs(self): # `lb` has to be larger than `la` assert_raises(ValueError, stats.boxcox_normplot, self.x, 1, 0) # `x` can not contain negative values assert_raises(ValueError, stats.boxcox_normplot, [-1, 1], 0, 1) def test_empty(self): assert_(stats.boxcox_normplot([], 0, 1).size == 0) class TestCircFuncs(TestCase): def test_circfuncs(self): x = np.array([355,5,2,359,10,350]) M = stats.circmean(x, high=360) Mval = 0.167690146 assert_allclose(M, Mval, rtol=1e-7) V = stats.circvar(x, high=360) Vval = 42.51955609 assert_allclose(V, Vval, rtol=1e-7) S = stats.circstd(x, high=360) Sval = 6.520702116 assert_allclose(S, Sval, rtol=1e-7) def test_circfuncs_small(self): x = np.array([20,21,22,18,19,20.5,19.2]) M1 = x.mean() M2 = stats.circmean(x, high=360) assert_allclose(M2, M1, rtol=1e-5) V1 = x.var() V2 = stats.circvar(x, high=360) assert_allclose(V2, V1, rtol=1e-4) S1 = x.std() S2 = stats.circstd(x, high=360) assert_allclose(S2, S1, rtol=1e-4) def test_circmean_axis(self): x = np.array([[355,5,2,359,10,350], [351,7,4,352,9,349], [357,9,8,358,4,356]]) M1 = stats.circmean(x, high=360) M2 = stats.circmean(x.ravel(), high=360) assert_allclose(M1,
M2, rtol=1e-14) M1 = stats.circmean(x, high=360, axis=1) M2 = [stats.circmean(x[i], high=360) for i in range(x.shape[0])] assert_allclose(M1, M2, rtol=1e-14) M1 = stats.circmean(x, high=360, axis=0) M2 = [stats.circmean(x[:,i], high=360) for i in range(x.shape[1])] assert_allclose(M1, M2, rtol=1e-14) def test_circvar_axis(self): x = np.array([[355,5,2,359,10,350], [351,7,4,352,9,349], [357,9,8,358,4,356]]) V1 = stats.circvar(x, high=360) V2 = stats.circvar(x.ravel(), high=360) assert_allclose(V1, V2, rtol=1e-11) V1 = stats.circvar(x, high=360, axis=1) V2 = [stats.circvar(x[i], high=360) for i in range(x.shape[0])] assert_allclose(V1, V2, rtol=1e-11) V1 = stats.circvar(x, high=360, axis=0) V2 = [stats.circvar(x[:,i], high=360) for i in range(x.shape[1])] assert_allclose(V1, V2, rtol=1e-11) def test_circstd_axis(self): x = np.array([[355,5,2,359,10,350], [351,7,4,352,9,349], [357,9,8,358,4,356]]) S1 = stats.circstd(x, high=360) S2 = stats.circstd(x.ravel(), high=360) assert_allclose(S1, S2, rtol=1e-11) S1 = stats.circstd(x, high=360, axis=1) S2 = [stats.circstd(x[i], high=360) for i in range(x.shape[0])] assert_allclose(S1, S2, rtol=1e-11) S1 = stats.circstd(x, high=360, axis=0) S2 = [stats.circstd(x[:,i], high=360) for i in range(x.shape[1])] assert_allclose(S1, S2, rtol=1e-11) def test_circfuncs_array_like(self): x = [355,5,2,359,10,350] assert_allclose(stats.circmean(x, high=360), 0.167690146, rtol=1e-7) assert_allclose(stats.circvar(x, high=360), 42.51955609, rtol=1e-7) assert_allclose(stats.circstd(x, high=360), 6.520702116, rtol=1e-7) def test_empty(self): assert_(np.isnan(stats.circmean([]))) assert_(np.isnan(stats.circstd([]))) assert_(np.isnan(stats.circvar([]))) def test_accuracy_wilcoxon(): freq = [1, 4, 16, 15, 8, 4, 5, 1, 2] nums = range(-4, 5) x = np.concatenate([[u] * v for u, v in zip(nums, freq)]) y = np.zeros(x.size) T, p = stats.wilcoxon(x, y, "pratt") assert_allclose(T, 423) assert_allclose(p, 0.00197547303533107) T, p = stats.wilcoxon(x, y, "zsplit") assert_allclose(T, 441) assert_allclose(p, 0.0032145343172473055) T, p = stats.wilcoxon(x, y, "wilcox") assert_allclose(T, 327) assert_allclose(p, 0.00641346115861) # Test the 'correction' option, using values computed in R with: # > wilcox.test(x, y, paired=TRUE, exact=FALSE, correct={FALSE,TRUE}) x = np.array([120, 114, 181, 188, 180, 146, 121, 191, 132, 113, 127, 112]) y = np.array([133, 143, 119, 189, 112, 199, 198, 113, 115, 121, 142, 187]) T, p = stats.wilcoxon(x, y, correction=False) assert_equal(T, 34) assert_allclose(p, 0.6948866, rtol=1e-6) T, p = stats.wilcoxon(x, y, correction=True) assert_equal(T, 34) assert_allclose(p, 0.7240817, rtol=1e-6) def test_wilcoxon_tie(): # Regression test for gh-2391. # Corresponding R code is: # > result = wilcox.test(rep(0.1, 10), exact=FALSE, correct=FALSE) # > result$p.value # [1] 0.001565402 # > result = wilcox.test(rep(0.1, 10), exact=FALSE, correct=TRUE) # > result$p.value # [1] 0.001904195 stat, p = stats.wilcoxon([0.1] * 10) expected_p = 0.001565402 assert_equal(stat, 0) assert_allclose(p, expected_p, rtol=1e-6) stat, p = stats.wilcoxon([0.1] * 10, correction=True) expected_p = 0.001904195 assert_equal(stat, 0) assert_allclose(p, expected_p, rtol=1e-6) class TestMedianTest(TestCase): def test_bad_n_samples(self): # median_test requires at least two samples. assert_raises(ValueError, stats.median_test, [1, 2, 3]) def test_empty_sample(self): # Each sample must contain at least one value. 
assert_raises(ValueError, stats.median_test, [], [1, 2, 3]) def test_empty_when_ties_ignored(self): # The grand median is 1, and all values in the first argument are # equal to the grand median. With ties="ignore", those values are # ignored, which results in the first sample being (in effect) empty. # This should raise a ValueError. assert_raises(ValueError, stats.median_test, [1, 1, 1, 1], [2, 0, 1], [2, 0], ties="ignore") def test_empty_contingency_row(self): # The grand median is 1, and with the default ties="below", all the # values in the samples are counted as being below the grand median. # This would result in a row of zeros in the contingency table, which is # an error. assert_raises(ValueError, stats.median_test, [1, 1, 1], [1, 1, 1]) # With ties="above", all the values are counted as above the # grand median. assert_raises(ValueError, stats.median_test, [1, 1, 1], [1, 1, 1], ties="above") def test_bad_ties(self): assert_raises(ValueError, stats.median_test, [1, 2, 3], [4, 5], ties="foo") def test_bad_keyword(self): assert_raises(TypeError, stats.median_test, [1, 2, 3], [4, 5], foo="foo") def test_simple(self): x = [1, 2, 3] y = [1, 2, 3] stat, p, med, tbl = stats.median_test(x, y) # The median is floating point, but this equality test should be safe. assert_equal(med, 2.0) assert_array_equal(tbl, [[1, 1], [2, 2]]) # The expected values of the contingency table equal the contingency table, # so the statistic should be 0 and the p-value should be 1. assert_equal(stat, 0) assert_equal(p, 1) def test_ties_options(self): # Test the contingency table calculation. x = [1, 2, 3, 4] y = [5, 6] z = [7, 8, 9] # grand median is 5. # Default 'ties' option is "below". stat, p, m, tbl = stats.median_test(x, y, z) assert_equal(m, 5) assert_equal(tbl, [[0, 1, 3], [4, 1, 0]]) stat, p, m, tbl = stats.median_test(x, y, z, ties="ignore") assert_equal(m, 5) assert_equal(tbl, [[0, 1, 3], [4, 0, 0]]) stat, p, m, tbl = stats.median_test(x, y, z, ties="above") assert_equal(m, 5) assert_equal(tbl, [[0, 2, 3], [4, 0, 0]]) def test_basic(self): # median_test calls chi2_contingency to compute the test statistic # and p-value. Make sure it hasn't screwed up the call... x = [1, 2, 3, 4, 5] y = [2, 4, 6, 8] stat, p, m, tbl = stats.median_test(x, y) assert_equal(m, 4) assert_equal(tbl, [[1, 2], [4, 2]]) exp_stat, exp_p, dof, e = stats.chi2_contingency(tbl) assert_allclose(stat, exp_stat) assert_allclose(p, exp_p) stat, p, m, tbl = stats.median_test(x, y, lambda_=0) assert_equal(m, 4) assert_equal(tbl, [[1, 2], [4, 2]]) exp_stat, exp_p, dof, e = stats.chi2_contingency(tbl, lambda_=0) assert_allclose(stat, exp_stat) assert_allclose(p, exp_p) stat, p, m, tbl = stats.median_test(x, y, correction=False) assert_equal(m, 4) assert_equal(tbl, [[1, 2], [4, 2]]) exp_stat, exp_p, dof, e = stats.chi2_contingency(tbl, correction=False) assert_allclose(stat, exp_stat) assert_allclose(p, exp_p) if __name__ == "__main__": run_module_suite()
mit
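The TestMedianTest.test_basic case above relies on median_test delegating to chi2_contingency. A minimal standalone sketch of that relationship, assuming only numpy and scipy are installed (the sample data is hypothetical):

import numpy as np
from scipy import stats

x = [1, 2, 3, 4, 5]
y = [2, 4, 6, 8]

# median_test returns the statistic, p-value, grand median, and the
# contingency table it built internally.
stat, p, med, tbl = stats.median_test(x, y)

# Feeding that table back through chi2_contingency reproduces the result,
# which is exactly what test_basic asserts.
exp_stat, exp_p, dof, expected = stats.chi2_contingency(tbl)
assert np.allclose(stat, exp_stat) and np.allclose(p, exp_p)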
mduggan/toumeika
shikin/review.py
1
4611
# -*- coding: utf-8 -*- """ Shikin review page and associated API """ from sqlalchemy import func import datetime import random from flask import render_template, abort, request, jsonify, session from . import app, ocrfix from .model import DocSegment, DocSegmentReview, User from .util import dologin def get_user_or_abort(): # if request.remote_addr == '127.0.0.1': # user = 'admin' # else: user = session.get('username') if not user: abort(403) u = User.query.filter(User.name == user).first() if not u: abort(403) return u @app.route('/api/reviewcount/<user>') def review_count(user): u = User.query.filter(User.name == user).first() if not u: return abort(404) return jsonify({'user': user, 'count': len(u.reviews)}) @app.route('/api/unreview/<int:segmentid>') def unreview(segmentid): user = get_user_or_abort() revid = request.args.get('revid') ds = DocSegment.query.filter(DocSegment.id == segmentid).first() if not ds: abort(404) ds.viewcount = max(0, ds.viewcount-1) app.dbobj.session.add(ds) if not revid or not revid.isdigit(): app.dbobj.session.commit() return revid = int(revid) old = DocSegmentReview.query.filter(DocSegmentReview.id == revid, DocSegmentReview.user_id == user.id).first() if not old: abort(404) app.dbobj.session.delete(old) app.dbobj.session.commit() return jsonify({'status': 'ok', 'id': revid}) @app.route('/api/review/<int:segmentid>') def review_submit(segmentid): user = get_user_or_abort() ds = DocSegment.query.filter(DocSegment.id == segmentid).first() if not ds: abort(404) text = request.args.get('text') skip = request.args.get('skip') if text is None and not skip: abort(404) timestamp = datetime.datetime.now() ds.viewcount += 1 app.dbobj.session.add(ds) if skip: app.dbobj.session.commit() return jsonify({'status': 'ok'}) old = DocSegmentReview.query\ .filter(DocSegmentReview.segment_id == ds.id)\ .order_by(DocSegmentReview.rev.desc())\ .first() if old is not None: rev = old.rev + 1 else: rev = 1 newrev = DocSegmentReview(segment=ds, rev=rev, timestamp=timestamp, user=user, text=text) app.dbobj.session.add(newrev) app.dbobj.session.commit() return jsonify({'status': 'ok', 'id': newrev.id}) @app.route('/api/reviewdata', methods=['GET']) def reviewdata(): # Find a random early page with lots of unreviewed items. This way even # with multiple simultaneous users they should get different pages. minviewcount = app.dbobj.session.query(func.min(DocSegment.viewcount)).one()[0] q = app.dbobj.session.query(DocSegment.doc_id, DocSegment.page)\ .filter(DocSegment.ocrtext != None)\ .filter(DocSegment.viewcount <= minviewcount)\ .distinct() pages = list(q.all()) app.logger.debug("%d pages with segments of only %d views" % (len(pages), minviewcount)) # FIXME: this kinda works, but as all the pages get reviewed it will tend # toward giving all users the same page. not really a problem until I have # more than 1 user.
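# Illustration (not in the original source): if segments cluster as, say, {viewcount 0: 12 pages, viewcount 1: 40 pages}, then minviewcount is 0 and only those 12 least-reviewed pages are candidates for the random.choice() below.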
docid, page = random.choice(pages) q = DocSegment.query.filter(DocSegment.doc_id == docid)\ .filter(DocSegment.page == page)\ .filter(DocSegment.viewcount <= minviewcount) segments = q.all() if not segments: abort(404) segdata = [] for d in segments: if d.usertext is None: txt = ocrfix.guess_fix(d.ocrtext) suggests = ocrfix.suggestions(d) else: txt = d.usertext.text suggests = [] lines = max(len(d.ocrtext.splitlines()), len(txt.splitlines())) segdata.append(dict(ocrtext=d.ocrtext, text=txt, segment_id=d.id, x1=d.x1, x2=d.x2, y1=d.y1, y2=d.y2, textlines=lines, docid=docid, page=page+1, suggests=suggests)) return jsonify(dict(segments=segdata, docid=docid, page=page+1)) @app.route('/review', methods=['GET', 'POST']) def review(): """ Review page """ error = None user = None if request.method == 'POST': user, error = dologin() if 'username' in session: u = get_user_or_abort() uname = u.name else: uname = None return render_template('review.html', user=uname, error=error)
bsd-2-clause
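The review API above is driven entirely by GET requests with query parameters. A hypothetical client-side sketch follows; the base URL, the requests dependency, and the assumption of an already-established login session are mine, not part of the module:

import requests  # assumed third-party dependency, not used by the module itself

BASE = "http://localhost:5000"  # assumed development-server address
client = requests.Session()     # must already carry a valid Flask session
                                # cookie; the endpoints return 403 otherwise

# Fetch a batch of segments from the least-reviewed page...
segments = client.get(BASE + "/api/reviewdata").json()["segments"]

# ...and submit each suggested text back as a review.
for seg in segments:
    client.get(BASE + "/api/review/%d" % seg["segment_id"],
               params={"text": seg["text"]})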
direvus/ansible
lib/ansible/modules/network/nxos/nxos_vxlan_vtep.py
39
10851
#!/usr/bin/python # # This file is part of Ansible # # Ansible is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Ansible is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Ansible. If not, see <http://www.gnu.org/licenses/>. # ANSIBLE_METADATA = {'metadata_version': '1.1', 'status': ['preview'], 'supported_by': 'network'} DOCUMENTATION = ''' --- module: nxos_vxlan_vtep extends_documentation_fragment: nxos version_added: "2.2" short_description: Manages VXLAN Network Virtualization Endpoint (NVE). description: - Manages VXLAN Network Virtualization Endpoint (NVE) overlay interface that terminates VXLAN tunnels. author: Gabriele Gerbino (@GGabriele) notes: - Tested against NXOSv 7.3.(0)D1(1) on VIRL - The module is used to manage NVE properties, not to create NVE interfaces. Use M(nxos_interface) if you wish to do so. - C(state=absent) removes the interface. - Default, where supported, restores params default value. options: interface: description: - Interface name for the VXLAN Network Virtualization Endpoint. required: true description: description: - Description of the NVE interface. host_reachability: description: - Specify mechanism for host reachability advertisement. type: bool shutdown: description: - Administratively shutdown the NVE interface. type: bool source_interface: description: - Specify the loopback interface whose IP address should be used for the NVE interface. source_interface_hold_down_time: description: - Suppresses advertisement of the NVE loopback address until the overlay has converged. state: description: - Determines whether the config should be present or not on the device. 
default: present choices: ['present','absent'] ''' EXAMPLES = ''' - nxos_vxlan_vtep: interface: nve1 description: default host_reachability: default source_interface: Loopback0 source_interface_hold_down_time: 30 shutdown: default ''' RETURN = ''' commands: description: commands sent to the device returned: always type: list sample: ["interface nve1", "source-interface loopback0", "source-interface hold-down-time 30", "description simple description", "shutdown", "host-reachability protocol bgp"] ''' import re from ansible.module_utils.network.nxos.nxos import get_config, load_config from ansible.module_utils.network.nxos.nxos import nxos_argument_spec, check_args from ansible.module_utils.basic import AnsibleModule from ansible.module_utils.network.common.config import CustomNetworkConfig BOOL_PARAMS = [ 'shutdown', 'host_reachability' ] PARAM_TO_COMMAND_KEYMAP = { 'description': 'description', 'host_reachability': 'host-reachability protocol bgp', 'interface': 'interface', 'shutdown': 'shutdown', 'source_interface': 'source-interface', 'source_interface_hold_down_time': 'source-interface hold-down-time' } PARAM_TO_DEFAULT_KEYMAP = { 'description': False, 'shutdown': True, 'source_interface_hold_down_time': '180', } def get_value(arg, config, module): if arg in BOOL_PARAMS: REGEX = re.compile(r'\s+{0}\s*$'.format(PARAM_TO_COMMAND_KEYMAP[arg]), re.M) NO_SHUT_REGEX = re.compile(r'\s+no shutdown\s*$', re.M) value = False if arg == 'shutdown': try: if NO_SHUT_REGEX.search(config): value = False elif REGEX.search(config): value = True except TypeError: value = False else: try: if REGEX.search(config): value = True except TypeError: value = False else: REGEX = re.compile(r'(?:{0}\s)(?P<value>.*)$'.format(PARAM_TO_COMMAND_KEYMAP[arg]), re.M) NO_DESC_REGEX = re.compile(r'\s+{0}\s*$'.format('no description'), re.M) SOURCE_INTF_REGEX = re.compile(r'(?:{0}\s)(?P<value>\S+)$'.format(PARAM_TO_COMMAND_KEYMAP[arg]), re.M) value = '' if arg == 'description': if NO_DESC_REGEX.search(config): value = False elif PARAM_TO_COMMAND_KEYMAP[arg] in config: value = REGEX.search(config).group('value').strip() elif arg == 'source_interface': for line in config.splitlines(): try: if PARAM_TO_COMMAND_KEYMAP[arg] in config: value = SOURCE_INTF_REGEX.search(config).group('value').strip() break except AttributeError: value = '' else: if PARAM_TO_COMMAND_KEYMAP[arg] in config: value = REGEX.search(config).group('value').strip() return value def get_existing(module, args): existing = {} netcfg = CustomNetworkConfig(indent=2, contents=get_config(module, flags=['all'])) interface_string = 'interface {0}'.format(module.params['interface'].lower()) parents = [interface_string] config = netcfg.get_section(parents) if config: for arg in args: existing[arg] = get_value(arg, config, module) existing['interface'] = module.params['interface'].lower() else: if interface_string in str(netcfg): existing['interface'] = module.params['interface'].lower() for arg in args: existing[arg] = '' return existing def apply_key_map(key_map, table): new_dict = {} for key, value in table.items(): new_key = key_map.get(key) if new_key: value = table.get(key) if value: new_dict[new_key] = value else: new_dict[new_key] = value return new_dict def fix_commands(commands, module): source_interface_command = '' no_source_interface_command = '' for command in commands: if 'no source-interface hold-down-time' in command: pass elif 'source-interface hold-down-time' in command: pass elif 'no source-interface' in command: no_source_interface_command = command 
elif 'source-interface' in command: source_interface_command = command if source_interface_command: commands.pop(commands.index(source_interface_command)) commands.insert(0, source_interface_command) if no_source_interface_command: commands.pop(commands.index(no_source_interface_command)) commands.append(no_source_interface_command) return commands def state_present(module, existing, proposed, candidate): commands = list() proposed_commands = apply_key_map(PARAM_TO_COMMAND_KEYMAP, proposed) existing_commands = apply_key_map(PARAM_TO_COMMAND_KEYMAP, existing) for key, value in proposed_commands.items(): if value is True: commands.append(key) elif value is False: commands.append('no {0}'.format(key)) elif value == 'default': if existing_commands.get(key): existing_value = existing_commands.get(key) commands.append('no {0} {1}'.format(key, existing_value)) else: if key.replace(' ', '_').replace('-', '_') in BOOL_PARAMS: commands.append('no {0}'.format(key.lower())) module.exit_json(commands=commands) else: command = '{0} {1}'.format(key, value.lower()) commands.append(command) if commands: commands = fix_commands(commands, module) parents = ['interface {0}'.format(module.params['interface'].lower())] candidate.add(commands, parents=parents) else: if not existing and module.params['interface']: commands = ['interface {0}'.format(module.params['interface'].lower())] candidate.add(commands, parents=[]) def state_absent(module, existing, proposed, candidate): commands = ['no interface {0}'.format(module.params['interface'].lower())] candidate.add(commands, parents=[]) def main(): argument_spec = dict( interface=dict(required=True, type='str'), description=dict(required=False, type='str'), host_reachability=dict(required=False, type='bool'), shutdown=dict(required=False, type='bool'), source_interface=dict(required=False, type='str'), source_interface_hold_down_time=dict(required=False, type='str'), state=dict(choices=['present', 'absent'], default='present', required=False), ) argument_spec.update(nxos_argument_spec) module = AnsibleModule(argument_spec=argument_spec, supports_check_mode=True) warnings = list() result = {'changed': False, 'commands': [], 'warnings': warnings} check_args(module, warnings) state = module.params['state'] args = PARAM_TO_COMMAND_KEYMAP.keys() existing = get_existing(module, args) proposed_args = dict((k, v) for k, v in module.params.items() if v is not None and k in args) proposed = {} for key, value in proposed_args.items(): if key != 'interface': if str(value).lower() == 'default': value = PARAM_TO_DEFAULT_KEYMAP.get(key) if value is None: if key in BOOL_PARAMS: value = False else: value = 'default' if str(existing.get(key)).lower() != str(value).lower(): proposed[key] = value candidate = CustomNetworkConfig(indent=3) if state == 'present': if not existing: warnings.append("The proposed NVE interface did not exist. " "It's recommended to use nxos_interface to create " "all logical interfaces.") state_present(module, existing, proposed, candidate) elif state == 'absent' and existing: state_absent(module, existing, proposed, candidate) if candidate: candidate = candidate.items_text() result['commands'] = candidate result['changed'] = True load_config(module, candidate) module.exit_json(**result) if __name__ == '__main__': main()
gpl-3.0
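The heart of state_present() above is a keymap-driven diff between proposed and existing values. A condensed standalone sketch of that pattern (two keys only; the real module also handles booleans and 'default' sentinels):

PARAM_TO_COMMAND_KEYMAP = {
    'description': 'description',
    'source_interface': 'source-interface',
}

def diff_commands(existing, proposed):
    # Emit a CLI command only for keys whose value actually changes.
    commands = []
    for key, value in proposed.items():
        if str(existing.get(key)).lower() != str(value).lower():
            commands.append('{0} {1}'.format(PARAM_TO_COMMAND_KEYMAP[key], value))
    return commands

print(diff_commands({'description': 'old', 'source_interface': 'loopback0'},
                    {'description': 'new', 'source_interface': 'loopback0'}))
# -> ['description new']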
shuggiefisher/potato
django/contrib/gis/db/models/query.py
290
35968
from django.db import connections from django.db.models.query import QuerySet, Q, ValuesQuerySet, ValuesListQuerySet from django.contrib.gis.db.models import aggregates from django.contrib.gis.db.models.fields import get_srid_info, GeometryField, PointField, LineStringField from django.contrib.gis.db.models.sql import AreaField, DistanceField, GeomField, GeoQuery, GeoWhereNode from django.contrib.gis.geometry.backend import Geometry from django.contrib.gis.measure import Area, Distance class GeoQuerySet(QuerySet): "The Geographic QuerySet." ### Methods overloaded from QuerySet ### def __init__(self, model=None, query=None, using=None): super(GeoQuerySet, self).__init__(model=model, query=query, using=using) self.query = query or GeoQuery(self.model) def values(self, *fields): return self._clone(klass=GeoValuesQuerySet, setup=True, _fields=fields) def values_list(self, *fields, **kwargs): flat = kwargs.pop('flat', False) if kwargs: raise TypeError('Unexpected keyword arguments to values_list: %s' % (kwargs.keys(),)) if flat and len(fields) > 1: raise TypeError("'flat' is not valid when values_list is called with more than one field.") return self._clone(klass=GeoValuesListQuerySet, setup=True, flat=flat, _fields=fields) ### GeoQuerySet Methods ### def area(self, tolerance=0.05, **kwargs): """ Returns the area of the geographic field in an `area` attribute on each element of this GeoQuerySet. """ # Performing setup here rather than in `_spatial_attribute` so that # we can get the units for `AreaField`. procedure_args, geo_field = self._spatial_setup('area', field_name=kwargs.get('field_name', None)) s = {'procedure_args' : procedure_args, 'geo_field' : geo_field, 'setup' : False, } connection = connections[self.db] backend = connection.ops if backend.oracle: s['procedure_fmt'] = '%(geo_col)s,%(tolerance)s' s['procedure_args']['tolerance'] = tolerance s['select_field'] = AreaField('sq_m') # Oracle returns area in units of meters. elif backend.postgis or backend.spatialite: if backend.geography: # Geography fields support area calculation, returns square meters. s['select_field'] = AreaField('sq_m') elif not geo_field.geodetic(connection): # Getting the area units of the geographic field. s['select_field'] = AreaField(Area.unit_attname(geo_field.units_name(connection))) else: # TODO: Do we want to support raw number areas for geodetic fields? raise Exception('Area on geodetic coordinate systems not supported.') return self._spatial_attribute('area', s, **kwargs) def centroid(self, **kwargs): """ Returns the centroid of the geographic field in a `centroid` attribute on each element of this GeoQuerySet. """ return self._geom_attribute('centroid', **kwargs) def collect(self, **kwargs): """ Performs an aggregate collect operation on the given geometry field. This is analogous to a union operation, but much faster because boundaries are not dissolved. """ return self._spatial_aggregate(aggregates.Collect, **kwargs) def difference(self, geom, **kwargs): """ Returns the spatial difference of the geographic field in a `difference` attribute on each element of this GeoQuerySet. """ return self._geomset_attribute('difference', geom, **kwargs) def distance(self, geom, **kwargs): """ Returns the distance from the given geographic field name to the given geometry in a `distance` attribute on each element of the GeoQuerySet.
Keyword Arguments: `spheroid` => If the geometry field is geodetic and PostGIS is the spatial database, then the more accurate spheroid calculation will be used instead of the quicker sphere calculation. `tolerance` => Used only for Oracle. The tolerance is in meters -- a default of 5 centimeters (0.05) is used. """ return self._distance_attribute('distance', geom, **kwargs) def envelope(self, **kwargs): """ Returns a Geometry representing the bounding box of the Geometry field in an `envelope` attribute on each element of the GeoQuerySet. """ return self._geom_attribute('envelope', **kwargs) def extent(self, **kwargs): """ Returns the extent (aggregate) of the features in the GeoQuerySet. The extent will be returned as a 4-tuple, consisting of (xmin, ymin, xmax, ymax). """ return self._spatial_aggregate(aggregates.Extent, **kwargs) def extent3d(self, **kwargs): """ Returns the aggregate extent, in 3D, of the features in the GeoQuerySet. It is returned as a 6-tuple, comprising: (xmin, ymin, zmin, xmax, ymax, zmax). """ return self._spatial_aggregate(aggregates.Extent3D, **kwargs) def force_rhr(self, **kwargs): """ Returns a modified version of the Polygon/MultiPolygon in which all of the vertices follow the Right-Hand-Rule. By default, this is attached as the `force_rhr` attribute on each element of the GeoQuerySet. """ return self._geom_attribute('force_rhr', **kwargs) def geojson(self, precision=8, crs=False, bbox=False, **kwargs): """ Returns a GeoJSON representation of the geometry field in a `geojson` attribute on each element of the GeoQuerySet. The `crs` and `bbox` keywords may be set to True if the user wants the coordinate reference system and the bounding box to be included in the GeoJSON representation of the geometry. """ backend = connections[self.db].ops if not backend.geojson: raise NotImplementedError('Only PostGIS 1.3.4+ supports GeoJSON serialization.') if not isinstance(precision, (int, long)): raise TypeError('Precision keyword must be set with an integer.') # Setting the options flag -- which depends on which version of # PostGIS we're using. if backend.spatial_version >= (1, 4, 0): options = 0 if crs and bbox: options = 3 elif bbox: options = 1 elif crs: options = 2 else: options = 0 if crs and bbox: options = 3 elif crs: options = 1 elif bbox: options = 2 s = {'desc' : 'GeoJSON', 'procedure_args' : {'precision' : precision, 'options' : options}, 'procedure_fmt' : '%(geo_col)s,%(precision)s,%(options)s', } return self._spatial_attribute('geojson', s, **kwargs) def geohash(self, precision=20, **kwargs): """ Returns a GeoHash representation of the given field in a `geohash` attribute on each element of the GeoQuerySet. The `precision` keyword may be used to customize the number of _characters_ used in the output GeoHash, the default is 20. """ s = {'desc' : 'GeoHash', 'procedure_args': {'precision': precision}, 'procedure_fmt': '%(geo_col)s,%(precision)s', } return self._spatial_attribute('geohash', s, **kwargs) def gml(self, precision=8, version=2, **kwargs): """ Returns GML representation of the given field in a `gml` attribute on each element of the GeoQuerySet. """ backend = connections[self.db].ops s = {'desc' : 'GML', 'procedure_args' : {'precision' : precision}} if backend.postgis: # PostGIS AsGML() aggregate function parameter order depends on the # version -- uggh.
if backend.spatial_version > (1, 3, 1): procedure_fmt = '%(version)s,%(geo_col)s,%(precision)s' else: procedure_fmt = '%(geo_col)s,%(precision)s,%(version)s' s['procedure_args'] = {'precision' : precision, 'version' : version} return self._spatial_attribute('gml', s, **kwargs) def intersection(self, geom, **kwargs): """ Returns the spatial intersection of the Geometry field in an `intersection` attribute on each element of this GeoQuerySet. """ return self._geomset_attribute('intersection', geom, **kwargs) def kml(self, **kwargs): """ Returns KML representation of the geometry field in a `kml` attribute on each element of this GeoQuerySet. """ s = {'desc' : 'KML', 'procedure_fmt' : '%(geo_col)s,%(precision)s', 'procedure_args' : {'precision' : kwargs.pop('precision', 8)}, } return self._spatial_attribute('kml', s, **kwargs) def length(self, **kwargs): """ Returns the length of the geometry field as a `Distance` object stored in a `length` attribute on each element of this GeoQuerySet. """ return self._distance_attribute('length', None, **kwargs) def make_line(self, **kwargs): """ Creates a linestring from all of the PointField geometries in this GeoQuerySet and returns it. This is a spatial aggregate method, and thus returns a geometry rather than a GeoQuerySet. """ return self._spatial_aggregate(aggregates.MakeLine, geo_field_type=PointField, **kwargs) def mem_size(self, **kwargs): """ Returns the memory size (number of bytes) that the geometry field takes in a `mem_size` attribute on each element of this GeoQuerySet. """ return self._spatial_attribute('mem_size', {}, **kwargs) def num_geom(self, **kwargs): """ Returns the number of geometries if the field is a GeometryCollection or Multi* Field in a `num_geom` attribute on each element of this GeoQuerySet; otherwise sets with None. """ return self._spatial_attribute('num_geom', {}, **kwargs) def num_points(self, **kwargs): """ Returns the number of points in the first linestring in the Geometry field in a `num_points` attribute on each element of this GeoQuerySet; otherwise sets with None. """ return self._spatial_attribute('num_points', {}, **kwargs) def perimeter(self, **kwargs): """ Returns the perimeter of the geometry field as a `Distance` object stored in a `perimeter` attribute on each element of this GeoQuerySet. """ return self._distance_attribute('perimeter', None, **kwargs) def point_on_surface(self, **kwargs): """ Returns a Point geometry guaranteed to lie on the surface of the Geometry field in a `point_on_surface` attribute on each element of this GeoQuerySet; otherwise sets with None. """ return self._geom_attribute('point_on_surface', **kwargs) def reverse_geom(self, **kwargs): """ Reverses the coordinate order of the geometry, and attaches as a `reverse` attribute on each element of this GeoQuerySet. """ s = {'select_field' : GeomField(),} kwargs.setdefault('model_att', 'reverse_geom') if connections[self.db].ops.oracle: s['geo_field_type'] = LineStringField return self._spatial_attribute('reverse', s, **kwargs) def scale(self, x, y, z=0.0, **kwargs): """ Scales the geometry to a new size by multiplying the ordinates with the given x,y,z scale factors.
""" if connections[self.db].ops.spatialite: if z != 0.0: raise NotImplementedError('SpatiaLite does not support 3D scaling.') s = {'procedure_fmt' : '%(geo_col)s,%(x)s,%(y)s', 'procedure_args' : {'x' : x, 'y' : y}, 'select_field' : GeomField(), } else: s = {'procedure_fmt' : '%(geo_col)s,%(x)s,%(y)s,%(z)s', 'procedure_args' : {'x' : x, 'y' : y, 'z' : z}, 'select_field' : GeomField(), } return self._spatial_attribute('scale', s, **kwargs) def snap_to_grid(self, *args, **kwargs): """ Snap all points of the input geometry to the grid. How the geometry is snapped to the grid depends on how many arguments were given: - 1 argument : A single size to snap both the X and Y grids to. - 2 arguments: X and Y sizes to snap the grid to. - 4 arguments: X, Y sizes and the X, Y origins. """ if False in [isinstance(arg, (float, int, long)) for arg in args]: raise TypeError('Size argument(s) for the grid must be a float or integer values.') nargs = len(args) if nargs == 1: size = args[0] procedure_fmt = '%(geo_col)s,%(size)s' procedure_args = {'size' : size} elif nargs == 2: xsize, ysize = args procedure_fmt = '%(geo_col)s,%(xsize)s,%(ysize)s' procedure_args = {'xsize' : xsize, 'ysize' : ysize} elif nargs == 4: xsize, ysize, xorigin, yorigin = args procedure_fmt = '%(geo_col)s,%(xorigin)s,%(yorigin)s,%(xsize)s,%(ysize)s' procedure_args = {'xsize' : xsize, 'ysize' : ysize, 'xorigin' : xorigin, 'yorigin' : yorigin} else: raise ValueError('Must provide 1, 2, or 4 arguments to `snap_to_grid`.') s = {'procedure_fmt' : procedure_fmt, 'procedure_args' : procedure_args, 'select_field' : GeomField(), } return self._spatial_attribute('snap_to_grid', s, **kwargs) def svg(self, relative=False, precision=8, **kwargs): """ Returns SVG representation of the geographic field in a `svg` attribute on each element of this GeoQuerySet. Keyword Arguments: `relative` => If set to True, this will evaluate the path in terms of relative moves (rather than absolute). `precision` => May be used to set the maximum number of decimal digits used in output (defaults to 8). """ relative = int(bool(relative)) if not isinstance(precision, (int, long)): raise TypeError('SVG precision keyword argument must be an integer.') s = {'desc' : 'SVG', 'procedure_fmt' : '%(geo_col)s,%(rel)s,%(precision)s', 'procedure_args' : {'rel' : relative, 'precision' : precision, } } return self._spatial_attribute('svg', s, **kwargs) def sym_difference(self, geom, **kwargs): """ Returns the symmetric difference of the geographic field in a `sym_difference` attribute on each element of this GeoQuerySet. """ return self._geomset_attribute('sym_difference', geom, **kwargs) def translate(self, x, y, z=0.0, **kwargs): """ Translates the geometry to a new location using the given numeric parameters as offsets. """ if connections[self.db].ops.spatialite: if z != 0.0: raise NotImplementedError('SpatiaLite does not support 3D translation.') s = {'procedure_fmt' : '%(geo_col)s,%(x)s,%(y)s', 'procedure_args' : {'x' : x, 'y' : y}, 'select_field' : GeomField(), } else: s = {'procedure_fmt' : '%(geo_col)s,%(x)s,%(y)s,%(z)s', 'procedure_args' : {'x' : x, 'y' : y, 'z' : z}, 'select_field' : GeomField(), } return self._spatial_attribute('translate', s, **kwargs) def transform(self, srid=4326, **kwargs): """ Transforms the given geometry field to the given SRID. If no SRID is provided, the transformation will default to using 4326 (WGS84). 
""" if not isinstance(srid, (int, long)): raise TypeError('An integer SRID must be provided.') field_name = kwargs.get('field_name', None) tmp, geo_field = self._spatial_setup('transform', field_name=field_name) # Getting the selection SQL for the given geographic field. field_col = self._geocol_select(geo_field, field_name) # Why cascading substitutions? Because spatial backends like # Oracle and MySQL already require a function call to convert to text, thus # when there's also a transformation we need to cascade the substitutions. # For example, 'SDO_UTIL.TO_WKTGEOMETRY(SDO_CS.TRANSFORM( ... )' geo_col = self.query.custom_select.get(geo_field, field_col) # Setting the key for the field's column with the custom SELECT SQL to # override the geometry column returned from the database. custom_sel = '%s(%s, %s)' % (connections[self.db].ops.transform, geo_col, srid) # TODO: Should we have this as an alias? # custom_sel = '(%s(%s, %s)) AS %s' % (SpatialBackend.transform, geo_col, srid, qn(geo_field.name)) self.query.transformed_srid = srid # So other GeoQuerySet methods self.query.custom_select[geo_field] = custom_sel return self._clone() def union(self, geom, **kwargs): """ Returns the union of the geographic field with the given Geometry in a `union` attribute on each element of this GeoQuerySet. """ return self._geomset_attribute('union', geom, **kwargs) def unionagg(self, **kwargs): """ Performs an aggregate union on the given geometry field. Returns None if the GeoQuerySet is empty. The `tolerance` keyword is for Oracle backends only. """ return self._spatial_aggregate(aggregates.Union, **kwargs) ### Private API -- Abstracted DRY routines. ### def _spatial_setup(self, att, desc=None, field_name=None, geo_field_type=None): """ Performs set up for executing the spatial function. """ # Does the spatial backend support this? connection = connections[self.db] func = getattr(connection.ops, att, False) if desc is None: desc = att if not func: raise NotImplementedError('%s stored procedure not available on ' 'the %s backend.' % (desc, connection.ops.name)) # Initializing the procedure arguments. procedure_args = {'function' : func} # Is there a geographic field in the model to perform this # operation on? geo_field = self.query._geo_field(field_name) if not geo_field: raise TypeError('%s output only available on GeometryFields.' % func) # If the `geo_field_type` keyword was used, then enforce that # type limitation. if not geo_field_type is None and not isinstance(geo_field, geo_field_type): raise TypeError('"%s" stored procedures may only be called on %ss.' % (func, geo_field_type.__name__)) # Setting the procedure args. procedure_args['geo_col'] = self._geocol_select(geo_field, field_name) return procedure_args, geo_field def _spatial_aggregate(self, aggregate, field_name=None, geo_field_type=None, tolerance=0.05): """ DRY routine for calling aggregate spatial stored procedures and returning their result to the caller of the function. """ # Getting the field the geographic aggregate will be called on. geo_field = self.query._geo_field(field_name) if not geo_field: raise TypeError('%s aggregate only available on GeometryFields.' % aggregate.name) # Checking if there are any geo field type limitations on this # aggregate (e.g. ST_Makeline only operates on PointFields). if not geo_field_type is None and not isinstance(geo_field, geo_field_type): raise TypeError('%s aggregate may only be called on %ss.' 
% (aggregate.name, geo_field_type.__name__)) # Getting the string expression of the field name, as this is the # argument taken by `Aggregate` objects. agg_col = field_name or geo_field.name # Adding any keyword parameters for the Aggregate object. Oracle backends # in particular need an additional `tolerance` parameter. agg_kwargs = {} if connections[self.db].ops.oracle: agg_kwargs['tolerance'] = tolerance # Calling the QuerySet.aggregate, and returning only the value of the aggregate. return self.aggregate(geoagg=aggregate(agg_col, **agg_kwargs))['geoagg'] def _spatial_attribute(self, att, settings, field_name=None, model_att=None): """ DRY routine for calling a spatial stored procedure on a geometry column and attaching its output as an attribute of the model. Arguments: att: The name of the spatial attribute that holds the spatial SQL function to call. settings: Dictionary of internal settings to customize for the spatial procedure. Public Keyword Arguments: field_name: The name of the geographic field to call the spatial function on. May also be a lookup to a geometry field as part of a foreign key relation. model_att: The name of the model attribute to attach the output of the spatial function to. """ # Default settings. settings.setdefault('desc', None) settings.setdefault('geom_args', ()) settings.setdefault('geom_field', None) settings.setdefault('procedure_args', {}) settings.setdefault('procedure_fmt', '%(geo_col)s') settings.setdefault('select_params', []) connection = connections[self.db] backend = connection.ops # Performing setup for the spatial column, unless told not to. if settings.get('setup', True): default_args, geo_field = self._spatial_setup(att, desc=settings['desc'], field_name=field_name, geo_field_type=settings.get('geo_field_type', None)) for k, v in default_args.iteritems(): settings['procedure_args'].setdefault(k, v) else: geo_field = settings['geo_field'] # The attribute to attach to the model. if not isinstance(model_att, basestring): model_att = att # Special handling for any argument that is a geometry. for name in settings['geom_args']: # Using the field's get_placeholder() routine to get any needed # transformation SQL. geom = geo_field.get_prep_value(settings['procedure_args'][name]) params = geo_field.get_db_prep_lookup('contains', geom, connection=connection) geom_placeholder = geo_field.get_placeholder(geom, connection) # Replacing the procedure format with that of any needed # transformation SQL. old_fmt = '%%(%s)s' % name new_fmt = geom_placeholder % '%%s' settings['procedure_fmt'] = settings['procedure_fmt'].replace(old_fmt, new_fmt) settings['select_params'].extend(params) # Getting the format for the stored procedure. fmt = '%%(function)s(%s)' % settings['procedure_fmt'] # If the result of this function needs to be converted. if settings.get('select_field', False): sel_fld = settings['select_field'] if isinstance(sel_fld, GeomField) and backend.select: self.query.custom_select[model_att] = backend.select if connection.ops.oracle: sel_fld.empty_strings_allowed = False self.query.extra_select_fields[model_att] = sel_fld # Finally, setting the extra selection attribute with # the format string expanded with the stored procedure # arguments. return self.extra(select={model_att : fmt % settings['procedure_args']}, select_params=settings['select_params']) def _distance_attribute(self, func, geom=None, tolerance=0.05, spheroid=False, **kwargs): """ DRY routine for GeoQuerySet distance attribute routines.
""" # Setting up the distance procedure arguments. procedure_args, geo_field = self._spatial_setup(func, field_name=kwargs.get('field_name', None)) # If geodetic defaulting distance attribute to meters (Oracle and # PostGIS spherical distances return meters). Otherwise, use the # units of the geometry field. connection = connections[self.db] geodetic = geo_field.geodetic(connection) geography = geo_field.geography if geodetic: dist_att = 'm' else: dist_att = Distance.unit_attname(geo_field.units_name(connection)) # Shortcut booleans for what distance function we're using and # whether the geometry field is 3D. distance = func == 'distance' length = func == 'length' perimeter = func == 'perimeter' if not (distance or length or perimeter): raise ValueError('Unknown distance function: %s' % func) geom_3d = geo_field.dim == 3 # The field's get_db_prep_lookup() is used to get any # extra distance parameters. Here we set up the # parameters that will be passed in to field's function. lookup_params = [geom or 'POINT (0 0)', 0] # Getting the spatial backend operations. backend = connection.ops # If the spheroid calculation is desired, either by the `spheroid` # keyword or when calculating the length of geodetic field, make # sure the 'spheroid' distance setting string is passed in so we # get the correct spatial stored procedure. if spheroid or (backend.postgis and geodetic and (not geography) and length): lookup_params.append('spheroid') lookup_params = geo_field.get_prep_value(lookup_params) params = geo_field.get_db_prep_lookup('distance_lte', lookup_params, connection=connection) # The `geom_args` flag is set to true if a geometry parameter was # passed in. geom_args = bool(geom) if backend.oracle: if distance: procedure_fmt = '%(geo_col)s,%(geom)s,%(tolerance)s' elif length or perimeter: procedure_fmt = '%(geo_col)s,%(tolerance)s' procedure_args['tolerance'] = tolerance else: # Getting whether this field is in units of degrees since the field may have # been transformed via the `transform` GeoQuerySet method. if self.query.transformed_srid: u, unit_name, s = get_srid_info(self.query.transformed_srid, connection) geodetic = unit_name in geo_field.geodetic_units if backend.spatialite and geodetic: raise ValueError('SQLite does not support linear distance calculations on geodetic coordinate systems.') if distance: if self.query.transformed_srid: # Setting the `geom_args` flag to false because we want to handle # transformation SQL here, rather than the way done by default # (which will transform to the original SRID of the field rather # than to what was transformed to). geom_args = False procedure_fmt = '%s(%%(geo_col)s, %s)' % (backend.transform, self.query.transformed_srid) if geom.srid is None or geom.srid == self.query.transformed_srid: # If the geom parameter srid is None, it is assumed the coordinates # are in the transformed units. A placeholder is used for the # geometry parameter. `GeomFromText` constructor is also needed # to wrap geom placeholder for SpatiaLite. if backend.spatialite: procedure_fmt += ', %s(%%%%s, %s)' % (backend.from_text, self.query.transformed_srid) else: procedure_fmt += ', %%s' else: # We need to transform the geom to the srid specified in `transform()`, # so wrapping the geometry placeholder in transformation SQL. # SpatiaLite also needs geometry placeholder wrapped in `GeomFromText` # constructor. 
if backend.spatialite: procedure_fmt += ', %s(%s(%%%%s, %s), %s)' % (backend.transform, backend.from_text, geom.srid, self.query.transformed_srid) else: procedure_fmt += ', %s(%%%%s, %s)' % (backend.transform, self.query.transformed_srid) else: # `transform()` was not used on this GeoQuerySet. procedure_fmt = '%(geo_col)s,%(geom)s' if not geography and geodetic: # Spherical distance calculation is needed (because the geographic # field is geodetic). However, the PostGIS ST_distance_sphere/spheroid() # procedures may only do queries from point columns to point geometries, # so some error checking is required. if not backend.geography: if not isinstance(geo_field, PointField): raise ValueError('Spherical distance calculation only supported on PointFields.') if not str(Geometry(buffer(params[0].ewkb)).geom_type) == 'Point': raise ValueError('Spherical distance calculation only supported with Point Geometry parameters') # The `function` procedure argument needs to be set differently for # geodetic distance calculations. if spheroid: # Call to distance_spheroid() requires spheroid param as well. procedure_fmt += ",'%(spheroid)s'" procedure_args.update({'function' : backend.distance_spheroid, 'spheroid' : params[1]}) else: procedure_args.update({'function' : backend.distance_sphere}) elif length or perimeter: procedure_fmt = '%(geo_col)s' if not geography and geodetic and length: # There's no `length_sphere`, and `length_spheroid` also # works on 3D geometries. procedure_fmt += ",'%(spheroid)s'" procedure_args.update({'function' : backend.length_spheroid, 'spheroid' : params[1]}) elif geom_3d and backend.postgis: # Use 3D variants of perimeter and length routines on PostGIS. if perimeter: procedure_args.update({'function' : backend.perimeter3d}) elif length: procedure_args.update({'function' : backend.length3d}) # Setting up the settings for `_spatial_attribute`. s = {'select_field' : DistanceField(dist_att), 'setup' : False, 'geo_field' : geo_field, 'procedure_args' : procedure_args, 'procedure_fmt' : procedure_fmt, } if geom_args: s['geom_args'] = ('geom',) s['procedure_args']['geom'] = geom elif geom: # The geometry is passed in as a parameter because we handled # transformation conditions in this routine. s['select_params'] = [backend.Adapter(geom)] return self._spatial_attribute(func, s, **kwargs) def _geom_attribute(self, func, tolerance=0.05, **kwargs): """ DRY routine for setting up a GeoQuerySet method that attaches a Geometry attribute (e.g., `centroid`, `point_on_surface`). """ s = {'select_field' : GeomField(),} if connections[self.db].ops.oracle: s['procedure_fmt'] = '%(geo_col)s,%(tolerance)s' s['procedure_args'] = {'tolerance' : tolerance} return self._spatial_attribute(func, s, **kwargs) def _geomset_attribute(self, func, geom, tolerance=0.05, **kwargs): """ DRY routine for setting up a GeoQuerySet method that attaches a Geometry attribute and takes a Geometry parameter. This is used for geometry set-like operations (e.g., intersection, difference, union, sym_difference). """ s = {'geom_args' : ('geom',), 'select_field' : GeomField(), 'procedure_fmt' : '%(geo_col)s,%(geom)s', 'procedure_args' : {'geom' : geom}, } if connections[self.db].ops.oracle: s['procedure_fmt'] += ',%(tolerance)s' s['procedure_args']['tolerance'] = tolerance return self._spatial_attribute(func, s, **kwargs) def _geocol_select(self, geo_field, field_name): """ Helper routine for constructing the SQL to select the geographic column.
Takes into account if the geographic field is in a ForeignKey relation to the current model. """ opts = self.model._meta if not geo_field in opts.fields: # Is this operation going to be on a related geographic field? # If so, it'll have to be added to the select related information # (e.g., if 'location__point' was given as the field name). self.query.add_select_related([field_name]) compiler = self.query.get_compiler(self.db) compiler.pre_sql_setup() rel_table, rel_col = self.query.related_select_cols[self.query.related_select_fields.index(geo_field)] return compiler._field_column(geo_field, rel_table) elif not geo_field in opts.local_fields: # This geographic field is inherited from another model, so we have to # use the db table for the _parent_ model instead. tmp_fld, parent_model, direct, m2m = opts.get_field_by_name(geo_field.name) return self.query.get_compiler(self.db)._field_column(geo_field, parent_model._meta.db_table) else: return self.query.get_compiler(self.db)._field_column(geo_field) class GeoValuesQuerySet(ValuesQuerySet): def __init__(self, *args, **kwargs): super(GeoValuesQuerySet, self).__init__(*args, **kwargs) # This flag tells `resolve_columns` to run the values through # `convert_values`. This ensures that Geometry objects instead # of string values are returned with `values()` or `values_list()`. self.query.geo_values = True class GeoValuesListQuerySet(GeoValuesQuerySet, ValuesListQuerySet): pass
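# --- Illustrative sketch (editor's addition, not part of the original module) ---
# A hedged example of how the distance machinery above is reached from user
# code. `Zipcode` and `pnt` are hypothetical (a model with a geodetic
# PointField, and a Point geometry); the function is defined here purely as
# documentation and is never called.
def _example_distance_usage(Zipcode, pnt):
    # distance() attaches a `distance` attribute to each returned object;
    # per the dist_att logic in _distance_attribute, it is in meters for
    # geodetic fields, otherwise in the field's own units.
    qs = Zipcode.objects.distance(pnt)
    # spheroid=True requests the slower but more accurate spheroidal
    # calculation on geodetic fields.
    qs_spheroid = Zipcode.objects.distance(pnt, spheroid=True)
    return [z.distance.km for z in qs], qs_spheroid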
bsd-3-clause
falkTX/Cadence
src/systray.py
1
23718
#!/usr/bin/env python3 # -*- coding: utf-8 -*- # KDE, App-Indicator or Qt Systray # Copyright (C) 2011-2018 Filipe Coelho <falktx@falktx.com> # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # For a full copy of the GNU General Public License see the COPYING file # Imports (Global) import os, sys if True: from PyQt5.QtCore import QTimer from PyQt5.QtGui import QIcon from PyQt5.QtWidgets import QAction, QMainWindow, QMenu, QSystemTrayIcon else: from PyQt4.QtCore import QTimer from PyQt4.QtGui import QIcon from PyQt4.QtGui import QAction, QMainWindow, QMenu, QSystemTrayIcon try: if False and os.getenv("DESKTOP_SESSION") in ("ubuntu", "ubuntu-2d") and not os.path.exists("/var/cadence/no_app_indicators"): from gi import require_version require_version('Gtk', '3.0') from gi.repository import Gtk require_version('AppIndicator3', '0.1') from gi.repository import AppIndicator3 as AppIndicator TrayEngine = "AppIndicator" #elif os.getenv("KDE_SESSION_VERSION") >= 5: #TrayEngine = "Qt" #elif os.getenv("KDE_FULL_SESSION") or os.getenv("DESKTOP_SESSION") == "kde-plasma": #from PyKDE5.kdeui import KAction, KIcon, KMenu, KStatusNotifierItem #TrayEngine = "KDE" else: TrayEngine = "Qt" except: TrayEngine = "Qt" print("Using Tray Engine '%s'" % TrayEngine) iActNameId = 0 iActWidget = 1 iActParentMenuId = 2 iActFunc = 3 iSepNameId = 0 iSepWidget = 1 iSepParentMenuId = 2 iMenuNameId = 0 iMenuWidget = 1 iMenuParentMenuId = 2 # Get Icon from user theme, using our own as backup (Oxygen) def getIcon(icon, size=16): return QIcon.fromTheme(icon, QIcon(":/%ix%i/%s.png" % (size, size, icon))) # Global Systray class class GlobalSysTray(object): def __init__(self, parent, name, icon): object.__init__(self) self._app = None self._parent = parent self._gtk_running = False self._quit_added = False self.act_indexes = [] self.sep_indexes = [] self.menu_indexes = [] if TrayEngine == "KDE": self.menu = KMenu(parent) self.menu.setTitle(name) self.tray = KStatusNotifierItem() self.tray.setAssociatedWidget(parent) self.tray.setCategory(KStatusNotifierItem.ApplicationStatus) self.tray.setContextMenu(self.menu) self.tray.setIconByPixmap(getIcon(icon)) self.tray.setTitle(name) self.tray.setToolTipTitle(" ") self.tray.setToolTipIconByPixmap(getIcon(icon)) # Double-click is managed by KDE elif TrayEngine == "AppIndicator": self.menu = Gtk.Menu() self.tray = AppIndicator.Indicator.new(name, icon, AppIndicator.IndicatorCategory.APPLICATION_STATUS) self.tray.set_menu(self.menu) # Double-click is not possible with App-Indicators elif TrayEngine == "Qt": self.menu = QMenu(parent) self.tray = QSystemTrayIcon(getIcon(icon)) self.tray.setContextMenu(self.menu) self.tray.setParent(parent) self.tray.activated.connect(self.qt_systray_clicked) # ------------------------------------------------------------------------------------------- def addAction(self, act_name_id, act_name_string, is_check=False): if TrayEngine == "KDE": act_widget = KAction(act_name_string, self.menu) act_widget.setCheckable(is_check) self.menu.addAction(act_widget) elif TrayEngine == "AppIndicator": if is_check: act_widget = 
Gtk.CheckMenuItem(act_name_string) else: act_widget = Gtk.ImageMenuItem(act_name_string) act_widget.set_image(None) act_widget.show() self.menu.append(act_widget) elif TrayEngine == "Qt": act_widget = QAction(act_name_string, self.menu) act_widget.setCheckable(is_check) self.menu.addAction(act_widget) else: act_widget = None act_obj = [None, None, None, None] act_obj[iActNameId] = act_name_id act_obj[iActWidget] = act_widget self.act_indexes.append(act_obj) def addSeparator(self, sep_name_id): if TrayEngine == "KDE": sep_widget = self.menu.addSeparator() elif TrayEngine == "AppIndicator": sep_widget = Gtk.SeparatorMenuItem() sep_widget.show() self.menu.append(sep_widget) elif TrayEngine == "Qt": sep_widget = self.menu.addSeparator() else: sep_widget = None sep_obj = [None, None, None] sep_obj[iSepNameId] = sep_name_id sep_obj[iSepWidget] = sep_widget self.sep_indexes.append(sep_obj) def addMenu(self, menu_name_id, menu_name_string): if TrayEngine == "KDE": menu_widget = KMenu(menu_name_string, self.menu) self.menu.addMenu(menu_widget) elif TrayEngine == "AppIndicator": menu_widget = Gtk.MenuItem(menu_name_string) menu_parent = Gtk.Menu() menu_widget.set_submenu(menu_parent) menu_widget.show() self.menu.append(menu_widget) elif TrayEngine == "Qt": menu_widget = QMenu(menu_name_string, self.menu) self.menu.addMenu(menu_widget) else: menu_widget = None menu_obj = [None, None, None] menu_obj[iMenuNameId] = menu_name_id menu_obj[iMenuWidget] = menu_widget self.menu_indexes.append(menu_obj) # ------------------------------------------------------------------------------------------- def addMenuAction(self, menu_name_id, act_name_id, act_name_string, is_check=False): i = self.get_menu_index(menu_name_id) if i < 0: return menu_widget = self.menu_indexes[i][iMenuWidget] if TrayEngine == "KDE": act_widget = KAction(act_name_string, menu_widget) act_widget.setCheckable(is_check) menu_widget.addAction(act_widget) elif TrayEngine == "AppIndicator": menu_widget = menu_widget.get_submenu() if is_check: act_widget = Gtk.CheckMenuItem(act_name_string) else: act_widget = Gtk.ImageMenuItem(act_name_string) act_widget.set_image(None) act_widget.show() menu_widget.append(act_widget) elif TrayEngine == "Qt": act_widget = QAction(act_name_string, menu_widget) act_widget.setCheckable(is_check) menu_widget.addAction(act_widget) else: act_widget = None act_obj = [None, None, None, None] act_obj[iActNameId] = act_name_id act_obj[iActWidget] = act_widget act_obj[iActParentMenuId] = menu_name_id self.act_indexes.append(act_obj) def addMenuSeparator(self, menu_name_id, sep_name_id): i = self.get_menu_index(menu_name_id) if i < 0: return menu_widget = self.menu_indexes[i][iMenuWidget] if TrayEngine == "KDE": sep_widget = menu_widget.addSeparator() elif TrayEngine == "AppIndicator": menu_widget = menu_widget.get_submenu() sep_widget = Gtk.SeparatorMenuItem() sep_widget.show() menu_widget.append(sep_widget) elif TrayEngine == "Qt": sep_widget = menu_widget.addSeparator() else: sep_widget = None sep_obj = [None, None, None] sep_obj[iSepNameId] = sep_name_id sep_obj[iSepWidget] = sep_widget sep_obj[iSepParentMenuId] = menu_name_id self.sep_indexes.append(sep_obj) #def addSubMenu(self, menu_name_id, new_menu_name_id, new_menu_name_string): #menu_index = self.get_menu_index(menu_name_id) #if menu_index < 0: return #menu_widget = self.menu_indexes[menu_index][1] ##if TrayEngine == "KDE": ##new_menu_widget = KMenu(new_menu_name_string, self.menu) ##menu_widget.addMenu(new_menu_widget) ##elif TrayEngine == "AppIndicator": 
##new_menu_widget = Gtk.MenuItem(new_menu_name_string) ##new_menu_widget.show() ##menu_widget.get_submenu().append(new_menu_widget) ##parent_menu_widget = Gtk.Menu() ##new_menu_widget.set_submenu(parent_menu_widget) ##else: #if (1): #new_menu_widget = QMenu(new_menu_name_string, self.menu) #menu_widget.addMenu(new_menu_widget) #self.menu_indexes.append([new_menu_name_id, new_menu_widget, menu_name_id]) # ------------------------------------------------------------------------------------------- def connect(self, act_name_id, act_func): i = self.get_act_index(act_name_id) if i < 0: return act_widget = self.act_indexes[i][iActWidget] if TrayEngine == "AppIndicator": act_widget.connect("activate", self.gtk_call_func, act_name_id) elif TrayEngine in ("KDE", "Qt"): act_widget.triggered.connect(act_func) self.act_indexes[i][iActFunc] = act_func # ------------------------------------------------------------------------------------------- #def setActionChecked(self, act_name_id, yesno): #index = self.get_act_index(act_name_id) #if index < 0: return #act_widget = self.act_indexes[index][1] ##if TrayEngine == "KDE": ##act_widget.setChecked(yesno) ##elif TrayEngine == "AppIndicator": ##if type(act_widget) != Gtk.CheckMenuItem: ##return # Cannot continue ##act_widget.set_active(yesno) ##else: #if (1): #act_widget.setChecked(yesno) def setActionEnabled(self, act_name_id, yesno): i = self.get_act_index(act_name_id) if i < 0: return act_widget = self.act_indexes[i][iActWidget] if TrayEngine == "KDE": act_widget.setEnabled(yesno) elif TrayEngine == "AppIndicator": act_widget.set_sensitive(yesno) elif TrayEngine == "Qt": act_widget.setEnabled(yesno) def setActionIcon(self, act_name_id, icon): i = self.get_act_index(act_name_id) if i < 0: return act_widget = self.act_indexes[i][iActWidget] if TrayEngine == "KDE": act_widget.setIcon(KIcon(icon)) elif TrayEngine == "AppIndicator": if not isinstance(act_widget, Gtk.ImageMenuItem): # Cannot use icons here return act_widget.set_image(Gtk.Image.new_from_icon_name(icon, Gtk.IconSize.MENU)) #act_widget.set_always_show_image(True) elif TrayEngine == "Qt": act_widget.setIcon(getIcon(icon)) def setActionText(self, act_name_id, text): i = self.get_act_index(act_name_id) if i < 0: return act_widget = self.act_indexes[i][iActWidget] if TrayEngine == "KDE": act_widget.setText(text) elif TrayEngine == "AppIndicator": if isinstance(act_widget, Gtk.ImageMenuItem): # Fix icon reset last_icon = act_widget.get_image() act_widget.set_label(text) act_widget.set_image(last_icon) else: act_widget.set_label(text) elif TrayEngine == "Qt": act_widget.setText(text) def setIcon(self, icon): if TrayEngine == "KDE": self.tray.setIconByPixmap(getIcon(icon)) #self.tray.setToolTipIconByPixmap(getIcon(icon)) elif TrayEngine == "AppIndicator": self.tray.set_icon(icon) elif TrayEngine == "Qt": self.tray.setIcon(getIcon(icon)) def setToolTip(self, text): if TrayEngine == "KDE": self.tray.setToolTipSubTitle(text) elif TrayEngine == "AppIndicator": # ToolTips are disabled in App-Indicators by design pass elif TrayEngine == "Qt": self.tray.setToolTip(text) # ------------------------------------------------------------------------------------------- #def removeAction(self, act_name_id): #index = self.get_act_index(act_name_id) #if index < 0: return #act_widget = self.act_indexes[index][1] #parent_menu_widget = self.get_parent_menu_widget(self.act_indexes[index][2]) ##if TrayEngine == "KDE": ##parent_menu_widget.removeAction(act_widget) ##elif TrayEngine == "AppIndicator": ##act_widget.hide() 
##parent_menu_widget.remove(act_widget) ##else: #if (1): #parent_menu_widget.removeAction(act_widget) #self.act_indexes.pop(index) #def removeSeparator(self, sep_name_id): #index = self.get_sep_index(sep_name_id) #if index < 0: return #sep_widget = self.sep_indexes[index][1] #parent_menu_widget = self.get_parent_menu_widget(self.sep_indexes[index][2]) ##if TrayEngine == "KDE": ##parent_menu_widget.removeAction(sep_widget) ##elif TrayEngine == "AppIndicator": ##sep_widget.hide() ##parent_menu_widget.remove(sep_widget) ##else: #if (1): #parent_menu_widget.removeAction(sep_widget) #self.sep_indexes.pop(index) #def removeMenu(self, menu_name_id): #index = self.get_menu_index(menu_name_id) #if index < 0: return #menu_widget = self.menu_indexes[index][1] #parent_menu_widget = self.get_parent_menu_widget(self.menu_indexes[index][2]) ##if TrayEngine == "KDE": ##parent_menu_widget.removeAction(menu_widget.menuAction()) ##elif TrayEngine == "AppIndicator": ##menu_widget.hide() ##parent_menu_widget.remove(menu_widget.get_submenu()) ##else: #if (1): #parent_menu_widget.removeAction(menu_widget.menuAction()) #self.remove_actions_by_menu_name_id(menu_name_id) #self.remove_separators_by_menu_name_id(menu_name_id) #self.remove_submenus_by_menu_name_id(menu_name_id) # ------------------------------------------------------------------------------------------- #def clearAll(self): ##if TrayEngine == "KDE": ##self.menu.clear() ##elif TrayEngine == "AppIndicator": ##for child in self.menu.get_children(): ##self.menu.remove(child) ##else: #if (1): #self.menu.clear() #self.act_indexes = [] #self.sep_indexes = [] #self.menu_indexes = [] #def clearMenu(self, menu_name_id): #menu_index = self.get_menu_index(menu_name_id) #if menu_index < 0: return #menu_widget = self.menu_indexes[menu_index][1] ##if TrayEngine == "KDE": ##menu_widget.clear() ##elif TrayEngine == "AppIndicator": ##for child in menu_widget.get_submenu().get_children(): ##menu_widget.get_submenu().remove(child) ##else: #if (1): #menu_widget.clear() #list_of_submenus = [menu_name_id] #for x in range(0, 10): # 10x level deep, should cover all cases... 
#for this_menu_name_id, menu_widget, parent_menu_id in self.menu_indexes: #if parent_menu_id in list_of_submenus and this_menu_name_id not in list_of_submenus: #list_of_submenus.append(this_menu_name_id) #for this_menu_name_id in list_of_submenus: #self.remove_actions_by_menu_name_id(this_menu_name_id) #self.remove_separators_by_menu_name_id(this_menu_name_id) #self.remove_submenus_by_menu_name_id(this_menu_name_id) # ------------------------------------------------------------------------------------------- def getTrayEngine(self): return TrayEngine def isTrayAvailable(self): if TrayEngine in ("KDE", "Qt"): # Ask Qt return QSystemTrayIcon.isSystemTrayAvailable() if TrayEngine == "AppIndicator": # Ubuntu/Unity always has a systray return True return False def handleQtCloseEvent(self, event): if self.isTrayAvailable() and self._parent.isVisible(): event.accept() self.__hideShowCall() return self.close() QMainWindow.closeEvent(self._parent, event) # ------------------------------------------------------------------------------------------- def show(self): if not self._quit_added: self._quit_added = True if TrayEngine != "KDE": self.addSeparator("_quit") self.addAction("show", self._parent.tr("Minimize")) self.addAction("quit", self._parent.tr("Quit")) self.setActionIcon("quit", "application-exit") self.connect("show", self.__hideShowCall) self.connect("quit", self.__quitCall) if TrayEngine == "KDE": self.tray.setStatus(KStatusNotifierItem.Active) elif TrayEngine == "AppIndicator": self.tray.set_status(AppIndicator.IndicatorStatus.ACTIVE) elif TrayEngine == "Qt": self.tray.show() def hide(self): if TrayEngine == "KDE": self.tray.setStatus(KStatusNotifierItem.Passive) elif TrayEngine == "AppIndicator": self.tray.set_status(AppIndicator.IndicatorStatus.PASSIVE) elif TrayEngine == "Qt": self.tray.hide() def close(self): if TrayEngine == "KDE": self.menu.close() elif TrayEngine == "AppIndicator": if self._gtk_running: self._gtk_running = False Gtk.main_quit() elif TrayEngine == "Qt": self.menu.close() def exec_(self, app): self._app = app if TrayEngine == "AppIndicator": self._gtk_running = True return Gtk.main() else: return app.exec_() # ------------------------------------------------------------------------------------------- def get_act_index(self, act_name_id): for i in range(len(self.act_indexes)): if self.act_indexes[i][iActNameId] == act_name_id: return i else: print("systray.py - Failed to get action index for %s" % act_name_id) return -1 def get_sep_index(self, sep_name_id): for i in range(len(self.sep_indexes)): if self.sep_indexes[i][iSepNameId] == sep_name_id: return i else: print("systray.py - Failed to get separator index for %s" % sep_name_id) return -1 def get_menu_index(self, menu_name_id): for i in range(len(self.menu_indexes)): if self.menu_indexes[i][iMenuNameId] == menu_name_id: return i else: print("systray.py - Failed to get menu index for %s" % menu_name_id) return -1 #def get_parent_menu_widget(self, parent_menu_id): #if parent_menu_id != None: #menu_index = self.get_menu_index(parent_menu_id) #if menu_index >= 0: #return self.menu_indexes[menu_index][1] #else: #print("systray.py::Failed to get parent Menu widget for", parent_menu_id) #return None #else: #return self.menu #def remove_actions_by_menu_name_id(self, menu_name_id): #h = 0 #for i in range(len(self.act_indexes)): #act_name_id, act_widget, parent_menu_id, act_func = self.act_indexes[i - h] #if parent_menu_id == menu_name_id: #self.act_indexes.pop(i - h) #h += 1 #def remove_separators_by_menu_name_id(self, 
menu_name_id): #h = 0 #for i in range(len(self.sep_indexes)): #sep_name_id, sep_widget, parent_menu_id = self.sep_indexes[i - h] #if parent_menu_id == menu_name_id: #self.sep_indexes.pop(i - h) #h += 1 #def remove_submenus_by_menu_name_id(self, submenu_name_id): #h = 0 #for i in range(len(self.menu_indexes)): #menu_name_id, menu_widget, parent_menu_id = self.menu_indexes[i - h] #if parent_menu_id == submenu_name_id: #self.menu_indexes.pop(i - h) #h += 1 # ------------------------------------------------------------------------------------------- def gtk_call_func(self, gtkmenu, act_name_id): i = self.get_act_index(act_name_id) if i < 0: return None return self.act_indexes[i][iActFunc]() def qt_systray_clicked(self, reason): if reason in (QSystemTrayIcon.DoubleClick, QSystemTrayIcon.Trigger): self.__hideShowCall() # ------------------------------------------------------------------------------------------- def __hideShowCall(self): if self._parent.isVisible(): self.setActionText("show", self._parent.tr("Restore")) self._parent.hide() if self._app: self._app.setQuitOnLastWindowClosed(False) else: self.setActionText("show", self._parent.tr("Minimize")) if self._parent.isMaximized(): self._parent.showMaximized() else: self._parent.showNormal() if self._app: self._app.setQuitOnLastWindowClosed(True) QTimer.singleShot(500, self.__raiseWindow) def __quitCall(self): if self._app: self._app.setQuitOnLastWindowClosed(True) self._parent.hide() self._parent.close() if self._app: self._app.quit() def __raiseWindow(self): self._parent.activateWindow() self._parent.raise_() #--------------- main ------------------ if __name__ == '__main__': from PyQt5.QtWidgets import QApplication, QDialog, QMessageBox class ExampleGUI(QDialog): def __init__(self, parent=None): QDialog.__init__(self, parent) self.setWindowIcon(getIcon("audacity")) self.systray = GlobalSysTray(self, "Claudia", "claudia") self.systray.addAction("about", self.tr("About")) self.systray.setIcon("audacity") self.systray.setToolTip("Demo systray app") self.systray.connect("about", self.about) self.systray.show() def about(self): QMessageBox.about(self, self.tr("About"), self.tr("Systray Demo")) def done(self, r): QDialog.done(self, r) self.close() def closeEvent(self, event): self.systray.close() QDialog.closeEvent(self, event) app = QApplication(sys.argv) gui = ExampleGUI() gui.show() sys.exit(gui.systray.exec_(app))
gpl-2.0
Griffiths117/TG-s-IRC
client/IRClient.py
1
4985
import socket, _thread, tkinter as tk, tkinter.ttk as ttk from time import strftime, sleep from tkinter import messagebox, simpledialog #===========================================================================# class BasicInputDialog: def __init__(self,question,title=None,hideWindow=True): if title == None: title = PROGRAM_TITLE self.master = tk.Tk() self.string = '' self.master.title(title) self.frame = tk.Frame(self.master) self.frame.pack() self.acceptInput(question) self.waitForInput() try: self.inputted = self.getText() except Exception: quit() def acceptInput(self,question): r = self.frame k = ttk.Label(r,text=question) k.grid(row=0,column=0) self.e = ttk.Entry(r,width=30) self.e.grid(row=1,columnspan=2) self.e.focus_set() b = ttk.Button(r,text='Enter',command=self.getText) self.master.bind("<Return>", self.getText) b.grid(row=0,column=1,padx=5,pady=5) def getText(self,event=None): self.string = self.e.get() self.master.quit() return self.string def get(self): self.master.destroy() return self.inputted def getString(self): return self.string def waitForInput(self): self.master.mainloop() #Main window application class MainWindow(tk.Tk): def __init__(self, *args, **kwargs): tk.Tk.__init__(self, *args, **kwargs) self.title(PROGRAM_TITLE) self.resizable(0,0) self.displayBox = tk.Text(self, width=100, font=THEME.font, bg=THEME.colors[3], fg=THEME.colors[0]) self.displayBox.pack() self.displayBox.configure(state='disabled') self.msgEntry = tk.Entry(self,width=100, font=THEME.font, bg=THEME.colors[3], fg=THEME.colors[1], insertbackground = THEME.colors[2]) self.msgEntry.pack() self.bind("<Return>", self.sendText) def sendText(self,event=None): send(newMessage(self.msgEntry.get()).toString()) self.msgEntry.delete(0, 'end') class Theme: def __init__(self, font, colors): self.colors = colors #Message,input,cursor,background self.font = font class Message: #Static variables for formatting sep = "§" pref = "msg=" SUDO_PREF = "server=" #Initialize; if timestamp is not entered it defaults to the current time def __init__(self, sender, plainText, timestamp = None): if timestamp == None: timestamp = strftime("%d-%m-%Y %H:%M:%S") self.plainText = plainText self.sender = sender self.timestamp = timestamp #Serializes to a string to be sent through the socket def toString(self): return self.pref + self.sender + self.sep + self.timestamp + self.sep + self.plainText #Turns received strings into messages: returns None if invalid.
def fromString(text): if not text.startswith(Message.pref): return Message("SERVER",text[len(Message.SUDO_PREF):]) if text.startswith(Message.SUDO_PREF) else None data = text[len(Message.pref):].split(Message.sep,2) return Message(data[0],data[2],data[1]) #Converts into display string def toFormattedString(self): return "["+self.timestamp + "] <" + self.sender + ">: "+self.plainText #===========================================================================# def send(msg): try: SEND_SOCKET.send(bytes(msg,'UTF-8')) except: print("Unable to send message") def newMessage(msg): return Message(NICKNAME, msg) def waitForMessages(s,window): #This should be run in a separate thread: constantly receives new messages sleep(0.5) while True: #Receive message and convert to string msg = s.recv(1024) msg = str(msg, "UTF-8") #Checking if message follows Message class format m = Message.fromString(msg) if m == None: continue msg = m.toFormattedString() #Show in window writeTo(window.displayBox,msg) def writeTo(textBox,msg): textBox.configure(state='normal') textBox.insert('end',msg) textBox.configure(state='disabled') textBox.see(tk.END) def shutdownHook(): send("!DISCONNECT") root.destroy() quit() #===========================================================================# PROGRAM_TITLE = 'TG\'s IRC' SERVER_IP = BasicInputDialog("Enter IP:").get() NICKNAME = BasicInputDialog("Enter Nickname:").get() THEME = Theme(("Consolas", 10), ['aqua', 'cyan', 'white', 'black']) RECV_SOCKET = socket.socket() RECV_SOCKET.connect((SERVER_IP, 20075)) SEND_SOCKET = socket.socket() SEND_SOCKET.connect((SERVER_IP, 20074)) send("!nickname="+NICKNAME) root = MainWindow() _thread.start_new_thread(waitForMessages, (RECV_SOCKET,root,)) root.protocol("WM_DELETE_WINDOW", shutdownHook) root.mainloop()
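# --- Illustrative sketch (editor's addition, not part of the original client) ---
# Documents the wire format implemented by Message: the "msg=" prefix with
# sender, timestamp and text joined by the "§" separator. Defined only as
# documentation; nothing in the client calls it.
def _example_message_roundtrip():
    m = Message("alice", "hello world")
    wire = m.toString()  # e.g. msg=alice§21-05-2023 12:00:00§hello world
    parsed = Message.fromString(wire)
    assert parsed.sender == "alice" and parsed.plainText == "hello world"
    return parsed.toFormattedString()  # [timestamp] <alice>: hello world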
mit
hsaputra/tensorflow
tensorflow/contrib/sparsemax/__init__.py
106
1275
# Copyright 2016 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== """Module that implements sparsemax and sparsemax loss, see [1]. [1] https://arxiv.org/abs/1602.02068 ## Sparsemax @@sparsemax @@sparsemax_loss """ from __future__ import absolute_import from __future__ import division from __future__ import print_function from tensorflow.contrib.sparsemax.python.ops.sparsemax import sparsemax from tensorflow.contrib.sparsemax.python.ops.sparsemax_loss \ import sparsemax_loss from tensorflow.python.util.all_util import remove_undocumented _allowed_symbols = ['sparsemax', 'sparsemax_loss'] remove_undocumented(__name__, _allowed_symbols)
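# --- Illustrative sketch (editor's addition, not part of the original module) ---
# Minimal usage of the two exported symbols under a TF 1.x-era graph
# workflow (this module lives in tf.contrib). The import stays inside the
# function to avoid import-time side effects; never executed here.
def _example_sparsemax_usage():
  import tensorflow as tf
  logits = tf.constant([[0.5, 1.5, 3.0]])
  probs = sparsemax(logits)  # projects the logits onto the probability simplex
  labels = tf.constant([[0.0, 0.0, 1.0]])
  # sparsemax_loss takes the logits, their sparsemax, and the target labels.
  loss = sparsemax_loss(logits, probs, labels)
  return probs, loss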
apache-2.0
patrickglass/creo
creo/packages/creoconfig/tests/test_interactive_prompt.py
2
1305
#!/usr/bin/env python """ Module test_interactive_prompt """ import os import sys sys.path.append(os.path.realpath('.')) from creoconfig import Config def interactive_prompt(): c = Config() c.add_option( 'strkey', prefix='Please enter string', help='This is a string key') c.add_option( 'intkey', prefix='Please enter integer value', help='This is an int key', type=int) c.add_option( 'choice_key', prefix='Please enter one of the integer choices', help='This is an int key which only allows certain values', type=int, choices=[1, 2, 3, 10]) c.add_option( 'choice_key_str', prefix='Please choose one of the string values', help='This is a string key which only allows certain values', type=str, choices=['a', 'b', 'c', '10']) c.prompt() c.data = 'mydataval' c.another = 'moredata' c.another1 = 'abcs' print c print c._store.__dict__ print c._available_keywords print c._isbatch # print "Missing: %s" % c.missingkey print("Configuration:") for k, v in c.iteritems(): print("\t%s: %s" % (k, v)) if __name__ == '__main__': print "INFO: Running interactive tests!" interactive_prompt()
apache-2.0
lgscofield/odoo
openerp/addons/base/module/wizard/base_import_language.py
337
2644
# -*- coding: utf-8 -*- ############################################################################## # # OpenERP, Open Source Management Solution # Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>). # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # ############################################################################## import base64 from tempfile import TemporaryFile from openerp import tools from openerp.osv import osv, fields class base_language_import(osv.osv_memory): """ Language Import """ _name = "base.language.import" _description = "Language Import" _columns = { 'name': fields.char('Language Name', required=True), 'code': fields.char('ISO Code', size=5, help="ISO Language and Country code, e.g. en_US", required=True), 'data': fields.binary('File', required=True), 'overwrite': fields.boolean('Overwrite Existing Terms', help="If you enable this option, existing translations (including custom ones) " "will be overwritten and replaced by those in this file"), } def import_lang(self, cr, uid, ids, context=None): if context is None: context = {} this = self.browse(cr, uid, ids[0]) if this.overwrite: context = dict(context, overwrite=True) fileobj = TemporaryFile('w+') try: fileobj.write(base64.decodestring(this.data)) # now we determine the file format fileobj.seek(0) first_line = fileobj.readline().strip().replace('"', '').replace(' ', '') fileformat = first_line.endswith("type,name,res_id,src,value") and 'csv' or 'po' fileobj.seek(0) tools.trans_load_data(cr, fileobj, fileformat, this.code, lang_name=this.name, context=context) finally: fileobj.close() return True # vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
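# --- Illustrative sketch (editor's addition, not part of the original wizard) ---
# Standalone restatement of the format sniffing in import_lang() above: a CSV
# translation file is recognized by its "type,name,res_id,src,value" header
# line, and everything else is treated as a PO file. Never called by the wizard.
def _example_detect_format(first_line):
    normalized = first_line.strip().replace('"', '').replace(' ', '')
    return normalized.endswith("type,name,res_id,src,value") and 'csv' or 'po'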
agpl-3.0
xunilrj/sandbox
courses/MITx/MITx 6.86x Machine Learning with Python-From Linear Models to Deep Learning/project3/mnist/part2-twodigit/mlp.py
1
2432
import numpy as np import torch import torch.nn as nn import torch.nn.functional as F from train_utils import batchify_data, run_epoch, train_model, Flatten import utils_multiMNIST as U path_to_data_dir = '../Datasets/' use_mini_dataset = True batch_size = 64 nb_classes = 10 nb_epoch = 30 num_classes = 10 img_rows, img_cols = 42, 28 # input image dimensions class MLP(nn.Module): def __init__(self, input_dimension): super(MLP, self).__init__() self.flatten = Flatten() self.l1 = nn.Linear(input_dimension, 64) self.o1 = nn.Linear(64, 10) self.o2 = nn.Linear(64, 10) self.model = nn.Sequential( self.flatten, self.l1, ) def forward(self, x): out = self.model(x) out_first_digit = self.o1(out) out_second_digit = self.o2(out) return out_first_digit, out_second_digit def main(): X_train, y_train, X_test, y_test = U.get_data(path_to_data_dir, use_mini_dataset) # Split into train and dev dev_split_index = int(9 * len(X_train) / 10) X_dev = X_train[dev_split_index:] y_dev = [y_train[0][dev_split_index:], y_train[1][dev_split_index:]] X_train = X_train[:dev_split_index] y_train = [y_train[0][:dev_split_index], y_train[1][:dev_split_index]] permutation = np.array([i for i in range(len(X_train))]) np.random.shuffle(permutation) X_train = [X_train[i] for i in permutation] y_train = [[y_train[0][i] for i in permutation], [y_train[1][i] for i in permutation]] # Split dataset into batches train_batches = batchify_data(X_train, y_train, batch_size) dev_batches = batchify_data(X_dev, y_dev, batch_size) test_batches = batchify_data(X_test, y_test, batch_size) # Load model input_dimension = img_rows * img_cols model = MLP(input_dimension) # TODO add proper layers to MLP class above # Train train_model(train_batches, dev_batches, model) ## Evaluate the model on test data loss, acc = run_epoch(test_batches, model.eval(), None) print('Test loss1: {:.6f} accuracy1: {:.6f} loss2: {:.6f} accuracy2: {:.6f}'.format(loss[0], acc[0], loss[1], acc[1])) if __name__ == '__main__': # Specify seed for deterministic behavior, then shuffle. Do not change seed for official submissions to edx np.random.seed(12321) # for reproducibility torch.manual_seed(12321) # for reproducibility main()
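# --- Illustrative sketch (editor's addition, not part of the assignment file) ---
# Shape sanity check for the two-headed MLP above: one shared hidden layer
# feeding two independent 10-way classifiers, one per digit. Assumes the
# imported Flatten collapses everything but the batch dimension; never called.
def _example_shape_check():
    model = MLP(img_rows * img_cols)
    x = torch.randn(8, 1, img_rows, img_cols)  # a fake batch of 8 two-digit images
    out_first, out_second = model(x)
    assert out_first.shape == (8, num_classes)
    assert out_second.shape == (8, num_classes)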
apache-2.0
daivietpda/m7-gpe-l
tools/perf/scripts/python/net_dropmonitor.py
4235
1554
# Monitor the system for dropped packets and produce a report of drop locations and counts import os import sys sys.path.append(os.environ['PERF_EXEC_PATH'] + \ '/scripts/python/Perf-Trace-Util/lib/Perf/Trace') from perf_trace_context import * from Core import * from Util import * drop_log = {} kallsyms = [] def get_kallsyms_table(): global kallsyms try: f = open("/proc/kallsyms", "r") linecount = 0 for line in f: linecount = linecount+1 f.seek(0) except: return j = 0 for line in f: loc = int(line.split()[0], 16) name = line.split()[2] j = j +1 if ((j % 100) == 0): print "\r" + str(j) + "/" + str(linecount), kallsyms.append({ 'loc': loc, 'name' : name}) print "\r" + str(j) + "/" + str(linecount) kallsyms.sort() return def get_sym(sloc): loc = int(sloc) for i in kallsyms: if (i['loc'] >= loc): return (i['name'], i['loc']-loc) return (None, 0) def print_drop_table(): print "%25s %25s %25s" % ("LOCATION", "OFFSET", "COUNT") for i in drop_log.keys(): (sym, off) = get_sym(i) if sym == None: sym = i print "%25s %25s %25s" % (sym, off, drop_log[i]) def trace_begin(): print "Starting trace (Ctrl-C to dump results)" def trace_end(): print "Gathering kallsyms data" get_kallsyms_table() print_drop_table() # called from perf, when it finds a corresponding event def skb__kfree_skb(name, context, cpu, sec, nsec, pid, comm, skbaddr, protocol, location): slocation = str(location) try: drop_log[slocation] = drop_log[slocation] + 1 except: drop_log[slocation] = 1
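# --- Illustrative sketch (editor's addition, not part of the original script) ---
# kallsyms is kept sorted by address, so the linear scan in get_sym() could
# be a binary search; an equivalent bisect-based lookup, shown for comparison
# only and never called by the trace hooks:
import bisect
def get_sym_bisect(sloc):
    loc = int(sloc)
    addrs = [s['loc'] for s in kallsyms]
    # first index whose address is >= loc, matching get_sym() semantics
    i = bisect.bisect_left(addrs, loc)
    if i < len(kallsyms):
        return (kallsyms[i]['name'], kallsyms[i]['loc'] - loc)
    return (None, 0)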
gpl-2.0
moritzpein/airflow
airflow/operators/hive_to_mysql.py
9
3561
import logging from airflow.hooks import HiveServer2Hook, MySqlHook from airflow.models import BaseOperator from airflow.utils import apply_defaults from tempfile import NamedTemporaryFile class HiveToMySqlTransfer(BaseOperator): """ Moves data from Hive to MySQL. Note that for now the data is loaded into memory before being pushed to MySQL, so this operator should be used for smallish amounts of data. :param sql: SQL query to execute against the Hive server :type sql: str :param mysql_table: target MySQL table, use dot notation to target a specific database :type mysql_table: str :param mysql_conn_id: destination mysql connection :type mysql_conn_id: str :param hiveserver2_conn_id: source hive connection :type hiveserver2_conn_id: str :param mysql_preoperator: sql statement to run against mysql prior to import, typically used to truncate or delete in place of the data coming in, allowing the task to be idempotent (running the task twice won't double load data) :type mysql_preoperator: str :param mysql_postoperator: sql statement to run against mysql after the import, typically used to move data from staging to production and issue cleanup commands. :type mysql_postoperator: str :param bulk_load: flag to use bulk_load option. This loads mysql directly from a tab-delimited text file using the LOAD DATA LOCAL INFILE command. This option requires an extra connection parameter for the destination MySQL connection: {'local_infile': true}. :type bulk_load: bool """ template_fields = ('sql', 'mysql_table', 'mysql_preoperator', 'mysql_postoperator') template_ext = ('.sql',) ui_color = '#a0e08c' @apply_defaults def __init__( self, sql, mysql_table, hiveserver2_conn_id='hiveserver2_default', mysql_conn_id='mysql_default', mysql_preoperator=None, mysql_postoperator=None, bulk_load=False, *args, **kwargs): super(HiveToMySqlTransfer, self).__init__(*args, **kwargs) self.sql = sql self.mysql_table = mysql_table self.mysql_conn_id = mysql_conn_id self.mysql_preoperator = mysql_preoperator self.mysql_postoperator = mysql_postoperator self.hiveserver2_conn_id = hiveserver2_conn_id self.bulk_load = bulk_load def execute(self, context): hive = HiveServer2Hook(hiveserver2_conn_id=self.hiveserver2_conn_id) logging.info("Extracting data from Hive") logging.info(self.sql) if self.bulk_load: tmpfile = NamedTemporaryFile() hive.to_csv(self.sql, tmpfile.name, delimiter='\t', lineterminator='\n', output_header=False) else: results = hive.get_records(self.sql) mysql = MySqlHook(mysql_conn_id=self.mysql_conn_id) if self.mysql_preoperator: logging.info("Running MySQL preoperator") mysql.run(self.mysql_preoperator) logging.info("Inserting rows into MySQL") if self.bulk_load: mysql.bulk_load(table=self.mysql_table, tmp_file=tmpfile.name) tmpfile.close() else: mysql.insert_rows(table=self.mysql_table, rows=results) if self.mysql_postoperator: logging.info("Running MySQL postoperator") mysql.run(self.mysql_postoperator) logging.info("Done.")
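# --- Illustrative sketch (editor's addition, not part of the original module) ---
# How the transfer is typically wired into a DAG; the dag object, query,
# table name and connection ids below are hypothetical placeholders.
def _example_dag_usage(dag):
    return HiveToMySqlTransfer(
        task_id='hive_to_mysql_example',
        sql="SELECT name, total FROM hive_db.daily_totals",
        mysql_table='reporting.daily_totals',
        # keeps the task idempotent: re-runs replace rather than append
        mysql_preoperator='DELETE FROM reporting.daily_totals',
        hiveserver2_conn_id='hiveserver2_default',
        mysql_conn_id='mysql_default',
        dag=dag)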
apache-2.0
xxsergzzxx/python-for-android
python3-alpha/python3-src/Lib/smtplib.py
45
35108
#! /usr/bin/env python3 '''SMTP/ESMTP client class. This should follow RFC 821 (SMTP), RFC 1869 (ESMTP), RFC 2554 (SMTP Authentication) and RFC 2487 (Secure SMTP over TLS). Notes: Please remember, when doing ESMTP, that the names of the SMTP service extensions are NOT the same thing as the option keywords for the RCPT and MAIL commands! Example: >>> import smtplib >>> s=smtplib.SMTP("localhost") >>> print(s.help()) This is Sendmail version 8.8.4 Topics: HELO EHLO MAIL RCPT DATA RSET NOOP QUIT HELP VRFY EXPN VERB ETRN DSN For more info use "HELP <topic>". To report bugs in the implementation send email to sendmail-bugs@sendmail.org. For local information send email to Postmaster at your site. End of HELP info >>> s.putcmd("vrfy","someone@here") >>> s.getreply() (250, "Somebody OverHere <somebody@here.my.org>") >>> s.quit() ''' # Author: The Dragon De Monsyne <dragondm@integral.org> # ESMTP support, test code and doc fixes added by # Eric S. Raymond <esr@thyrsus.com> # Better RFC 821 compliance (MAIL and RCPT, and CRLF in data) # by Carey Evans <c.evans@clear.net.nz>, for picky mail servers. # RFC 2554 (authentication) support by Gerhard Haering <gerhard@bigfoot.de>. # # This was modified from the Python 1.5 library HTTP lib. import socket import io import re import email.utils import email.message import email.generator import base64 import hmac import copy from email.base64mime import body_encode as encode_base64 from sys import stderr __all__ = ["SMTPException", "SMTPServerDisconnected", "SMTPResponseException", "SMTPSenderRefused", "SMTPRecipientsRefused", "SMTPDataError", "SMTPConnectError", "SMTPHeloError", "SMTPAuthenticationError", "quoteaddr", "quotedata", "SMTP"] SMTP_PORT = 25 SMTP_SSL_PORT = 465 CRLF = "\r\n" bCRLF = b"\r\n" OLDSTYLE_AUTH = re.compile(r"auth=(.*)", re.I) # Exception classes used by this module. class SMTPException(Exception): """Base class for all exceptions raised by this module.""" class SMTPServerDisconnected(SMTPException): """Not connected to any SMTP server. This exception is raised when the server unexpectedly disconnects, or when an attempt is made to use the SMTP instance before connecting it to a server. """ class SMTPResponseException(SMTPException): """Base class for all exceptions that include an SMTP error code. These exceptions are generated in some instances when the SMTP server returns an error code. The error code is stored in the `smtp_code' attribute of the error, and the `smtp_error' attribute is set to the error message. """ def __init__(self, code, msg): self.smtp_code = code self.smtp_error = msg self.args = (code, msg) class SMTPSenderRefused(SMTPResponseException): """Sender address refused. In addition to the attributes set by on all SMTPResponseException exceptions, this sets `sender' to the string that the SMTP refused. """ def __init__(self, code, msg, sender): self.smtp_code = code self.smtp_error = msg self.sender = sender self.args = (code, msg, sender) class SMTPRecipientsRefused(SMTPException): """All recipient addresses refused. The errors for each recipient are accessible through the attribute 'recipients', which is a dictionary of exactly the same sort as SMTP.sendmail() returns. 
""" def __init__(self, recipients): self.recipients = recipients self.args = (recipients,) class SMTPDataError(SMTPResponseException): """The SMTP server didn't accept the data.""" class SMTPConnectError(SMTPResponseException): """Error during connection establishment.""" class SMTPHeloError(SMTPResponseException): """The server refused our HELO reply.""" class SMTPAuthenticationError(SMTPResponseException): """Authentication error. Most probably the server didn't accept the username/password combination provided. """ def quoteaddr(addr): """Quote a subset of the email addresses defined by RFC 821. Should be able to handle anything email.utils.parseaddr can handle. """ m = (None, None) try: m = email.utils.parseaddr(addr)[1] except AttributeError: pass if m == (None, None): # Indicates parse failure or AttributeError # something weird here.. punt -ddm return "<%s>" % addr elif m is None: # the sender wants an empty return address return "<>" else: return "<%s>" % m def _addr_only(addrstring): displayname, addr = email.utils.parseaddr(addrstring) if (displayname, addr) == ('', ''): # parseaddr couldn't parse it, so use it as is. return addrstring return addr # Legacy method kept for backward compatibility. def quotedata(data): """Quote data for email. Double leading '.', and change Unix newline '\\n', or Mac '\\r' into Internet CRLF end-of-line. """ return re.sub(r'(?m)^\.', '..', re.sub(r'(?:\r\n|\n|\r(?!\n))', CRLF, data)) def _quote_periods(bindata): return re.sub(br'(?m)^\.', b'..', bindata) def _fix_eols(data): return re.sub(r'(?:\r\n|\n|\r(?!\n))', CRLF, data) try: import ssl except ImportError: _have_ssl = False else: class SSLFakeFile: """A fake file like object that really wraps a SSLObject. It only supports what is needed in smtplib. """ def __init__(self, sslobj): self.sslobj = sslobj def readline(self): str = b"" chr = None while chr != b"\n": chr = self.sslobj.read(1) if not chr: break str += chr return str def close(self): pass _have_ssl = True class SMTP: """This class manages a connection to an SMTP or ESMTP server. SMTP Objects: SMTP objects have the following attributes: helo_resp This is the message given by the server in response to the most recent HELO command. ehlo_resp This is the message given by the server in response to the most recent EHLO command. This is usually multiline. does_esmtp This is a True value _after you do an EHLO command_, if the server supports ESMTP. esmtp_features This is a dictionary, which, if the server supports ESMTP, will _after you do an EHLO command_, contain the names of the SMTP service extensions this server supports, and their parameters (if any). Note, all extension names are mapped to lower case in the dictionary. See each method's docstrings for details. In general, there is a method of the same name to perform each SMTP command. There is also a method called 'sendmail' that will do an entire mail transaction. """ debuglevel = 0 file = None helo_resp = None ehlo_msg = "ehlo" ehlo_resp = None does_esmtp = 0 default_port = SMTP_PORT def __init__(self, host='', port=0, local_hostname=None, timeout=socket._GLOBAL_DEFAULT_TIMEOUT): """Initialize a new instance. If specified, `host' is the name of the remote host to which to connect. If specified, `port' specifies the port to which to connect. By default, smtplib.SMTP_PORT is used. An SMTPConnectError is raised if the specified `host' doesn't respond correctly. If specified, `local_hostname` is used as the FQDN of the local host. 
By default, the local hostname is found using socket.getfqdn(). """ self.timeout = timeout self.esmtp_features = {} if host: (code, msg) = self.connect(host, port) if code != 220: raise SMTPConnectError(code, msg) if local_hostname is not None: self.local_hostname = local_hostname else: # RFC 2821 says we should use the fqdn in the EHLO/HELO verb, and # if that can't be calculated, that we should use a domain literal # instead (essentially an encoded IP address like [A.B.C.D]). fqdn = socket.getfqdn() if '.' in fqdn: self.local_hostname = fqdn else: # We can't find an fqdn hostname, so use a domain literal addr = '127.0.0.1' try: addr = socket.gethostbyname(socket.gethostname()) except socket.gaierror: pass self.local_hostname = '[%s]' % addr def set_debuglevel(self, debuglevel): """Set the debug output level. A non-false value results in debug messages for connection and for all messages sent to and received from the server. """ self.debuglevel = debuglevel def _get_socket(self, host, port, timeout): # This makes it simpler for SMTP_SSL to use the SMTP connect code # and just alter the socket connection bit. if self.debuglevel > 0: print('connect:', (host, port), file=stderr) return socket.create_connection((host, port), timeout) def connect(self, host='localhost', port=0): """Connect to a host on a given port. If the hostname ends with a colon (`:') followed by a number, and there is no port specified, that suffix will be stripped off and the number interpreted as the port number to use. Note: This method is automatically invoked by __init__, if a host is specified during instantiation. """ if not port and (host.find(':') == host.rfind(':')): i = host.rfind(':') if i >= 0: host, port = host[:i], host[i + 1:] try: port = int(port) except ValueError: raise socket.error("nonnumeric port") if not port: port = self.default_port if self.debuglevel > 0: print('connect:', (host, port), file=stderr) self.sock = self._get_socket(host, port, self.timeout) (code, msg) = self.getreply() if self.debuglevel > 0: print("connect:", msg, file=stderr) return (code, msg) def send(self, s): """Send `s' to the server.""" if self.debuglevel > 0: print('send:', repr(s), file=stderr) if hasattr(self, 'sock') and self.sock: if isinstance(s, str): s = s.encode("ascii") try: self.sock.sendall(s) except socket.error: self.close() raise SMTPServerDisconnected('Server not connected') else: raise SMTPServerDisconnected('please run connect() first') def putcmd(self, cmd, args=""): """Send a command to the server.""" if args == "": str = '%s%s' % (cmd, CRLF) else: str = '%s %s%s' % (cmd, args, CRLF) self.send(str) def getreply(self): """Get a reply from the server. Returns a tuple consisting of: - server response code (e.g. '250', or such, if all goes well) Note: returns -1 if it can't read response code. - server response string corresponding to response code (multiline responses are converted to a single, multiline string). Raises SMTPServerDisconnected if end-of-file is reached. """ resp = [] if self.file is None: self.file = self.sock.makefile('rb') while 1: try: line = self.file.readline() except socket.error: line = '' if not line: self.close() raise SMTPServerDisconnected("Connection unexpectedly closed") if self.debuglevel > 0: print('reply:', repr(line), file=stderr) resp.append(line[4:].strip(b' \t\r\n')) code = line[:3] # Check that the error code is syntactically correct. # Don't attempt to read a continuation line if it is broken. 
try: errcode = int(code) except ValueError: errcode = -1 break # Check if multiline response. if line[3:4] != b"-": break errmsg = b"\n".join(resp) if self.debuglevel > 0: print('reply: retcode (%s); Msg: %s' % (errcode, errmsg), file=stderr) return errcode, errmsg def docmd(self, cmd, args=""): """Send a command, and return its response code.""" self.putcmd(cmd, args) return self.getreply() # std smtp commands def helo(self, name=''): """SMTP 'helo' command. Hostname to send for this command defaults to the FQDN of the local host. """ self.putcmd("helo", name or self.local_hostname) (code, msg) = self.getreply() self.helo_resp = msg return (code, msg) def ehlo(self, name=''): """ SMTP 'ehlo' command. Hostname to send for this command defaults to the FQDN of the local host. """ self.esmtp_features = {} self.putcmd(self.ehlo_msg, name or self.local_hostname) (code, msg) = self.getreply() # According to RFC1869 some (badly written) # MTA's will disconnect on an ehlo. Toss an exception if # that happens -ddm if code == -1 and len(msg) == 0: self.close() raise SMTPServerDisconnected("Server not connected") self.ehlo_resp = msg if code != 250: return (code, msg) self.does_esmtp = 1 #parse the ehlo response -ddm assert isinstance(self.ehlo_resp, bytes), repr(self.ehlo_resp) resp = self.ehlo_resp.decode("latin-1").split('\n') del resp[0] for each in resp: # To be able to communicate with as many SMTP servers as possible, # we have to take the old-style auth advertisement into account, # because: # 1) Else our SMTP feature parser gets confused. # 2) There are some servers that only advertise the auth methods we # support using the old style. auth_match = OLDSTYLE_AUTH.match(each) if auth_match: # This doesn't remove duplicates, but that's no problem self.esmtp_features["auth"] = self.esmtp_features.get("auth", "") \ + " " + auth_match.groups(0)[0] continue # RFC 1869 requires a space between ehlo keyword and parameters. # It's actually stricter, in that only spaces are allowed between # parameters, but we're not going to check for that here. Note # that the space isn't present if there are no parameters. m = re.match(r'(?P<feature>[A-Za-z0-9][A-Za-z0-9\-]*) ?', each) if m: feature = m.group("feature").lower() params = m.string[m.end("feature"):].strip() if feature == "auth": self.esmtp_features[feature] = self.esmtp_features.get(feature, "") \ + " " + params else: self.esmtp_features[feature] = params return (code, msg) def has_extn(self, opt): """Does the server support a given SMTP service extension?""" return opt.lower() in self.esmtp_features def help(self, args=''): """SMTP 'help' command. Returns help text from server.""" self.putcmd("help", args) return self.getreply()[1] def rset(self): """SMTP 'rset' command -- resets session.""" return self.docmd("rset") def noop(self): """SMTP 'noop' command -- doesn't do anything :>""" return self.docmd("noop") def mail(self, sender, options=[]): """SMTP 'mail' command -- begins mail xfer session.""" optionlist = '' if options and self.does_esmtp: optionlist = ' ' + ' '.join(options) self.putcmd("mail", "FROM:%s%s" % (quoteaddr(sender), optionlist)) return self.getreply() def rcpt(self, recip, options=[]): """SMTP 'rcpt' command -- indicates 1 recipient for this mail.""" optionlist = '' if options and self.does_esmtp: optionlist = ' ' + ' '.join(options) self.putcmd("rcpt", "TO:%s%s" % (quoteaddr(recip), optionlist)) return self.getreply() def data(self, msg): """SMTP 'DATA' command -- sends message data to server.
Automatically quotes lines beginning with a period per rfc821. Raises SMTPDataError if there is an unexpected reply to the DATA command; the return value from this method is the final response code received when all the data is sent. If msg is a string, lone '\r' and '\n' characters are converted to '\r\n' characters. If msg is bytes, it is transmitted as is. """ self.putcmd("data") (code, repl) = self.getreply() if self.debuglevel > 0: print("data:", (code, repl), file=stderr) if code != 354: raise SMTPDataError(code, repl) else: if isinstance(msg, str): msg = _fix_eols(msg).encode('ascii') q = _quote_periods(msg) if q[-2:] != bCRLF: q = q + bCRLF q = q + b"." + bCRLF self.send(q) (code, msg) = self.getreply() if self.debuglevel > 0: print("data:", (code, msg), file=stderr) return (code, msg) def verify(self, address): """SMTP 'verify' command -- checks for address validity.""" self.putcmd("vrfy", _addr_only(address)) return self.getreply() # a.k.a. vrfy = verify def expn(self, address): """SMTP 'expn' command -- expands a mailing list.""" self.putcmd("expn", _addr_only(address)) return self.getreply() # some useful methods def ehlo_or_helo_if_needed(self): """Call self.ehlo() and/or self.helo() if needed. If there has been no previous EHLO or HELO command this session, this method tries ESMTP EHLO first. This method may raise the following exceptions: SMTPHeloError The server didn't reply properly to the helo greeting. """ if self.helo_resp is None and self.ehlo_resp is None: if not (200 <= self.ehlo()[0] <= 299): (code, resp) = self.helo() if not (200 <= code <= 299): raise SMTPHeloError(code, resp) def login(self, user, password): """Log in on an SMTP server that requires authentication. The arguments are: - user: The user name to authenticate with. - password: The password for the authentication. If there has been no previous EHLO or HELO command this session, this method tries ESMTP EHLO first. This method will return normally if the authentication was successful. This method may raise the following exceptions: SMTPHeloError The server didn't reply properly to the helo greeting. SMTPAuthenticationError The server didn't accept the username/ password combination. SMTPException No suitable authentication method was found. """ def encode_cram_md5(challenge, user, password): challenge = base64.decodebytes(challenge) response = user + " " + hmac.HMAC(password.encode('ascii'), challenge).hexdigest() return encode_base64(response.encode('ascii'), eol='') def encode_plain(user, password): s = "\0%s\0%s" % (user, password) return encode_base64(s.encode('ascii'), eol='') AUTH_PLAIN = "PLAIN" AUTH_CRAM_MD5 = "CRAM-MD5" AUTH_LOGIN = "LOGIN" self.ehlo_or_helo_if_needed() if not self.has_extn("auth"): raise SMTPException("SMTP AUTH extension not supported by server.") # Authentication methods the server claims to support advertised_authlist = self.esmtp_features["auth"].split() # List of authentication methods we support: from preferred to # less preferred methods. Except for the purpose of testing the weaker # ones, we prefer stronger methods like CRAM-MD5: preferred_auths = [AUTH_CRAM_MD5, AUTH_PLAIN, AUTH_LOGIN] # We try the authentication methods the server advertises, but only the # ones *we* support. And in our preferred order.
authlist = [auth for auth in preferred_auths if auth in advertised_authlist] if not authlist: raise SMTPException("No suitable authentication method found.") # Some servers advertise authentication methods they don't really # support, so if authentication fails, we continue until we've tried # all methods. for authmethod in authlist: if authmethod == AUTH_CRAM_MD5: (code, resp) = self.docmd("AUTH", AUTH_CRAM_MD5) if code == 334: (code, resp) = self.docmd(encode_cram_md5(resp, user, password)) elif authmethod == AUTH_PLAIN: (code, resp) = self.docmd("AUTH", AUTH_PLAIN + " " + encode_plain(user, password)) elif authmethod == AUTH_LOGIN: (code, resp) = self.docmd("AUTH", "%s %s" % (AUTH_LOGIN, encode_base64(user.encode('ascii'), eol=''))) if code == 334: (code, resp) = self.docmd(encode_base64(password.encode('ascii'), eol='')) # 235 == 'Authentication successful' # 503 == 'Error: already authenticated' if code in (235, 503): return (code, resp) # We could not login successfully. Return result of last attempt. raise SMTPAuthenticationError(code, resp) def starttls(self, keyfile=None, certfile=None): """Puts the connection to the SMTP server into TLS mode. If there has been no previous EHLO or HELO command this session, this method tries ESMTP EHLO first. If the server supports TLS, this will encrypt the rest of the SMTP session. If you provide the keyfile and certfile parameters, the identity of the SMTP server and client can be checked. This, however, depends on whether the socket module really checks the certificates. This method may raise the following exceptions: SMTPHeloError The server didn't reply properly to the helo greeting. """ self.ehlo_or_helo_if_needed() if not self.has_extn("starttls"): raise SMTPException("STARTTLS extension not supported by server.") (resp, reply) = self.docmd("STARTTLS") if resp == 220: if not _have_ssl: raise RuntimeError("No SSL support included in this Python") self.sock = ssl.wrap_socket(self.sock, keyfile, certfile) self.file = SSLFakeFile(self.sock) # RFC 3207: # The client MUST discard any knowledge obtained from # the server, such as the list of SMTP service extensions, # which was not obtained from the TLS negotiation itself. self.helo_resp = None self.ehlo_resp = None self.esmtp_features = {} self.does_esmtp = 0 return (resp, reply) def sendmail(self, from_addr, to_addrs, msg, mail_options=[], rcpt_options=[]): """This command performs an entire mail transaction. The arguments are: - from_addr : The address sending this mail. - to_addrs : A list of addresses to send this mail to. A bare string will be treated as a list with 1 address. - msg : The message to send. - mail_options : List of ESMTP options (such as 8bitmime) for the mail command. - rcpt_options : List of ESMTP options (such as DSN commands) for all the rcpt commands. msg may be a string containing characters in the ASCII range, or a byte string. A string is encoded to bytes using the ascii codec, and lone \\r and \\n characters are converted to \\r\\n characters. If there has been no previous EHLO or HELO command this session, this method tries ESMTP EHLO first. If the server does ESMTP, message size and each of the specified options will be passed to it. If EHLO fails, HELO will be tried and ESMTP options suppressed. This method will return normally if the mail is accepted for at least one recipient. It returns a dictionary, with one entry for each recipient that was refused. Each entry contains a tuple of the SMTP error code and the accompanying error message sent by the server.
This method may raise the following exceptions: SMTPHeloError The server didn't reply properly to the helo greeting. SMTPRecipientsRefused The server rejected ALL recipients (no mail was sent). SMTPSenderRefused The server didn't accept the from_addr. SMTPDataError The server replied with an unexpected error code (other than a refusal of a recipient). Note: the connection will be open even after an exception is raised. Example: >>> import smtplib >>> s=smtplib.SMTP("localhost") >>> tolist=["one@one.org","two@two.org","three@three.org","four@four.org"] >>> msg = '''\\ ... From: Me@my.org ... Subject: testin'... ... ... This is a test ''' >>> s.sendmail("me@my.org",tolist,msg) { "three@three.org" : ( 550 ,"User unknown" ) } >>> s.quit() In the above example, the message was accepted for delivery to three of the four addresses, and one was rejected, with the error code 550. If all addresses are accepted, then the method will return an empty dictionary. """ self.ehlo_or_helo_if_needed() esmtp_opts = [] if isinstance(msg, str): msg = _fix_eols(msg).encode('ascii') if self.does_esmtp: # Hmmm? what's this? -ddm # self.esmtp_features['7bit']="" if self.has_extn('size'): esmtp_opts.append("size=%d" % len(msg)) for option in mail_options: esmtp_opts.append(option) (code, resp) = self.mail(from_addr, esmtp_opts) if code != 250: self.rset() raise SMTPSenderRefused(code, resp, from_addr) senderrs = {} if isinstance(to_addrs, str): to_addrs = [to_addrs] for each in to_addrs: (code, resp) = self.rcpt(each, rcpt_options) if (code != 250) and (code != 251): senderrs[each] = (code, resp) if len(senderrs) == len(to_addrs): # the server refused all our recipients self.rset() raise SMTPRecipientsRefused(senderrs) (code, resp) = self.data(msg) if code != 250: self.rset() raise SMTPDataError(code, resp) #if we got here then somebody got our mail return senderrs def send_message(self, msg, from_addr=None, to_addrs=None, mail_options=[], rcpt_options={}): """Converts message to a bytestring and passes it to sendmail. The arguments are as for sendmail, except that msg is an email.message.Message object. If from_addr is None or to_addrs is None, these arguments are taken from the headers of the Message as described in RFC 2822 (a ValueError is raised if there is more than one set of 'Resent-' headers). Regardless of the values of from_addr and to_addr, any Bcc field (or Resent-Bcc field, when the Message is a resent) of the Message object won't be transmitted. The Message object is then serialized using email.generator.BytesGenerator and sendmail is called to transmit the message. """ # 'Resent-Date' is a mandatory field if the Message is resent (RFC 2822 # Section 3.6.6). In such a case, we use the 'Resent-*' fields. However, # if there is more than one 'Resent-' block there's no way to # unambiguously determine which one is the most recent in all cases, # so rather than guess we raise a ValueError in that case. # # TODO implement heuristics to guess the correct Resent-* block with an # option allowing the user to enable the heuristics. (It should be # possible to guess correctly almost all of the time.) resent =msg.get_all('Resent-Date') if resent is None: header_prefix = '' elif len(resent) == 1: header_prefix = 'Resent-' else: raise ValueError("message has more than one 'Resent-' header block") if from_addr is None: # Prefer the sender field per RFC 2822:3.6.2. 
from_addr = (msg[header_prefix+'Sender'] if (header_prefix+'Sender') in msg else msg[header_prefix+'From']) if to_addrs is None: addr_fields = [f for f in (msg[header_prefix+'To'], msg[header_prefix+'Bcc'], msg[header_prefix+'Cc']) if f is not None] to_addrs = [a[1] for a in email.utils.getaddresses(addr_fields)] # Make a local copy so we can delete the bcc headers. msg_copy = copy.copy(msg) del msg_copy['Bcc'] del msg_copy['Resent-Bcc'] with io.BytesIO() as bytesmsg: g = email.generator.BytesGenerator(bytesmsg) g.flatten(msg_copy, linesep='\r\n') flatmsg = bytesmsg.getvalue() return self.sendmail(from_addr, to_addrs, flatmsg, mail_options, rcpt_options) def close(self): """Close the connection to the SMTP server.""" if self.file: self.file.close() self.file = None if self.sock: self.sock.close() self.sock = None def quit(self): """Terminate the SMTP session.""" res = self.docmd("quit") self.close() return res if _have_ssl: class SMTP_SSL(SMTP): """ This is a subclass derived from SMTP that connects over an SSL encrypted socket (to use this class you need a socket module that was compiled with SSL support). If host is not specified, '' (the local host) is used. If port is omitted, the standard SMTP-over-SSL port (465) is used. keyfile and certfile are also optional - they can contain a PEM formatted private key and certificate chain file for the SSL connection. """ default_port = SMTP_SSL_PORT def __init__(self, host='', port=0, local_hostname=None, keyfile=None, certfile=None, timeout=socket._GLOBAL_DEFAULT_TIMEOUT): self.keyfile = keyfile self.certfile = certfile SMTP.__init__(self, host, port, local_hostname, timeout) def _get_socket(self, host, port, timeout): if self.debuglevel > 0: print('connect:', (host, port), file=stderr) new_socket = socket.create_connection((host, port), timeout) new_socket = ssl.wrap_socket(new_socket, self.keyfile, self.certfile) self.file = SSLFakeFile(new_socket) return new_socket __all__.append("SMTP_SSL") # # LMTP extension # LMTP_PORT = 2003 class LMTP(SMTP): """LMTP - Local Mail Transfer Protocol The LMTP protocol, which is very similar to ESMTP, is heavily based on the standard SMTP client. It's common to use Unix sockets for LMTP, so our connect() method must support that as well as a regular host:port server. To specify a Unix socket, you must use an absolute path as the host, starting with a '/'. Authentication is supported, using the regular SMTP mechanism. When using a Unix socket, LMTP servers generally don't support or require any authentication, but your mileage might vary.""" ehlo_msg = "lhlo" def __init__(self, host='', port=LMTP_PORT, local_hostname=None): """Initialize a new instance.""" SMTP.__init__(self, host, port, local_hostname) def connect(self, host='localhost', port=0): """Connect to the LMTP daemon, on either a Unix or a TCP socket.""" if host[0] != '/': return SMTP.connect(self, host, port) # Handle Unix-domain sockets. try: self.sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM) self.sock.connect(host) except socket.error as msg: if self.debuglevel > 0: print('connect fail:', host, file=stderr) if self.sock: self.sock.close() self.sock = None raise socket.error(msg) (code, msg) = self.getreply() if self.debuglevel > 0: print('connect:', msg, file=stderr) return (code, msg) # Test the sendmail method, which tests most of the others. # Note: This always sends to localhost.
if __name__ == '__main__': import sys def prompt(prompt): sys.stdout.write(prompt + ": ") return sys.stdin.readline().strip() fromaddr = prompt("From") toaddrs = prompt("To").split(',') print("Enter message, end with ^D:") msg = '' while 1: line = sys.stdin.readline() if not line: break msg = msg + line print("Message length is %d" % len(msg)) server = SMTP('localhost') server.set_debuglevel(1) server.sendmail(fromaddr, toaddrs, msg) server.quit()
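# Illustrative sketch (added for this edit, not part of the original module):
# shows send_message() with an email.message.Message, letting the envelope be
# taken from the From/To headers. The host, port, and addresses below are
# placeholders; they assume a local debugging server such as one started with
# `python -m smtpd -n -c DebuggingServer localhost:1025`.
def _example_send_message(host='localhost', port=1025):
    from email.message import Message
    msg = Message()
    msg['From'] = 'me@example.org'
    msg['To'] = 'you@example.org'
    msg['Subject'] = 'send_message demo'
    msg.set_payload('A test body.\n')
    s = SMTP(host, port)
    try:
        # Returns a dict of refused recipients; empty means all were accepted.
        return s.send_message(msg)
    finally:
        s.quit()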
apache-2.0
omni5cience/django-inlineformfield
.tox/py27/lib/python2.7/site-packages/django/contrib/staticfiles/views.py
78
1302
""" Views and functions for serving static files. These are only to be used during development, and SHOULD NOT be used in a production setting. """ import os import posixpath from django.conf import settings from django.http import Http404 from django.utils.six.moves.urllib.parse import unquote from django.views import static from django.contrib.staticfiles import finders def serve(request, path, insecure=False, **kwargs): """ Serve static files below a given point in the directory structure or from locations inferred from the staticfiles finders. To use, put a URL pattern such as:: (r'^(?P<path>.*)$', 'django.contrib.staticfiles.views.serve') in your URLconf. It uses the django.views.static.serve() view to serve the found files. """ if not settings.DEBUG and not insecure: raise Http404 normalized_path = posixpath.normpath(unquote(path)).lstrip('/') absolute_path = finders.find(normalized_path) if not absolute_path: if path.endswith('/') or path == '': raise Http404("Directory indexes are not allowed here.") raise Http404("'%s' could not be found" % path) document_root, path = os.path.split(absolute_path) return static.serve(request, path, document_root=document_root, **kwargs)
mit
davehunt/selenium
py/test/unit/selenium/webdriver/ie/test_ie_options.py
13
5556
# Licensed to the Software Freedom Conservancy (SFC) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information # regarding copyright ownership. The SFC licenses this file # to you under the Apache License, Version 2.0 (the # "License"); you may not use this file except in compliance # with the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. from selenium.webdriver.ie.options import Options, ElementScrollBehavior import pytest TIMEOUT = 30 @pytest.fixture def opts(): yield Options() def test_arguments(opts): arg1 = '-k' arg2 = '-private' opts.add_argument(arg1) opts.add_argument(arg2) assert arg1 in opts.arguments assert arg2 in opts.arguments def test_browser_attach_timeout(opts): opts.browser_attach_timeout = TIMEOUT assert opts.browser_attach_timeout == TIMEOUT assert opts.options.get(Options.BROWSER_ATTACH_TIMEOUT) == TIMEOUT def test_raises_exception_for_invalid_browser_attach_timeout(opts): with pytest.raises(ValueError): opts.browser_attach_timeout = 'foo' def test_element_scroll_behavior(opts): opts.element_scroll_behavior = ElementScrollBehavior.BOTTOM assert opts.element_scroll_behavior == ElementScrollBehavior.BOTTOM assert opts.options.get(Options.ELEMENT_SCROLL_BEHAVIOR) == ElementScrollBehavior.BOTTOM def test_ensure_clean_session(opts): opts.ensure_clean_session = True assert opts.ensure_clean_session is True assert opts.options.get(Options.ENSURE_CLEAN_SESSION) is True def test_file_upload_dialog_timeout(opts): opts.file_upload_dialog_timeout = TIMEOUT assert opts.file_upload_dialog_timeout is TIMEOUT assert opts.options.get(Options.FILE_UPLOAD_DIALOG_TIMEOUT) is TIMEOUT def test_raises_exception_for_file_upload_dialog_timeout(opts): with pytest.raises(ValueError): opts.file_upload_dialog_timeout = 'foo' def test_force_create_process_api(opts): opts.force_create_process_api = True assert opts.force_create_process_api is True assert opts.options.get(Options.FORCE_CREATE_PROCESS_API) is True def test_force_shell_windows_api(opts): opts.force_shell_windows_api = True assert opts.force_shell_windows_api is True assert opts.options.get(Options.FORCE_SHELL_WINDOWS_API) is True def test_full_page_screenshot(opts): opts.full_page_screenshot = True assert opts.full_page_screenshot is True assert opts.options.get(Options.FULL_PAGE_SCREENSHOT) is True def test_ignore_protected_mode_settings(opts): opts.ignore_protected_mode_settings = True assert opts.ignore_protected_mode_settings is True assert opts.options.get(Options.IGNORE_PROTECTED_MODE_SETTINGS) is True def test_ignore_zoom_level(opts): opts.ignore_zoom_level = True assert opts.ignore_zoom_level is True assert opts.options.get(Options.IGNORE_ZOOM_LEVEL) is True def test_initial_browser_url(opts): url = 'http://www.seleniumhq.org' opts.initial_browser_url = url assert opts.initial_browser_url == url assert opts.options.get(Options.INITIAL_BROWSER_URL) == url def test_native_events(opts): opts.native_events = True assert opts.native_events is True assert opts.options.get(Options.NATIVE_EVENTS) is True def test_persistent_hover(opts): opts.persistent_hover = True assert opts.persistent_hover is True 
assert opts.options.get(Options.PERSISTENT_HOVER) is True def test_require_window_focus(opts): opts.require_window_focus = True assert opts.require_window_focus is True assert opts.options.get(Options.REQUIRE_WINDOW_FOCUS) is True def test_use_per_process_proxy(opts): opts.use_per_process_proxy = True assert opts.use_per_process_proxy is True assert opts.options.get(Options.USE_PER_PROCESS_PROXY) is True def test_validate_cookie_document_type(opts): opts.validate_cookie_document_type = True assert opts.validate_cookie_document_type is True assert opts.options.get(Options.VALIDATE_COOKIE_DOCUMENT_TYPE) is True def test_additional_options(opts): opts.add_additional_option('foo', 'bar') assert opts.additional_options.get('foo') == 'bar' def test_to_capabilities(opts): opts._options['foo'] = 'bar' assert Options.KEY in opts.to_capabilities() assert opts.to_capabilities().get(Options.KEY) == opts._options def test_to_capabilities_arguments(opts): arg = '-k' opts.add_argument(arg) caps_opts = opts.to_capabilities().get(Options.KEY) assert caps_opts.get(Options.SWITCHES) == arg def test_to_capabilities_additional_options(opts): name = 'foo' value = 'bar' opts.add_additional_option(name, value) caps_opts = opts.to_capabilities().get(Options.KEY) assert caps_opts.get(name) == value def test_to_capabilities_should_not_modify_set_options(opts): opts._options['foo'] = 'bar' arg = '-k' opts.add_argument(arg) opts.add_additional_option('baz', 'qux') opts.to_capabilities().get(Options.KEY) assert opts.options.get('foo') == 'bar' assert opts.arguments[0] == arg assert opts.additional_options.get('baz') == 'qux'
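# Hedged illustrative test (added for this edit, not part of the original
# suite): shows several kinds of settings - a boolean option, a timeout, and
# a command-line switch - composing into the single capabilities dictionary
# under Options.KEY, mirroring the individual assertions above.
def test_to_capabilities_combined_sketch(opts):
    opts.ignore_zoom_level = True
    opts.browser_attach_timeout = TIMEOUT
    opts.add_argument('-private')
    caps_opts = opts.to_capabilities().get(Options.KEY)
    assert caps_opts.get(Options.IGNORE_ZOOM_LEVEL) is True
    assert caps_opts.get(Options.BROWSER_ATTACH_TIMEOUT) == TIMEOUT
    assert caps_opts.get(Options.SWITCHES) == '-private'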
apache-2.0
mjgrav2001/scikit-learn
sklearn/decomposition/base.py
313
5647
"""Principal Component Analysis Base Classes""" # Author: Alexandre Gramfort <alexandre.gramfort@inria.fr> # Olivier Grisel <olivier.grisel@ensta.org> # Mathieu Blondel <mathieu@mblondel.org> # Denis A. Engemann <d.engemann@fz-juelich.de> # Kyle Kastner <kastnerkyle@gmail.com> # # License: BSD 3 clause import numpy as np from scipy import linalg from ..base import BaseEstimator, TransformerMixin from ..utils import check_array from ..utils.extmath import fast_dot from ..utils.validation import check_is_fitted from ..externals import six from abc import ABCMeta, abstractmethod class _BasePCA(six.with_metaclass(ABCMeta, BaseEstimator, TransformerMixin)): """Base class for PCA methods. Warning: This class should not be used directly. Use derived classes instead. """ def get_covariance(self): """Compute data covariance with the generative model. ``cov = components_.T * S**2 * components_ + sigma2 * eye(n_features)`` where S**2 contains the explained variances, and sigma2 contains the noise variances. Returns ------- cov : array, shape=(n_features, n_features) Estimated covariance of data. """ components_ = self.components_ exp_var = self.explained_variance_ if self.whiten: components_ = components_ * np.sqrt(exp_var[:, np.newaxis]) exp_var_diff = np.maximum(exp_var - self.noise_variance_, 0.) cov = np.dot(components_.T * exp_var_diff, components_) cov.flat[::len(cov) + 1] += self.noise_variance_ # modify diag inplace return cov def get_precision(self): """Compute data precision matrix with the generative model. Equals the inverse of the covariance but computed with the matrix inversion lemma for efficiency. Returns ------- precision : array, shape=(n_features, n_features) Estimated precision of data. """ n_features = self.components_.shape[1] # handle corner cases first if self.n_components_ == 0: return np.eye(n_features) / self.noise_variance_ if self.n_components_ == n_features: return linalg.inv(self.get_covariance()) # Get precision using matrix inversion lemma components_ = self.components_ exp_var = self.explained_variance_ if self.whiten: components_ = components_ * np.sqrt(exp_var[:, np.newaxis]) exp_var_diff = np.maximum(exp_var - self.noise_variance_, 0.) precision = np.dot(components_, components_.T) / self.noise_variance_ precision.flat[::len(precision) + 1] += 1. / exp_var_diff precision = np.dot(components_.T, np.dot(linalg.inv(precision), components_)) precision /= -(self.noise_variance_ ** 2) precision.flat[::len(precision) + 1] += 1. / self.noise_variance_ return precision @abstractmethod def fit(X, y=None): """Placeholder for fit. Subclasses should implement this method! Fit the model with X. Parameters ---------- X : array-like, shape (n_samples, n_features) Training data, where n_samples is the number of samples and n_features is the number of features. Returns ------- self : object Returns the instance itself. """ def transform(self, X, y=None): """Apply dimensionality reduction to X. X is projected on the first principal components previously extracted from a training set. Parameters ---------- X : array-like, shape (n_samples, n_features) New data, where n_samples is the number of samples and n_features is the number of features. 
Returns ------- X_new : array-like, shape (n_samples, n_components) Examples -------- >>> import numpy as np >>> from sklearn.decomposition import IncrementalPCA >>> X = np.array([[-1, -1], [-2, -1], [-3, -2], [1, 1], [2, 1], [3, 2]]) >>> ipca = IncrementalPCA(n_components=2, batch_size=3) >>> ipca.fit(X) IncrementalPCA(batch_size=3, copy=True, n_components=2, whiten=False) >>> ipca.transform(X) # doctest: +SKIP """ check_is_fitted(self, ['mean_', 'components_'], all_or_any=all) X = check_array(X) if self.mean_ is not None: X = X - self.mean_ X_transformed = fast_dot(X, self.components_.T) if self.whiten: X_transformed /= np.sqrt(self.explained_variance_) return X_transformed def inverse_transform(self, X, y=None): """Transform data back to its original space. In other words, return an input X_original whose transform would be X. Parameters ---------- X : array-like, shape (n_samples, n_components) New data, where n_samples is the number of samples and n_components is the number of components. Returns ------- X_original : array-like, shape (n_samples, n_features) Notes ----- If whitening is enabled, inverse_transform will compute the exact inverse operation, which includes reversing whitening. """ if self.whiten: return fast_dot(X, np.sqrt(self.explained_variance_[:, np.newaxis]) * self.components_) + self.mean_ else: return fast_dot(X, self.components_) + self.mean_
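# Hedged sketch (added for this edit, not part of the module): sanity-checks
# that get_precision() equals the inverse of get_covariance() on a fitted
# estimator. Assumes the concrete PCA estimator from sklearn.decomposition;
# the data shape and seed are arbitrary choices for the example.
def _example_precision_matches_inverse_covariance():
    from sklearn.decomposition import PCA
    rng = np.random.RandomState(0)
    X = rng.randn(50, 4)
    pca = PCA(n_components=2).fit(X)
    # get_precision() uses the matrix inversion lemma, so it should agree
    # with a direct inverse of the covariance up to numerical error.
    assert np.allclose(pca.get_precision(), linalg.inv(pca.get_covariance()))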
bsd-3-clause
skywin/p2pool
SOAPpy/Types.py
289
52214
from __future__ import nested_scopes """ ################################################################################ # Copyright (c) 2003, Pfizer # Copyright (c) 2001, Cayce Ullman. # Copyright (c) 2001, Brian Matthews. # # All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # Redistributions of source code must retain the above copyright notice, this # list of conditions and the following disclaimer. # # Redistributions in binary form must reproduce the above copyright notice, # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # # Neither the name of actzero, inc. nor the names of its contributors may # be used to endorse or promote products derived from this software without # specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE # ARE DISCLAIMED. IN NO EVENT SHALL THE REGENTS OR CONTRIBUTORS BE LIABLE FOR # ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES # (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; # LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND # ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS # SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. # ################################################################################ """ ident = '$Id: Types.py 1496 2010-03-04 23:46:17Z pooryorick $' from version import __version__ import UserList import base64 import cgi import urllib import copy import re import time from types import * # SOAPpy modules from Errors import * from NS import NS from Utilities import encodeHexString, cleanDate from Config import Config ############################################################################### # Utility functions ############################################################################### def isPrivate(name): return name[0]=='_' def isPublic(name): return name[0]!='_' ############################################################################### # Types and Wrappers ############################################################################### class anyType: _validURIs = (NS.XSD, NS.XSD2, NS.XSD3, NS.ENC) def __init__(self, data = None, name = None, typed = 1, attrs = None): if self.__class__ == anyType: raise Error, "anyType can't be instantiated directly" if type(name) in (ListType, TupleType): self._ns, self._name = name else: self._ns = self._validURIs[0] self._name = name self._typed = typed self._attrs = {} self._cache = None self._type = self._typeName() self._data = self._checkValueSpace(data) if attrs != None: self._setAttrs(attrs) def __str__(self): if hasattr(self,'_name') and self._name: return "<%s %s at %d>" % (self.__class__, self._name, id(self)) return "<%s at %d>" % (self.__class__, id(self)) __repr__ = __str__ def _checkValueSpace(self, data): return data def _marshalData(self): return str(self._data) def _marshalAttrs(self, ns_map, builder): a = '' for attr, value in self._attrs.items(): ns, n = builder.genns(ns_map, attr[0]) a += n + ' %s%s="%s"' % \ (ns, attr[1], cgi.escape(str(value), 1)) return 
a def _fixAttr(self, attr): if type(attr) in (StringType, UnicodeType): attr = (None, attr) elif type(attr) == ListType: attr = tuple(attr) elif type(attr) != TupleType: raise AttributeError, "invalid attribute type" if len(attr) != 2: raise AttributeError, "invalid attribute length" if type(attr[0]) not in (NoneType, StringType, UnicodeType): raise AttributeError, "invalid attribute namespace URI type" return attr def _getAttr(self, attr): attr = self._fixAttr(attr) try: return self._attrs[attr] except: return None def _setAttr(self, attr, value): attr = self._fixAttr(attr) if type(value) is StringType: value = unicode(value) self._attrs[attr] = value def _setAttrs(self, attrs): if type(attrs) in (ListType, TupleType): for i in range(0, len(attrs), 2): self._setAttr(attrs[i], attrs[i + 1]) return if type(attrs) == DictType: d = attrs elif isinstance(attrs, anyType): d = attrs._attrs else: raise AttributeError, "invalid attribute type" for attr, value in d.items(): self._setAttr(attr, value) def _setMustUnderstand(self, val): self._setAttr((NS.ENV, "mustUnderstand"), val) def _getMustUnderstand(self): return self._getAttr((NS.ENV, "mustUnderstand")) def _setActor(self, val): self._setAttr((NS.ENV, "actor"), val) def _getActor(self): return self._getAttr((NS.ENV, "actor")) def _typeName(self): return self.__class__.__name__[:-4] def _validNamespaceURI(self, URI, strict): if not hasattr(self, '_typed') or not self._typed: return None if URI in self._validURIs: return URI if not strict: return self._ns raise AttributeError, \ "not a valid namespace for type %s" % self._type class voidType(anyType): pass class stringType(anyType): def _checkValueSpace(self, data): if data == None: raise ValueError, "must supply initial %s value" % self._type if type(data) not in (StringType, UnicodeType): raise AttributeError, "invalid %s type:" % self._type return data def _marshalData(self): return self._data class untypedType(stringType): def __init__(self, data = None, name = None, attrs = None): stringType.__init__(self, data, name, 0, attrs) class IDType(stringType): pass class NCNameType(stringType): pass class NameType(stringType): pass class ENTITYType(stringType): pass class IDREFType(stringType): pass class languageType(stringType): pass class NMTOKENType(stringType): pass class QNameType(stringType): pass class tokenType(anyType): _validURIs = (NS.XSD2, NS.XSD3) __invalidre = '[\n\t]|^ | $| ' def _checkValueSpace(self, data): if data == None: raise ValueError, "must supply initial %s value" % self._type if type(data) not in (StringType, UnicodeType): raise AttributeError, "invalid %s type" % self._type if type(self.__invalidre) == StringType: self.__invalidre = re.compile(self.__invalidre) if self.__invalidre.search(data): raise ValueError, "invalid %s value" % self._type return data class normalizedStringType(anyType): _validURIs = (NS.XSD3,) __invalidre = '[\n\r\t]' def _checkValueSpace(self, data): if data == None: raise ValueError, "must supply initial %s value" % self._type if type(data) not in (StringType, UnicodeType): raise AttributeError, "invalid %s type" % self._type if type(self.__invalidre) == StringType: self.__invalidre = re.compile(self.__invalidre) if self.__invalidre.search(data): raise ValueError, "invalid %s value" % self._type return data class CDATAType(normalizedStringType): _validURIs = (NS.XSD2,) class booleanType(anyType): def __int__(self): return self._data __nonzero__ = __int__ def _marshalData(self): return ['false', 'true'][self._data] def _checkValueSpace(self, 
data): if data == None: raise ValueError, "must supply initial %s value" % self._type if data in (0, '0', 'false', ''): return 0 if data in (1, '1', 'true'): return 1 raise ValueError, "invalid %s value" % self._type class decimalType(anyType): def _checkValueSpace(self, data): if data == None: raise ValueError, "must supply initial %s value" % self._type if type(data) not in (IntType, LongType, FloatType): raise Error, "invalid %s value" % self._type return data class floatType(anyType): def _checkValueSpace(self, data): if data == None: raise ValueError, "must supply initial %s value" % self._type if type(data) not in (IntType, LongType, FloatType) or \ data < -3.4028234663852886E+38 or \ data > 3.4028234663852886E+38: raise ValueError, "invalid %s value: %s" % (self._type, repr(data)) return data def _marshalData(self): return "%.18g" % self._data # More precision class doubleType(anyType): def _checkValueSpace(self, data): if data == None: raise ValueError, "must supply initial %s value" % self._type if type(data) not in (IntType, LongType, FloatType) or \ data < -1.7976931348623158E+308 or \ data > 1.7976931348623157E+308: raise ValueError, "invalid %s value: %s" % (self._type, repr(data)) return data def _marshalData(self): return "%.18g" % self._data # More precision class durationType(anyType): _validURIs = (NS.XSD3,) def _checkValueSpace(self, data): if data == None: raise ValueError, "must supply initial %s value" % self._type try: # A tuple or a scalar is OK, but make them into a list if type(data) == TupleType: data = list(data) elif type(data) != ListType: data = [data] if len(data) > 6: raise Exception, "too many values" # Now check the types of all the components, and find # the first nonzero element along the way. f = -1 for i in range(len(data)): if data[i] == None: data[i] = 0 continue if type(data[i]) not in \ (IntType, LongType, FloatType): raise Exception, "element %d a bad type" % i if data[i] and f == -1: f = i # If they're all 0, just use zero seconds. if f == -1: self._cache = 'PT0S' return (0,) * 6 # Make sure only the last nonzero element has a decimal fraction # and only the first element is negative. d = -1 for i in range(f, len(data)): if data[i]: if d != -1: raise Exception, \ "all except the last nonzero element must be " \ "integers" if data[i] < 0 and i > f: raise Exception, \ "only the first nonzero element can be negative" elif data[i] != long(data[i]): d = i # Pad the list on the left if necessary. if len(data) < 6: n = 6 - len(data) f += n d += n data = [0] * n + data # Save index of the first nonzero element and the decimal # element for _marshalData. 
self.__firstnonzero = f self.__decimal = d except Exception, e: raise ValueError, "invalid %s value - %s" % (self._type, e) return tuple(data) def _marshalData(self): if self._cache == None: d = self._data t = 0 if d[self.__firstnonzero] < 0: s = '-P' else: s = 'P' t = 0 for i in range(self.__firstnonzero, len(d)): if d[i]: if i > 2 and not t: s += 'T' t = 1 if self.__decimal == i: s += "%g" % abs(d[i]) else: s += "%d" % long(abs(d[i])) s += ['Y', 'M', 'D', 'H', 'M', 'S'][i] self._cache = s return self._cache class timeDurationType(durationType): _validURIs = (NS.XSD, NS.XSD2, NS.ENC) class dateTimeType(anyType): _validURIs = (NS.XSD3,) def _checkValueSpace(self, data): try: if data == None: data = time.time() if (type(data) in (IntType, LongType)): data = list(time.gmtime(data)[:6]) elif (type(data) == FloatType): f = data - int(data) data = list(time.gmtime(int(data))[:6]) data[5] += f elif type(data) in (ListType, TupleType): if len(data) < 6: raise Exception, "not enough values" if len(data) > 9: raise Exception, "too many values" data = list(data[:6]) cleanDate(data) else: raise Exception, "invalid type" except Exception, e: raise ValueError, "invalid %s value - %s" % (self._type, e) return tuple(data) def _marshalData(self): if self._cache == None: d = self._data s = "%04d-%02d-%02dT%02d:%02d:%02d" % ((abs(d[0]),) + d[1:]) if d[0] < 0: s = '-' + s f = d[5] - int(d[5]) if f != 0: s += ("%g" % f)[1:] s += 'Z' self._cache = s return self._cache class recurringInstantType(anyType): _validURIs = (NS.XSD,) def _checkValueSpace(self, data): try: if data == None: data = list(time.gmtime(time.time())[:6]) if (type(data) in (IntType, LongType)): data = list(time.gmtime(data)[:6]) elif (type(data) == FloatType): f = data - int(data) data = list(time.gmtime(int(data))[:6]) data[5] += f elif type(data) in (ListType, TupleType): if len(data) < 1: raise Exception, "not enough values" if len(data) > 9: raise Exception, "too many values" data = list(data[:6]) if len(data) < 6: data += [0] * (6 - len(data)) f = len(data) for i in range(f): if data[i] == None: if f < i: raise Exception, \ "only leftmost elements can be none" else: f = i break cleanDate(data, f) else: raise Exception, "invalid type" except Exception, e: raise ValueError, "invalid %s value - %s" % (self._type, e) return tuple(data) def _marshalData(self): if self._cache == None: d = self._data e = list(d) neg = '' if not e[0]: e[0] = '--' else: if e[0] < 0: neg = '-' e[0] = abs(e[0]) if e[0] < 100: e[0] = '-' + "%02d" % e[0] else: e[0] = "%04d" % e[0] for i in range(1, len(e)): if e[i] == None or (i < 3 and e[i] == 0): e[i] = '-' else: if e[i] < 0: neg = '-' e[i] = abs(e[i]) e[i] = "%02d" % e[i] if d[5]: f = abs(d[5] - int(d[5])) if f: e[5] += ("%g" % f)[1:] s = "%s%s-%s-%sT%s:%s:%sZ" % ((neg,) + tuple(e)) self._cache = s return self._cache class timeInstantType(dateTimeType): _validURIs = (NS.XSD, NS.XSD2, NS.ENC) class timePeriodType(dateTimeType): _validURIs = (NS.XSD2, NS.ENC) class timeType(anyType): def _checkValueSpace(self, data): try: if data == None: data = time.gmtime(time.time())[3:6] elif (type(data) == FloatType): f = data - int(data) data = list(time.gmtime(int(data))[3:6]) data[2] += f elif type(data) in (IntType, LongType): data = time.gmtime(data)[3:6] elif type(data) in (ListType, TupleType): if len(data) == 9: data = data[3:6] elif len(data) > 3: raise Exception, "too many values" data = [None, None, None] + list(data) if len(data) < 6: data += [0] * (6 - len(data)) cleanDate(data, 3) data = data[3:] else: raise 
Exception, "invalid type" except Exception, e: raise ValueError, "invalid %s value - %s" % (self._type, e) return tuple(data) def _marshalData(self): if self._cache == None: d = self._data #s = '' # #s = time.strftime("%H:%M:%S", (0, 0, 0) + d + (0, 0, -1)) s = "%02d:%02d:%02d" % d f = d[2] - int(d[2]) if f != 0: s += ("%g" % f)[1:] s += 'Z' self._cache = s return self._cache class dateType(anyType): def _checkValueSpace(self, data): try: if data == None: data = time.gmtime(time.time())[0:3] elif type(data) in (IntType, LongType, FloatType): data = time.gmtime(data)[0:3] elif type(data) in (ListType, TupleType): if len(data) == 9: data = data[0:3] elif len(data) > 3: raise Exception, "too many values" data = list(data) if len(data) < 3: data += [1, 1, 1][len(data):] data += [0, 0, 0] cleanDate(data) data = data[:3] else: raise Exception, "invalid type" except Exception, e: raise ValueError, "invalid %s value - %s" % (self._type, e) return tuple(data) def _marshalData(self): if self._cache == None: d = self._data s = "%04d-%02d-%02dZ" % ((abs(d[0]),) + d[1:]) if d[0] < 0: s = '-' + s self._cache = s return self._cache class gYearMonthType(anyType): _validURIs = (NS.XSD3,) def _checkValueSpace(self, data): try: if data == None: data = time.gmtime(time.time())[0:2] elif type(data) in (IntType, LongType, FloatType): data = time.gmtime(data)[0:2] elif type(data) in (ListType, TupleType): if len(data) == 9: data = data[0:2] elif len(data) > 2: raise Exception, "too many values" data = list(data) if len(data) < 2: data += [1, 1][len(data):] data += [1, 0, 0, 0] cleanDate(data) data = data[:2] else: raise Exception, "invalid type" except Exception, e: raise ValueError, "invalid %s value - %s" % (self._type, e) return tuple(data) def _marshalData(self): if self._cache == None: d = self._data s = "%04d-%02dZ" % ((abs(d[0]),) + d[1:]) if d[0] < 0: s = '-' + s self._cache = s return self._cache class gYearType(anyType): _validURIs = (NS.XSD3,) def _checkValueSpace(self, data): try: if data == None: data = time.gmtime(time.time())[0:1] elif type(data) in (IntType, LongType, FloatType): data = [data] if type(data) in (ListType, TupleType): if len(data) == 9: data = data[0:1] elif len(data) < 1: raise Exception, "too few values" elif len(data) > 1: raise Exception, "too many values" if type(data[0]) == FloatType: try: s = int(data[0]) except: s = long(data[0]) if s != data[0]: raise Exception, "not integral" data = [s] elif type(data[0]) not in (IntType, LongType): raise Exception, "bad type" else: raise Exception, "invalid type" except Exception, e: raise ValueError, "invalid %s value - %s" % (self._type, e) return data[0] def _marshalData(self): if self._cache == None: d = self._data s = "%04dZ" % abs(d) if d < 0: s = '-' + s self._cache = s return self._cache class centuryType(anyType): _validURIs = (NS.XSD2, NS.ENC) def _checkValueSpace(self, data): try: if data == None: data = time.gmtime(time.time())[0:1] / 100 elif type(data) in (IntType, LongType, FloatType): data = [data] if type(data) in (ListType, TupleType): if len(data) == 9: data = data[0:1] / 100 elif len(data) < 1: raise Exception, "too few values" elif len(data) > 1: raise Exception, "too many values" if type(data[0]) == FloatType: try: s = int(data[0]) except: s = long(data[0]) if s != data[0]: raise Exception, "not integral" data = [s] elif type(data[0]) not in (IntType, LongType): raise Exception, "bad type" else: raise Exception, "invalid type" except Exception, e: raise ValueError, "invalid %s value - %s" % (self._type, e) return 
data[0] def _marshalData(self): if self._cache == None: d = self._data s = "%02dZ" % abs(d) if d < 0: s = '-' + s self._cache = s return self._cache class yearType(gYearType): _validURIs = (NS.XSD2, NS.ENC) class gMonthDayType(anyType): _validURIs = (NS.XSD3,) def _checkValueSpace(self, data): try: if data == None: data = time.gmtime(time.time())[1:3] elif type(data) in (IntType, LongType, FloatType): data = time.gmtime(data)[1:3] elif type(data) in (ListType, TupleType): if len(data) == 9: data = data[0:2] elif len(data) > 2: raise Exception, "too many values" data = list(data) if len(data) < 2: data += [1, 1][len(data):] data = [0] + data + [0, 0, 0] cleanDate(data, 1) data = data[1:3] else: raise Exception, "invalid type" except Exception, e: raise ValueError, "invalid %s value - %s" % (self._type, e) return tuple(data) def _marshalData(self): if self._cache == None: self._cache = "--%02d-%02dZ" % self._data return self._cache class recurringDateType(gMonthDayType): _validURIs = (NS.XSD2, NS.ENC) class gMonthType(anyType): _validURIs = (NS.XSD3,) def _checkValueSpace(self, data): try: if data == None: data = time.gmtime(time.time())[1:2] elif type(data) in (IntType, LongType, FloatType): data = [data] if type(data) in (ListType, TupleType): if len(data) == 9: data = data[1:2] elif len(data) < 1: raise Exception, "too few values" elif len(data) > 1: raise Exception, "too many values" if type(data[0]) == FloatType: try: s = int(data[0]) except: s = long(data[0]) if s != data[0]: raise Exception, "not integral" data = [s] elif type(data[0]) not in (IntType, LongType): raise Exception, "bad type" if data[0] < 1 or data[0] > 12: raise Exception, "bad value" else: raise Exception, "invalid type" except Exception, e: raise ValueError, "invalid %s value - %s" % (self._type, e) return data[0] def _marshalData(self): if self._cache == None: self._cache = "--%02d--Z" % self._data return self._cache class monthType(gMonthType): _validURIs = (NS.XSD2, NS.ENC) class gDayType(anyType): _validURIs = (NS.XSD3,) def _checkValueSpace(self, data): try: if data == None: data = time.gmtime(time.time())[2:3] elif type(data) in (IntType, LongType, FloatType): data = [data] if type(data) in (ListType, TupleType): if len(data) == 9: data = data[2:3] elif len(data) < 1: raise Exception, "too few values" elif len(data) > 1: raise Exception, "too many values" if type(data[0]) == FloatType: try: s = int(data[0]) except: s = long(data[0]) if s != data[0]: raise Exception, "not integral" data = [s] elif type(data[0]) not in (IntType, LongType): raise Exception, "bad type" if data[0] < 1 or data[0] > 31: raise Exception, "bad value" else: raise Exception, "invalid type" except Exception, e: raise ValueError, "invalid %s value - %s" % (self._type, e) return data[0] def _marshalData(self): if self._cache == None: self._cache = "---%02dZ" % self._data return self._cache class recurringDayType(gDayType): _validURIs = (NS.XSD2, NS.ENC) class hexBinaryType(anyType): _validURIs = (NS.XSD3,) def _checkValueSpace(self, data): if data == None: raise ValueError, "must supply initial %s value" % self._type if type(data) not in (StringType, UnicodeType): raise AttributeError, "invalid %s type" % self._type return data def _marshalData(self): if self._cache == None: self._cache = encodeHexString(self._data) return self._cache class base64BinaryType(anyType): _validURIs = (NS.XSD3,) def _checkValueSpace(self, data): if data == None: raise ValueError, "must supply initial %s value" % self._type if type(data) not in (StringType, 
UnicodeType): raise AttributeError, "invalid %s type" % self._type return data def _marshalData(self): if self._cache == None: self._cache = base64.encodestring(self._data) return self._cache class base64Type(base64BinaryType): _validURIs = (NS.ENC,) class binaryType(anyType): _validURIs = (NS.XSD, NS.ENC) def __init__(self, data, name = None, typed = 1, encoding = 'base64', attrs = None): anyType.__init__(self, data, name, typed, attrs) self._setAttr('encoding', encoding) def _marshalData(self): if self._cache == None: if self._getAttr((None, 'encoding')) == 'base64': self._cache = base64.encodestring(self._data) else: self._cache = encodeHexString(self._data) return self._cache def _checkValueSpace(self, data): if data == None: raise ValueError, "must supply initial %s value" % self._type if type(data) not in (StringType, UnicodeType): raise AttributeError, "invalid %s type" % self._type return data def _setAttr(self, attr, value): attr = self._fixAttr(attr) if attr[1] == 'encoding': if attr[0] != None or value not in ('base64', 'hex'): raise AttributeError, "invalid encoding" self._cache = None anyType._setAttr(self, attr, value) class anyURIType(anyType): _validURIs = (NS.XSD3,) def _checkValueSpace(self, data): if data == None: raise ValueError, "must supply initial %s value" % self._type if type(data) not in (StringType, UnicodeType): raise AttributeError, "invalid %s type" % self._type return data def _marshalData(self): if self._cache == None: self._cache = urllib.quote(self._data) return self._cache class uriType(anyURIType): _validURIs = (NS.XSD,) class uriReferenceType(anyURIType): _validURIs = (NS.XSD2,) class NOTATIONType(anyType): def __init__(self, data, name = None, typed = 1, attrs = None): if self.__class__ == NOTATIONType: raise Error, "a NOTATION can't be instantiated directly" anyType.__init__(self, data, name, typed, attrs) class ENTITIESType(anyType): def _checkValueSpace(self, data): if data == None: raise ValueError, "must supply initial %s value" % self._type if type(data) in (StringType, UnicodeType): return (data,) if type(data) not in (ListType, TupleType) or \ filter (lambda x: type(x) not in (StringType, UnicodeType), data): raise AttributeError, "invalid %s type" % self._type return data def _marshalData(self): return ' '.join(self._data) class IDREFSType(ENTITIESType): pass class NMTOKENSType(ENTITIESType): pass class integerType(anyType): def _checkValueSpace(self, data): if data == None: raise ValueError, "must supply initial %s value" % self._type if type(data) not in (IntType, LongType): raise ValueError, "invalid %s value" % self._type return data class nonPositiveIntegerType(anyType): _validURIs = (NS.XSD2, NS.XSD3, NS.ENC) def _checkValueSpace(self, data): if data == None: raise ValueError, "must supply initial %s value" % self._type if type(data) not in (IntType, LongType) or data > 0: raise ValueError, "invalid %s value" % self._type return data class non_Positive_IntegerType(nonPositiveIntegerType): _validURIs = (NS.XSD,) def _typeName(self): return 'non-positive-integer' class negativeIntegerType(anyType): _validURIs = (NS.XSD2, NS.XSD3, NS.ENC) def _checkValueSpace(self, data): if data == None: raise ValueError, "must supply initial %s value" % self._type if type(data) not in (IntType, LongType) or data >= 0: raise ValueError, "invalid %s value" % self._type return data class negative_IntegerType(negativeIntegerType): _validURIs = (NS.XSD,) def _typeName(self): return 'negative-integer' class longType(anyType): _validURIs = (NS.XSD2, NS.XSD3, 
NS.ENC) def _checkValueSpace(self, data): if data == None: raise ValueError, "must supply initial %s value" % self._type if type(data) not in (IntType, LongType) or \ data < -9223372036854775808L or \ data > 9223372036854775807L: raise ValueError, "invalid %s value" % self._type return data class intType(anyType): _validURIs = (NS.XSD2, NS.XSD3, NS.ENC) def _checkValueSpace(self, data): if data == None: raise ValueError, "must supply initial %s value" % self._type if type(data) not in (IntType, LongType) or \ data < -2147483648L or \ data > 2147483647L: raise ValueError, "invalid %s value" % self._type return data class shortType(anyType): _validURIs = (NS.XSD2, NS.XSD3, NS.ENC) def _checkValueSpace(self, data): if data == None: raise ValueError, "must supply initial %s value" % self._type if type(data) not in (IntType, LongType) or \ data < -32768 or \ data > 32767: raise ValueError, "invalid %s value" % self._type return data class byteType(anyType): _validURIs = (NS.XSD2, NS.XSD3, NS.ENC) def _checkValueSpace(self, data): if data == None: raise ValueError, "must supply initial %s value" % self._type if type(data) not in (IntType, LongType) or \ data < -128 or \ data > 127: raise ValueError, "invalid %s value" % self._type return data class nonNegativeIntegerType(anyType): _validURIs = (NS.XSD2, NS.XSD3, NS.ENC) def _checkValueSpace(self, data): if data == None: raise ValueError, "must supply initial %s value" % self._type if type(data) not in (IntType, LongType) or data < 0: raise ValueError, "invalid %s value" % self._type return data class non_Negative_IntegerType(nonNegativeIntegerType): _validURIs = (NS.XSD,) def _typeName(self): return 'non-negative-integer' class unsignedLongType(anyType): _validURIs = (NS.XSD2, NS.XSD3, NS.ENC) def _checkValueSpace(self, data): if data == None: raise ValueError, "must supply initial %s value" % self._type if type(data) not in (IntType, LongType) or \ data < 0 or \ data > 18446744073709551615L: raise ValueError, "invalid %s value" % self._type return data class unsignedIntType(anyType): _validURIs = (NS.XSD2, NS.XSD3, NS.ENC) def _checkValueSpace(self, data): if data == None: raise ValueError, "must supply initial %s value" % self._type if type(data) not in (IntType, LongType) or \ data < 0 or \ data > 4294967295L: raise ValueError, "invalid %s value" % self._type return data class unsignedShortType(anyType): _validURIs = (NS.XSD2, NS.XSD3, NS.ENC) def _checkValueSpace(self, data): if data == None: raise ValueError, "must supply initial %s value" % self._type if type(data) not in (IntType, LongType) or \ data < 0 or \ data > 65535: raise ValueError, "invalid %s value" % self._type return data class unsignedByteType(anyType): _validURIs = (NS.XSD2, NS.XSD3, NS.ENC) def _checkValueSpace(self, data): if data == None: raise ValueError, "must supply initial %s value" % self._type if type(data) not in (IntType, LongType) or \ data < 0 or \ data > 255: raise ValueError, "invalid %s value" % self._type return data class positiveIntegerType(anyType): _validURIs = (NS.XSD2, NS.XSD3, NS.ENC) def _checkValueSpace(self, data): if data == None: raise ValueError, "must supply initial %s value" % self._type if type(data) not in (IntType, LongType) or data <= 0: raise ValueError, "invalid %s value" % self._type return data class positive_IntegerType(positiveIntegerType): _validURIs = (NS.XSD,) def _typeName(self): return 'positive-integer' # Now compound types class compoundType(anyType): def __init__(self, data = None, name = None, typed = 1, attrs = None): if 
self.__class__ == compoundType: raise Error, "a compound can't be instantiated directly" anyType.__init__(self, data, name, typed, attrs) self._keyord = [] if type(data) == DictType: self.__dict__.update(data) def _aslist(self, item=None): if item is not None: return self.__dict__[self._keyord[item]] else: return map( lambda x: self.__dict__[x], self._keyord) def _asdict(self, item=None, encoding=Config.dict_encoding): if item is not None: if type(item) in (UnicodeType,StringType): item = item.encode(encoding) return self.__dict__[item] else: retval = {} def fun(x): retval[x.encode(encoding)] = self.__dict__[x] if hasattr(self, '_keyord'): map( fun, self._keyord) else: for name in dir(self): if isPublic(name): retval[name] = getattr(self,name) return retval def __getitem__(self, item): if type(item) == IntType: return self.__dict__[self._keyord[item]] else: return getattr(self, item) def __len__(self): return len(self._keyord) def __nonzero__(self): return 1 def _keys(self): return filter(lambda x: x[0] != '_', self.__dict__.keys()) def _addItem(self, name, value, attrs = None): if name in self._keyord: if type(self.__dict__[name]) != ListType: self.__dict__[name] = [self.__dict__[name]] self.__dict__[name].append(value) else: self.__dict__[name] = value self._keyord.append(name) def _placeItem(self, name, value, pos, subpos = 0, attrs = None): if subpos == 0 and type(self.__dict__[name]) != ListType: self.__dict__[name] = value else: self.__dict__[name][subpos] = value # only add to key order list if it does not already # exist in list if not (name in self._keyord): if pos < len(self._keyord): self._keyord[pos] = name else: self._keyord.append(name) def _getItemAsList(self, name, default = []): try: d = self.__dict__[name] except: return default if type(d) == ListType: return d return [d] def __str__(self): return anyType.__str__(self) + ": " + str(self._asdict()) def __repr__(self): return self.__str__() class structType(compoundType): pass class headerType(structType): _validURIs = (NS.ENV,) def __init__(self, data = None, typed = 1, attrs = None): structType.__init__(self, data, "Header", typed, attrs) class bodyType(structType): _validURIs = (NS.ENV,) def __init__(self, data = None, typed = 1, attrs = None): structType.__init__(self, data, "Body", typed, attrs) class arrayType(UserList.UserList, compoundType): def __init__(self, data = None, name = None, attrs = None, offset = 0, rank = None, asize = 0, elemsname = None): if data: if type(data) not in (ListType, TupleType): raise Error, "Data must be a sequence" UserList.UserList.__init__(self, data) compoundType.__init__(self, data, name, 0, attrs) self._elemsname = elemsname or "item" if data == None: self._rank = rank # According to 5.4.2.2 in the SOAP spec, each element in a # sparse array must have a position. _posstate keeps track of # whether we've seen a position or not.
It's possible values # are: # -1 No elements have been added, so the state is indeterminate # 0 An element without a position has been added, so no # elements can have positions # 1 An element with a position has been added, so all elements # must have positions self._posstate = -1 self._full = 0 if asize in ('', None): asize = '0' self._dims = map (lambda x: int(x), str(asize).split(',')) self._dims.reverse() # It's easier to work with this way self._poss = [0] * len(self._dims) # This will end up # reversed too for i in range(len(self._dims)): if self._dims[i] < 0 or \ self._dims[i] == 0 and len(self._dims) > 1: raise TypeError, "invalid Array dimensions" if offset > 0: self._poss[i] = offset % self._dims[i] offset = int(offset / self._dims[i]) # Don't break out of the loop if offset is 0 so we test all the # dimensions for > 0. if offset: raise AttributeError, "invalid Array offset" a = [None] * self._dims[0] for i in range(1, len(self._dims)): b = [] for j in range(self._dims[i]): b.append(copy.deepcopy(a)) a = b self.data = a def _aslist(self, item=None): if item is not None: return self.data[int(item)] else: return self.data def _asdict(self, item=None, encoding=Config.dict_encoding): if item is not None: if type(item) in (UnicodeType,StringType): item = item.encode(encoding) return self.data[int(item)] else: retval = {} def fun(x): retval[str(x).encode(encoding)] = self.data[x] map( fun, range(len(self.data)) ) return retval def __getitem__(self, item): try: return self.data[int(item)] except ValueError: return getattr(self, item) def __len__(self): return len(self.data) def __nonzero__(self): return 1 def __str__(self): return anyType.__str__(self) + ": " + str(self._aslist()) def _keys(self): return filter(lambda x: x[0] != '_', self.__dict__.keys()) def _addItem(self, name, value, attrs): if self._full: raise ValueError, "Array is full" pos = attrs.get((NS.ENC, 'position')) if pos != None: if self._posstate == 0: raise AttributeError, \ "all elements in a sparse Array must have a " \ "position attribute" self._posstate = 1 try: if pos[0] == '[' and pos[-1] == ']': pos = map (lambda x: int(x), pos[1:-1].split(',')) pos.reverse() if len(pos) == 1: pos = pos[0] curpos = [0] * len(self._dims) for i in range(len(self._dims)): curpos[i] = pos % self._dims[i] pos = int(pos / self._dims[i]) if pos == 0: break if pos: raise Exception elif len(pos) != len(self._dims): raise Exception else: for i in range(len(self._dims)): if pos[i] >= self._dims[i]: raise Exception curpos = pos else: raise Exception except: raise AttributeError, \ "invalid Array element position %s" % str(pos) else: if self._posstate == 1: raise AttributeError, \ "only elements in a sparse Array may have a " \ "position attribute" self._posstate = 0 curpos = self._poss a = self.data for i in range(len(self._dims) - 1, 0, -1): a = a[curpos[i]] if curpos[0] >= len(a): a += [None] * (len(a) - curpos[0] + 1) a[curpos[0]] = value if pos == None: self._poss[0] += 1 for i in range(len(self._dims) - 1): if self._poss[i] < self._dims[i]: break self._poss[i] = 0 self._poss[i + 1] += 1 if self._dims[-1] and self._poss[-1] >= self._dims[-1]: #self._full = 1 #FIXME: why is this occuring? 
pass def _placeItem(self, name, value, pos, subpos, attrs = None): curpos = [0] * len(self._dims) for i in range(len(self._dims)): if self._dims[i] == 0: curpos[0] = pos break curpos[i] = pos % self._dims[i] pos = int(pos / self._dims[i]) if pos == 0: break if self._dims[i] != 0 and pos: raise Error, "array index out of range" a = self.data for i in range(len(self._dims) - 1, 0, -1): a = a[curpos[i]] if curpos[0] >= len(a): a += [None] * (len(a) - curpos[0] + 1) a[curpos[0]] = value class typedArrayType(arrayType): def __init__(self, data = None, name = None, typed = None, attrs = None, offset = 0, rank = None, asize = 0, elemsname = None, complexType = 0): arrayType.__init__(self, data, name, attrs, offset, rank, asize, elemsname) self._typed = 1 self._type = typed self._complexType = complexType class faultType(structType, Error): def __init__(self, faultcode = "", faultstring = "", detail = None): self.faultcode = faultcode self.faultstring = faultstring if detail != None: self.detail = detail structType.__init__(self, None, 0) def _setDetail(self, detail = None): if detail != None: self.detail = detail else: try: del self.detail except AttributeError: pass def __repr__(self): if getattr(self, 'detail', None) != None: return "<Fault %s: %s: %s>" % (self.faultcode, self.faultstring, self.detail) else: return "<Fault %s: %s>" % (self.faultcode, self.faultstring) __str__ = __repr__ def __call__(self): return (self.faultcode, self.faultstring, self.detail) class SOAPException(Exception): def __init__(self, code="", string="", detail=None): self.value = ("SOAPpy SOAP Exception", code, string, detail) self.code = code self.string = string self.detail = detail def __str__(self): return repr(self.value) class RequiredHeaderMismatch(Exception): def __init__(self, value): self.value = value def __str__(self): return repr(self.value) class MethodNotFound(Exception): def __init__(self, value): (val, detail) = value.split(":") self.value = val self.detail = detail def __str__(self): return repr(self.value, self.detail) class AuthorizationFailed(Exception): def __init__(self, value): self.value = value def __str__(self): return repr(self.value) class MethodFailed(Exception): def __init__(self, value): self.value = value def __str__(self): return repr(self.value) ####### # Convert complex SOAPpy objects to native python equivalents ####### def simplify(object, level=0): """ Convert the SOAPpy objects and their contents to simple python types. This function recursively converts the passed 'container' object, and all public subobjects. (Private subobjects have names that start with '_'.) 
Conversions: - faultType --> raise python exception - arrayType --> array - compoundType --> dictionary """ if level > 10: return object if isinstance( object, faultType ): if object.faultstring == "Required Header Misunderstood": raise RequiredHeaderMismatch(object.detail) elif object.faultstring == "Method Not Found": raise MethodNotFound(object.detail) elif object.faultstring == "Authorization Failed": raise AuthorizationFailed(object.detail) elif object.faultstring == "Method Failed": raise MethodFailed(object.detail) else: se = SOAPException(object.faultcode, object.faultstring, object.detail) raise se elif isinstance( object, arrayType ): data = object._aslist() for k in range(len(data)): data[k] = simplify(data[k], level=level+1) return data elif isinstance( object, compoundType ) or isinstance(object, structType): data = object._asdict() for k in data.keys(): if isPublic(k): data[k] = simplify(data[k], level=level+1) return data elif type(object)==DictType: for k in object.keys(): if isPublic(k): object[k] = simplify(object[k]) return object elif type(object)==list: for k in range(len(object)): object[k] = simplify(object[k]) return object else: return object def simplify_contents(object, level=0): """ Convert the contents of SOAPpy objects to simple python types. This function recursively converts the sub-objects contained in a 'container' object to simple python types. Conversions: - faultType --> raise python exception - arrayType --> array - compoundType --> dictionary """ if level>10: return object if isinstance( object, faultType ): for k in object._keys(): if isPublic(k): setattr(object, k, simplify(object[k], level=level+1)) raise object elif isinstance( object, arrayType ): data = object._aslist() for k in range(len(data)): object[k] = simplify(data[k], level=level+1) elif isinstance(object, structType): data = object._asdict() for k in data.keys(): if isPublic(k): setattr(object, k, simplify(data[k], level=level+1)) elif isinstance( object, compoundType ) : data = object._asdict() for k in data.keys(): if isPublic(k): object[k] = simplify(data[k], level=level+1) elif type(object)==DictType: for k in object.keys(): if isPublic(k): object[k] = simplify(object[k]) elif type(object)==list: for k in range(len(object)): object[k] = simplify(object[k]) return object
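# Hedged usage sketch (added for this edit; Python 2, like the rest of this
# module): builds a structType by hand and flattens it with simplify().
# The field names and values are arbitrary examples.
def _example_simplify():
    s = structType()
    s._addItem('name', 'example')
    s._addItem('values', arrayType([1, 2, 3]))
    # simplify() recursively converts SOAPpy containers to plain dicts and
    # lists; here it returns {'name': 'example', 'values': [1, 2, 3]}.
    return simplify(s)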
gpl-3.0
TraceContext/tracecontext-spec
test/server.py
1
3239
from aiohttp import ClientSession, ClientTimeout, ContentTypeError, web from multidict import MultiDict class AsyncTestServer(object): scopes = {} def __init__(self, host, port, timeout = 5): self.host = host self.port = port self.timeout = ClientTimeout(total = timeout) self.app = web.Application() self.app.add_routes([ web.post('/{scope}', self.scope_handler), ]) async def start(self): self.runner = web.AppRunner(self.app) await self.runner.setup() self.site = web.TCPSite(self.runner, self.host, self.port) await self.site.start() print('harness listening on http://%s:%s'%(self.host, self.port)) async def stop(self): await self.runner.cleanup() async def scope_handler(self, request): scope_id = request.match_info['scope'].split('.', maxsplit = 1) callback_id = None if len(scope_id) == 1 else scope_id[1] scope_id = scope_id[0] arguments = await request.json() scope = None if callback_id: scope = self.scopes[scope_id] scope[callback_id] = { 'headers': list(request.headers.items()), 'arguments': arguments, } else: scope = { 'headers': list(request.headers.items()), 'arguments': arguments, 'results': [], } self.scopes[scope_id] = scope if not arguments: return web.json_response(None) if not isinstance(arguments, list): arguments = [arguments] for action in arguments: headers = [['Accept', 'application/json']] if 'headers' in action: headers += action['headers'] async with ClientSession(headers = headers, timeout = self.timeout) as session: arguments = [] if 'arguments' in action: arguments = action['arguments'] or [] result = {} result['url'] = action['url'] scope['results'].append(result) try: async with session.post(action['url'], json = arguments) as response: result['status'] = response.status result['headers'] = list(response.headers.items()) result['body'] = await response.json(content_type = 'application/json') except ContentTypeError as err: result['body'] = await response.text() except Exception as err: result['exception'] = type(err).__name__ result['msg'] = str(err) if not callback_id: del self.scopes[scope_id] return web.json_response(scope) class TestServer(object): def __init__(self, host, port, timeout = 5): import asyncio from threading import Thread self.loop = asyncio.get_event_loop() self.server = AsyncTestServer(host, port, timeout) self.thread = Thread(target = self.monitor) self.run = True def monitor(self): import asyncio while self.run: self.loop.run_until_complete(asyncio.sleep(0.2)) def start(self): self.loop.run_until_complete(self.server.start()) self.thread.start() def stop(self): self.run = False self.thread.join() self.loop.run_until_complete(self.server.stop()) def __enter__(self): self.start() return self def __exit__(self, type, value, traceback): self.stop() if __name__ == '__main__': import sys host = '127.0.0.1' port = 7777 if len(sys.argv) >= 2: host = sys.argv[1] if len(sys.argv) >= 3: port = int(sys.argv[2]) with TestServer(host = host, port = port) as server: input('Press Enter to quit...')
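# Editor's sketch (hypothetical client, not part of the harness): one way
# to drive the server above once it is listening on the defaults. The
# scope id 'demo' and the '.cb' callback suffix are illustrative; the
# handler treats anything after the first '.' as a callback id, and it
# echoes the collected headers and results back as JSON.
def demo_request(host='127.0.0.1', port=7777):
    import json
    from urllib.request import Request, urlopen
    actions = [{'url': 'http://%s:%s/demo.cb' % (host, port),
                'arguments': None}]
    req = Request('http://%s:%s/demo' % (host, port),
                  data=json.dumps(actions).encode(),
                  headers={'Content-Type': 'application/json'})
    with urlopen(req) as resp:
        return json.load(resp)  # scope dict with 'headers' and 'results'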
apache-2.0
markhamstra/spark
examples/src/main/python/sql/arrow.py
13
3997
# # Licensed to the Apache Software Foundation (ASF) under one or more # contributor license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright ownership. # The ASF licenses this file to You under the Apache License, Version 2.0 # (the "License"); you may not use this file except in compliance with # the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # """ A simple example demonstrating Arrow in Spark. Run with: ./bin/spark-submit examples/src/main/python/sql/arrow.py """ from __future__ import print_function from pyspark.sql import SparkSession from pyspark.sql.utils import require_minimum_pandas_version, require_minimum_pyarrow_version require_minimum_pandas_version() require_minimum_pyarrow_version() def dataframe_with_arrow_example(spark): # $example on:dataframe_with_arrow$ import numpy as np import pandas as pd # Enable Arrow-based columnar data transfers spark.conf.set("spark.sql.execution.arrow.enabled", "true") # Generate a Pandas DataFrame pdf = pd.DataFrame(np.random.rand(100, 3)) # Create a Spark DataFrame from a Pandas DataFrame using Arrow df = spark.createDataFrame(pdf) # Convert the Spark DataFrame back to a Pandas DataFrame using Arrow result_pdf = df.select("*").toPandas() # $example off:dataframe_with_arrow$ print("Pandas DataFrame result statistics:\n%s\n" % str(result_pdf.describe())) def scalar_pandas_udf_example(spark): # $example on:scalar_pandas_udf$ import pandas as pd from pyspark.sql.functions import col, pandas_udf from pyspark.sql.types import LongType # Declare the function and create the UDF def multiply_func(a, b): return a * b multiply = pandas_udf(multiply_func, returnType=LongType()) # The function for a pandas_udf should be able to execute with local Pandas data x = pd.Series([1, 2, 3]) print(multiply_func(x, x)) # 0 1 # 1 4 # 2 9 # dtype: int64 # Create a Spark DataFrame, 'spark' is an existing SparkSession df = spark.createDataFrame(pd.DataFrame(x, columns=["x"])) # Execute function as a Spark vectorized UDF df.select(multiply(col("x"), col("x"))).show() # +-------------------+ # |multiply_func(x, x)| # +-------------------+ # | 1| # | 4| # | 9| # +-------------------+ # $example off:scalar_pandas_udf$ def grouped_map_pandas_udf_example(spark): # $example on:grouped_map_pandas_udf$ from pyspark.sql.functions import pandas_udf, PandasUDFType df = spark.createDataFrame( [(1, 1.0), (1, 2.0), (2, 3.0), (2, 5.0), (2, 10.0)], ("id", "v")) @pandas_udf("id long, v double", PandasUDFType.GROUPED_MAP) def substract_mean(pdf): # pdf is a pandas.DataFrame v = pdf.v return pdf.assign(v=v - v.mean()) df.groupby("id").apply(substract_mean).show() # +---+----+ # | id| v| # +---+----+ # | 1|-0.5| # | 1| 0.5| # | 2|-3.0| # | 2|-1.0| # | 2| 4.0| # +---+----+ # $example off:grouped_map_pandas_udf$ if __name__ == "__main__": spark = SparkSession \ .builder \ .appName("Python Arrow-in-Spark example") \ .getOrCreate() print("Running Pandas to/from conversion example") dataframe_with_arrow_example(spark) print("Running pandas_udf scalar example") scalar_pandas_udf_example(spark) print("Running pandas_udf grouped map example") 
grouped_map_pandas_udf_example(spark) spark.stop()
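# Editor's sketch: a grouped aggregate pandas_udf, the natural companion
# to the grouped map example above. This assumes Spark 2.4+, where
# PandasUDFType.GROUPED_AGG is available; it is illustrative and not part
# of the original example set.
def grouped_agg_pandas_udf_example(spark):
    from pyspark.sql.functions import pandas_udf, PandasUDFType

    df = spark.createDataFrame(
        [(1, 1.0), (1, 2.0), (2, 3.0)], ("id", "v"))

    @pandas_udf("double", PandasUDFType.GROUPED_AGG)
    def mean_udf(v):
        # v arrives as a pandas.Series holding one group's values
        return v.mean()

    df.groupby("id").agg(mean_udf(df["v"])).show()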
apache-2.0
rosmo/ansible-modules-core
cloud/openstack/os_ironic_node.py
131
12309
#!/usr/bin/python # coding: utf-8 -*- # (c) 2015, Hewlett-Packard Development Company, L.P. # # This module is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This software is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this software. If not, see <http://www.gnu.org/licenses/>. try: import shade HAS_SHADE = True except ImportError: HAS_SHADE = False DOCUMENTATION = ''' --- module: os_ironic_node short_description: Activate/Deactivate Bare Metal Resources from OpenStack author: "Monty Taylor (@emonty)" extends_documentation_fragment: openstack version_added: "2.0" description: - Deploy to nodes controlled by Ironic. options: state: description: - Indicates desired state of the resource choices: ['present', 'absent'] default: present deploy: description: - Indicates if the resource should be deployed. Allows for deployment logic to be disengaged and control of the node power or maintenance state to be changed. choices: ['true', 'false'] default: true uuid: description: - globally unique identifier (UUID) to be given to the resource. required: false default: None ironic_url: description: - If noauth mode is utilized, this is required to be set to the endpoint URL for the Ironic API. Use with "auth" and "auth_type" settings set to None. required: false default: None config_drive: description: - A configdrive file or HTTP(S) URL that will be passed along to the node. required: false default: None instance_info: description: - Definition of the instance information which is used to deploy the node. This information is only required when an instance is set to present. suboptions: image_source: description: - An HTTP(S) URL where the image can be retrieved from. image_checksum: description: - The checksum of image_source. image_disk_format: description: - The type of image that has been requested to be deployed. power: description: - A setting to allow power state to be asserted allowing nodes that are not yet deployed to be powered on, and nodes that are deployed to be powered off. choices: ['present', 'absent'] default: present maintenance: description: - A setting to allow the direct control if a node is in maintenance mode. required: false default: false maintenance_reason: description: - A string expression regarding the reason a node is in a maintenance mode. required: false default: None ''' EXAMPLES = ''' # Activate a node by booting an image with a configdrive attached os_ironic_node: cloud: "openstack" uuid: "d44666e1-35b3-4f6b-acb0-88ab7052da69" state: present power: present deploy: True maintenance: False config_drive: "http://192.168.1.1/host-configdrive.iso" instance_info: image_source: "http://192.168.1.1/deploy_image.img" image_checksum: "356a6b55ecc511a20c33c946c4e678af" image_disk_format: "qcow" delegate_to: localhost ''' def _choose_id_value(module): if module.params['uuid']: return module.params['uuid'] if module.params['name']: return module.params['name'] return None # TODO(TheJulia): Change this over to use the machine patch method # in shade once it is available. 
def _prepare_instance_info_patch(instance_info):
    patch = []
    patch.append({
        'op': 'replace',
        'path': '/instance_info',
        'value': instance_info
    })
    return patch


def _is_true(value):
    true_values = [True, 'yes', 'Yes', 'True', 'true', 'present', 'on']
    if value in true_values:
        return True
    return False


def _is_false(value):
    false_values = [False, None, 'no', 'No', 'False', 'false',
                    'absent', 'off']
    if value in false_values:
        return True
    return False


def _check_set_maintenance(module, cloud, node):
    if _is_true(module.params['maintenance']):
        if _is_false(node['maintenance']):
            cloud.set_machine_maintenance_state(
                node['uuid'],
                True,
                reason=module.params['maintenance_reason'])
            module.exit_json(changed=True, msg="Node has been set into "
                                               "maintenance mode")
        else:
            # User has requested maintenance state, node is already in the
            # desired state, checking to see if the reason has changed.
            if (str(node['maintenance_reason']) not in
                    str(module.params['maintenance_reason'])):
                cloud.set_machine_maintenance_state(
                    node['uuid'],
                    True,
                    reason=module.params['maintenance_reason'])
                module.exit_json(changed=True, msg="Node maintenance reason "
                                                   "updated, cannot take any "
                                                   "additional action.")
    elif _is_false(module.params['maintenance']):
        if node['maintenance'] is True:
            cloud.remove_machine_from_maintenance(node['uuid'])
            return True
    else:
        module.fail_json(msg="maintenance parameter was set but a valid "
                             "value was not recognized.")
    return False


def _check_set_power_state(module, cloud, node):
    if 'power on' in str(node['power_state']):
        if _is_false(module.params['power']):
            # User has requested the node be powered off.
            cloud.set_machine_power_off(node['uuid'])
            module.exit_json(changed=True, msg="Power requested off")
    if 'power off' in str(node['power_state']):
        if (_is_false(module.params['power']) and
                _is_false(module.params['state'])):
            return False
        # NOTE(editor): assuming this branch was meant to catch "power off
        # requested while state is present"; the original repeated the
        # previous condition verbatim, which made it unreachable, and left
        # the %s placeholder unformatted.
        if (_is_false(module.params['power']) and
                _is_true(module.params['state'])):
            module.exit_json(
                changed=False,
                msg="Power for node is %s, node must be reactivated "
                    "OR set to state absent" % node['power_state']
            )
        # In the event the power has been toggled on and
        # deployment has been requested, we need to skip this
        # step.
        if (_is_true(module.params['power']) and
                _is_false(module.params['deploy'])):
            # Node is powered down when it is not awaiting to be provisioned
            cloud.set_machine_power_on(node['uuid'])
            return True
    # Default False if no action has been taken.
    return False


def main():
    argument_spec = openstack_full_argument_spec(
        uuid=dict(required=False),
        name=dict(required=False),
        instance_info=dict(type='dict', required=False),
        config_drive=dict(required=False),
        ironic_url=dict(required=False),
        state=dict(required=False, default='present'),
        maintenance=dict(required=False),
        maintenance_reason=dict(required=False),
        power=dict(required=False, default='present'),
        deploy=dict(required=False, default=True),
    )
    module_kwargs = openstack_module_kwargs()
    module = AnsibleModule(argument_spec, **module_kwargs)

    if not HAS_SHADE:
        module.fail_json(msg='shade is required for this module')
    if (module.params['auth_type'] in [None, 'None'] and
            module.params['ironic_url'] is None):
        module.fail_json(msg="Authentication appears disabled, please "
                             "define an ironic_url parameter")

    if (module.params['ironic_url'] and
            module.params['auth_type'] in [None, 'None']):
        module.params['auth'] = dict(
            endpoint=module.params['ironic_url']
        )

    node_id = _choose_id_value(module)

    if not node_id:
        module.fail_json(msg="A uuid or name value must be defined "
                             "to use this module.")
    try:
        cloud = shade.operator_cloud(**module.params)
        node = cloud.get_machine(node_id)

        if node is None:
            module.fail_json(msg="node not found")

        uuid = node['uuid']
        instance_info = module.params['instance_info']
        changed = False

        # User has requested the desired state to be maintenance.
        if module.params['state'] == 'maintenance':
            module.params['maintenance'] = True

        if node['provision_state'] in [
                'cleaning',
                'deleting',
                'wait call-back']:
            module.fail_json(msg="Node is in %s state, cannot act upon the "
                                 "request as the node is in a transition "
                                 "state" % node['provision_state'])

        # TODO(TheJulia) This is in-development code, that requires
        # code in the shade library that is still in development.
        if _check_set_maintenance(module, cloud, node):
            if node['provision_state'] in 'active':
                module.exit_json(changed=True,
                                 result="Maintenance state changed")
            changed = True
            node = cloud.get_machine(node_id)

        if _check_set_power_state(module, cloud, node):
            changed = True
            node = cloud.get_machine(node_id)

        if _is_true(module.params['state']):
            if _is_false(module.params['deploy']):
                module.exit_json(
                    changed=changed,
                    result="User request has explicitly disabled "
                           "deployment logic"
                )

            if 'active' in node['provision_state']:
                module.exit_json(
                    changed=changed,
                    result="Node already in an active state."
                )

            if instance_info is None:
                module.fail_json(
                    changed=changed,
                    msg="When setting an instance to present, "
                        "instance_info is a required variable.")

            # TODO(TheJulia): Update instance info, however info is
            # deployment specific. Perhaps consider adding rebuild
            # support, although there is a known desire to remove
            # rebuild support from Ironic at some point in the future.
            patch = _prepare_instance_info_patch(instance_info)
            cloud.set_node_instance_info(uuid, patch)
            cloud.validate_node(uuid)
            cloud.activate_node(uuid, module.params['config_drive'])
            # TODO(TheJulia): Add more error checking and a wait option.
            # We will need to loop, or just add the logic to shade,
            # although this could be a very long running process as
            # baremetal deployments are not a "quick" task.
module.exit_json(changed=changed, result="node activated") elif _is_false(module.params['state']): if node['provision_state'] not in "deleted": cloud.purge_node_instance_info(uuid) cloud.deactivate_node(uuid) module.exit_json(changed=True, result="deleted") else: module.exit_json(changed=False, result="node not found") else: module.fail_json(msg="State must be present, absent, " "maintenance, off") except shade.OpenStackCloudException as e: module.fail_json(msg=e.message) # this is magic, see lib/ansible/module_common.py from ansible.module_utils.basic import * from ansible.module_utils.openstack import * main()
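# Editor's note (sketch): the module normalizes loosely-typed Ansible
# parameters through _is_true()/_is_false() rather than bool(). A value
# outside both lists is neither true nor false, which is why
# _check_set_maintenance() can reach its fail_json() branch. Example
# values here are hypothetical:
#
#   _is_true('present')  -> True
#   _is_false(None)      -> True
#   _is_true('maybe')    -> False
#   _is_false('maybe')   -> False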
gpl-3.0
mnaberez/pypng
code/exnumpy.py
2
4828
#!/usr/bin/env python # $URL$ # $Rev$ # Numpy example. # Original code created by Mel Raab, modified by David Jones. ''' Example code integrating RGB PNG files, PyPNG and NumPy (abstracted from Mel Raab's functioning code) ''' # http://www.python.org/doc/2.4.4/lib/module-itertools.html import itertools import numpy import png ''' If you have a PNG file for an RGB image, and want to create a numpy array of data from it. ''' # Read the file "picture.png" from the current directory. The `Reader` # class can take a filename, a file-like object, or the byte data # directly; this suggests alternatives such as using urllib to read # an image from the internet: # png.Reader(file=urllib.urlopen('http://www.libpng.org/pub/png/PngSuite/basn2c16.png')) pngReader=png.Reader(filename='picture.png') # Tuple unpacking, using multiple assignment, is very useful for the # result of asDirect (and other methods). # See # http://docs.python.org/tutorial/introduction.html#first-steps-towards-programming row_count, column_count, pngdata, meta = pngReader.asDirect() bitdepth=meta['bitdepth'] plane_count=meta['planes'] # Make sure we're dealing with RGB files assert plane_count == 3 ''' Boxed row flat pixel: list([R,G,B, R,G,B, R,G,B], [R,G,B, R,G,B, R,G,B]) Array dimensions for this example: (2,9) Create `image_2d` as a two-dimensional NumPy array by stacking a sequence of 1-dimensional arrays (rows). The NumPy array mimics PyPNG's (boxed row flat pixel) representation; it will have dimensions ``(row_count,column_count*plane_count)``. ''' # The use of ``numpy.uint16``, below, is to convert each row to a NumPy # array with data type ``numpy.uint16``. This is a feature of NumPy, # discussed further in # http://docs.scipy.org/doc/numpy/user/basics.types.html . # You can use avoid the explicit conversion with # ``numpy.vstack(pngdata)``, but then NumPy will pick the array's data # type; in practice it seems to pick ``numpy.int32``, which is large enough # to hold any pixel value for any PNG image but uses 4 bytes per value when # 1 or 2 would be enough. # --- extract 001 start image_2d = numpy.vstack(itertools.imap(numpy.uint16, pngdata)) # --- extract 001 end # Do not be tempted to use ``numpy.asarray``; when passed an iterator # (`pngdata` is often an iterator) it will attempt to create a size 1 # array with the iterator as its only element. # An alternative to the above is to create the target array of the right # shape, then populate it row by row: if 0: image_2d = numpy.zeros((row_count,plane_count*column_count), dtype=numpy.uint16) for row_index, one_boxed_row_flat_pixels in enumerate(pngdata): image_2d[row_index,:]=one_boxed_row_flat_pixels del pngReader del pngdata ''' Reconfigure for easier referencing, similar to Boxed row boxed pixel: list([ (R,G,B), (R,G,B), (R,G,B) ], [ (R,G,B), (R,G,B), (R,G,B) ]) Array dimensions for this example: (2,3,3) ``image_3d`` will contain the image as a three-dimensional numpy array, having dimensions ``(row_count,column_count,plane_count)``. ''' # --- extract 002 start image_3d = numpy.reshape(image_2d, (row_count,column_count,plane_count)) # --- extract 002 end ''' ============= ''' ''' Convert NumPy image_3d array to PNG image file. If the data is three-dimensional, as it is above, the best thing to do is reshape it into a two-dimensional array with a shape of ``(row_count, column_count*plane_count)``. Because a two-dimensional numpy array is an iterator, it can be passed directly to the ``png.Writer.write`` method. 
''' row_count, column_count, plane_count = image_3d.shape assert plane_count==3 pngfile = open('picture_out.png', 'wb') try: # This example assumes that you have 16-bit pixel values in the data # array (that's what the ``bitdepth=16`` argument is for). # If you don't, then the resulting PNG file will likely be # very dark. Hey, it's only an example. pngWriter = png.Writer(column_count, row_count, greyscale=False, alpha=False, bitdepth=16) # As of 2009-04-13 passing a numpy array that has an element type # that is a numpy integer type (for example, the `image_3d` array has an # element type of ``numpy.uint16``) generates a deprecation warning. # This is probably a bug in numpy; it may go away in the future. # The code still works despite the warning. # See http://code.google.com/p/pypng/issues/detail?id=44 # --- extract 003 start pngWriter.write(pngfile, numpy.reshape(image_3d, (-1, column_count*plane_count))) # --- extract 003 end finally: pngfile.close()
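# Editor's sketch: a quick round-trip sanity check for the code above.
# Re-reading 'picture_out.png' should reproduce the pixel data exactly,
# since both the bit depth (16) and the RGB plane count were preserved.
# Guarded with ``if 0:`` in the same style as the optional code above.
if 0:
    check_reader = png.Reader(filename='picture_out.png')
    r, c, check_data, check_meta = check_reader.asDirect()
    image_check = numpy.vstack(itertools.imap(numpy.uint16, check_data))
    assert numpy.array_equal(
        image_check,
        numpy.reshape(image_3d, (-1, column_count*plane_count)))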
mit
40223101/w16b_test
static/Brython3.1.3-20150514-095342/Lib/threading.py
730
45641
"""Thread module emulating a subset of Java's threading model.""" import sys as _sys import _thread from time import sleep as _sleep try: from time import monotonic as _time except ImportError: from time import time as _time from traceback import format_exc as _format_exc from _weakrefset import WeakSet # Note regarding PEP 8 compliant names # This threading model was originally inspired by Java, and inherited # the convention of camelCase function and method names from that # language. Those original names are not in any imminent danger of # being deprecated (even for Py3k),so this module provides them as an # alias for the PEP 8 compliant names # Note that using the new PEP 8 compliant names facilitates substitution # with the multiprocessing module, which doesn't provide the old # Java inspired names. __all__ = ['active_count', 'Condition', 'current_thread', 'enumerate', 'Event', 'Lock', 'RLock', 'Semaphore', 'BoundedSemaphore', 'Thread', 'Barrier', 'Timer', 'ThreadError', 'setprofile', 'settrace', 'local', 'stack_size'] # Rename some stuff so "from threading import *" is safe _start_new_thread = _thread.start_new_thread _allocate_lock = _thread.allocate_lock get_ident = _thread.get_ident ThreadError = _thread.error try: _CRLock = _thread.RLock except AttributeError: _CRLock = None TIMEOUT_MAX = _thread.TIMEOUT_MAX del _thread # Support for profile and trace hooks _profile_hook = None _trace_hook = None def setprofile(func): """Set a profile function for all threads started from the threading module. The func will be passed to sys.setprofile() for each thread, before its run() method is called. """ global _profile_hook _profile_hook = func def settrace(func): """Set a trace function for all threads started from the threading module. The func will be passed to sys.settrace() for each thread, before its run() method is called. """ global _trace_hook _trace_hook = func # Synchronization classes Lock = _allocate_lock def RLock(*args, **kwargs): """Factory function that returns a new reentrant lock. A reentrant lock must be released by the thread that acquired it. Once a thread has acquired a reentrant lock, the same thread may acquire it again without blocking; the thread must release it once for each time it has acquired it. """ if _CRLock is None: return _PyRLock(*args, **kwargs) return _CRLock(*args, **kwargs) class _RLock: """This class implements reentrant lock objects. A reentrant lock must be released by the thread that acquired it. Once a thread has acquired a reentrant lock, the same thread may acquire it again without blocking; the thread must release it once for each time it has acquired it. """ def __init__(self): self._block = _allocate_lock() self._owner = None self._count = 0 def __repr__(self): owner = self._owner try: owner = _active[owner].name except KeyError: pass return "<%s owner=%r count=%d>" % ( self.__class__.__name__, owner, self._count) def acquire(self, blocking=True, timeout=-1): """Acquire a lock, blocking or non-blocking. When invoked without arguments: if this thread already owns the lock, increment the recursion level by one, and return immediately. Otherwise, if another thread owns the lock, block until the lock is unlocked. Once the lock is unlocked (not owned by any thread), then grab ownership, set the recursion level to one, and return. If more than one thread is blocked waiting until the lock is unlocked, only one at a time will be able to grab ownership of the lock. There is no return value in this case. 
When invoked with the blocking argument set to true, do the same thing as when called without arguments, and return true. When invoked with the blocking argument set to false, do not block. If a call without an argument would block, return false immediately; otherwise, do the same thing as when called without arguments, and return true. When invoked with the floating-point timeout argument set to a positive value, block for at most the number of seconds specified by timeout and as long as the lock cannot be acquired. Return true if the lock has been acquired, false if the timeout has elapsed. """ me = get_ident() if self._owner == me: self._count = self._count + 1 return 1 rc = self._block.acquire(blocking, timeout) if rc: self._owner = me self._count = 1 return rc __enter__ = acquire def release(self): """Release a lock, decrementing the recursion level. If after the decrement it is zero, reset the lock to unlocked (not owned by any thread), and if any other threads are blocked waiting for the lock to become unlocked, allow exactly one of them to proceed. If after the decrement the recursion level is still nonzero, the lock remains locked and owned by the calling thread. Only call this method when the calling thread owns the lock. A RuntimeError is raised if this method is called when the lock is unlocked. There is no return value. """ if self._owner != get_ident(): raise RuntimeError("cannot release un-acquired lock") self._count = count = self._count - 1 if not count: self._owner = None self._block.release() def __exit__(self, t, v, tb): self.release() # Internal methods used by condition variables def _acquire_restore(self, state): self._block.acquire() self._count, self._owner = state def _release_save(self): if self._count == 0: raise RuntimeError("cannot release un-acquired lock") count = self._count self._count = 0 owner = self._owner self._owner = None self._block.release() return (count, owner) def _is_owned(self): return self._owner == get_ident() _PyRLock = _RLock class Condition: """Class that implements a condition variable. A condition variable allows one or more threads to wait until they are notified by another thread. If the lock argument is given and not None, it must be a Lock or RLock object, and it is used as the underlying lock. Otherwise, a new RLock object is created and used as the underlying lock. """ def __init__(self, lock=None): if lock is None: lock = RLock() self._lock = lock # Export the lock's acquire() and release() methods self.acquire = lock.acquire self.release = lock.release # If the lock defines _release_save() and/or _acquire_restore(), # these override the default implementations (which just call # release() and acquire() on the lock). Ditto for _is_owned(). try: self._release_save = lock._release_save except AttributeError: pass try: self._acquire_restore = lock._acquire_restore except AttributeError: pass try: self._is_owned = lock._is_owned except AttributeError: pass self._waiters = [] def __enter__(self): return self._lock.__enter__() def __exit__(self, *args): return self._lock.__exit__(*args) def __repr__(self): return "<Condition(%s, %d)>" % (self._lock, len(self._waiters)) def _release_save(self): self._lock.release() # No state to save def _acquire_restore(self, x): self._lock.acquire() # Ignore saved state def _is_owned(self): # Return True if lock is owned by current_thread. # This method is called only if __lock doesn't have _is_owned(). 
if self._lock.acquire(0): self._lock.release() return False else: return True def wait(self, timeout=None): """Wait until notified or until a timeout occurs. If the calling thread has not acquired the lock when this method is called, a RuntimeError is raised. This method releases the underlying lock, and then blocks until it is awakened by a notify() or notify_all() call for the same condition variable in another thread, or until the optional timeout occurs. Once awakened or timed out, it re-acquires the lock and returns. When the timeout argument is present and not None, it should be a floating point number specifying a timeout for the operation in seconds (or fractions thereof). When the underlying lock is an RLock, it is not released using its release() method, since this may not actually unlock the lock when it was acquired multiple times recursively. Instead, an internal interface of the RLock class is used, which really unlocks it even when it has been recursively acquired several times. Another internal interface is then used to restore the recursion level when the lock is reacquired. """ if not self._is_owned(): raise RuntimeError("cannot wait on un-acquired lock") waiter = _allocate_lock() waiter.acquire() self._waiters.append(waiter) saved_state = self._release_save() try: # restore state no matter what (e.g., KeyboardInterrupt) if timeout is None: waiter.acquire() gotit = True else: if timeout > 0: gotit = waiter.acquire(True, timeout) else: gotit = waiter.acquire(False) if not gotit: try: self._waiters.remove(waiter) except ValueError: pass return gotit finally: self._acquire_restore(saved_state) def wait_for(self, predicate, timeout=None): """Wait until a condition evaluates to True. predicate should be a callable which result will be interpreted as a boolean value. A timeout may be provided giving the maximum time to wait. """ endtime = None waittime = timeout result = predicate() while not result: if waittime is not None: if endtime is None: endtime = _time() + waittime else: waittime = endtime - _time() if waittime <= 0: break self.wait(waittime) result = predicate() return result def notify(self, n=1): """Wake up one or more threads waiting on this condition, if any. If the calling thread has not acquired the lock when this method is called, a RuntimeError is raised. This method wakes up at most n of the threads waiting for the condition variable; it is a no-op if no threads are waiting. """ if not self._is_owned(): raise RuntimeError("cannot notify on un-acquired lock") __waiters = self._waiters waiters = __waiters[:n] if not waiters: return for waiter in waiters: waiter.release() try: __waiters.remove(waiter) except ValueError: pass def notify_all(self): """Wake up all threads waiting on this condition. If the calling thread has not acquired the lock when this method is called, a RuntimeError is raised. """ self.notify(len(self._waiters)) notifyAll = notify_all class Semaphore: """This class implements semaphore objects. Semaphores manage a counter representing the number of release() calls minus the number of acquire() calls, plus an initial value. The acquire() method blocks if necessary until it can return without making the counter negative. If not given, value defaults to 1. 
""" # After Tim Peters' semaphore class, but not quite the same (no maximum) def __init__(self, value=1): if value < 0: raise ValueError("semaphore initial value must be >= 0") self._cond = Condition(Lock()) self._value = value def acquire(self, blocking=True, timeout=None): """Acquire a semaphore, decrementing the internal counter by one. When invoked without arguments: if the internal counter is larger than zero on entry, decrement it by one and return immediately. If it is zero on entry, block, waiting until some other thread has called release() to make it larger than zero. This is done with proper interlocking so that if multiple acquire() calls are blocked, release() will wake exactly one of them up. The implementation may pick one at random, so the order in which blocked threads are awakened should not be relied on. There is no return value in this case. When invoked with blocking set to true, do the same thing as when called without arguments, and return true. When invoked with blocking set to false, do not block. If a call without an argument would block, return false immediately; otherwise, do the same thing as when called without arguments, and return true. When invoked with a timeout other than None, it will block for at most timeout seconds. If acquire does not complete successfully in that interval, return false. Return true otherwise. """ if not blocking and timeout is not None: raise ValueError("can't specify timeout for non-blocking acquire") rc = False endtime = None with self._cond: while self._value == 0: if not blocking: break if timeout is not None: if endtime is None: endtime = _time() + timeout else: timeout = endtime - _time() if timeout <= 0: break self._cond.wait(timeout) else: self._value = self._value - 1 rc = True return rc __enter__ = acquire def release(self): """Release a semaphore, incrementing the internal counter by one. When the counter is zero on entry and another thread is waiting for it to become larger than zero again, wake up that thread. """ with self._cond: self._value = self._value + 1 self._cond.notify() def __exit__(self, t, v, tb): self.release() class BoundedSemaphore(Semaphore): """Implements a bounded semaphore. A bounded semaphore checks to make sure its current value doesn't exceed its initial value. If it does, ValueError is raised. In most situations semaphores are used to guard resources with limited capacity. If the semaphore is released too many times it's a sign of a bug. If not given, value defaults to 1. Like regular semaphores, bounded semaphores manage a counter representing the number of release() calls minus the number of acquire() calls, plus an initial value. The acquire() method blocks if necessary until it can return without making the counter negative. If not given, value defaults to 1. """ def __init__(self, value=1): Semaphore.__init__(self, value) self._initial_value = value def release(self): """Release a semaphore, incrementing the internal counter by one. When the counter is zero on entry and another thread is waiting for it to become larger than zero again, wake up that thread. If the number of releases exceeds the number of acquires, raise a ValueError. """ with self._cond: if self._value >= self._initial_value: raise ValueError("Semaphore released too many times") self._value += 1 self._cond.notify() class Event: """Class implementing event objects. Events manage a flag that can be set to true with the set() method and reset to false with the clear() method. The wait() method blocks until the flag is true. 
The flag is initially false. """ # After Tim Peters' event class (without is_posted()) def __init__(self): self._cond = Condition(Lock()) self._flag = False def _reset_internal_locks(self): # private! called by Thread._reset_internal_locks by _after_fork() self._cond.__init__() def is_set(self): """Return true if and only if the internal flag is true.""" return self._flag isSet = is_set def set(self): """Set the internal flag to true. All threads waiting for it to become true are awakened. Threads that call wait() once the flag is true will not block at all. """ self._cond.acquire() try: self._flag = True self._cond.notify_all() finally: self._cond.release() def clear(self): """Reset the internal flag to false. Subsequently, threads calling wait() will block until set() is called to set the internal flag to true again. """ self._cond.acquire() try: self._flag = False finally: self._cond.release() def wait(self, timeout=None): """Block until the internal flag is true. If the internal flag is true on entry, return immediately. Otherwise, block until another thread calls set() to set the flag to true, or until the optional timeout occurs. When the timeout argument is present and not None, it should be a floating point number specifying a timeout for the operation in seconds (or fractions thereof). This method returns the internal flag on exit, so it will always return True except if a timeout is given and the operation times out. """ self._cond.acquire() try: signaled = self._flag if not signaled: signaled = self._cond.wait(timeout) return signaled finally: self._cond.release() # A barrier class. Inspired in part by the pthread_barrier_* api and # the CyclicBarrier class from Java. See # http://sourceware.org/pthreads-win32/manual/pthread_barrier_init.html and # http://java.sun.com/j2se/1.5.0/docs/api/java/util/concurrent/ # CyclicBarrier.html # for information. # We maintain two main states, 'filling' and 'draining' enabling the barrier # to be cyclic. Threads are not allowed into it until it has fully drained # since the previous cycle. In addition, a 'resetting' state exists which is # similar to 'draining' except that threads leave with a BrokenBarrierError, # and a 'broken' state in which all threads get the exception. class Barrier: """Implements a Barrier. Useful for synchronizing a fixed number of threads at known synchronization points. Threads block on 'wait()' and are simultaneously once they have all made that call. """ def __init__(self, parties, action=None, timeout=None): """Create a barrier, initialised to 'parties' threads. 'action' is a callable which, when supplied, will be called by one of the threads after they have all entered the barrier and just prior to releasing them all. If a 'timeout' is provided, it is uses as the default for all subsequent 'wait()' calls. """ self._cond = Condition(Lock()) self._action = action self._timeout = timeout self._parties = parties self._state = 0 #0 filling, 1, draining, -1 resetting, -2 broken self._count = 0 def wait(self, timeout=None): """Wait for the barrier. When the specified number of threads have started waiting, they are all simultaneously awoken. If an 'action' was provided for the barrier, one of the threads will have executed that callback prior to returning. Returns an individual index number from 0 to 'parties-1'. """ if timeout is None: timeout = self._timeout with self._cond: self._enter() # Block while the barrier drains. 
index = self._count self._count += 1 try: if index + 1 == self._parties: # We release the barrier self._release() else: # We wait until someone releases us self._wait(timeout) return index finally: self._count -= 1 # Wake up any threads waiting for barrier to drain. self._exit() # Block until the barrier is ready for us, or raise an exception # if it is broken. def _enter(self): while self._state in (-1, 1): # It is draining or resetting, wait until done self._cond.wait() #see if the barrier is in a broken state if self._state < 0: raise BrokenBarrierError assert self._state == 0 # Optionally run the 'action' and release the threads waiting # in the barrier. def _release(self): try: if self._action: self._action() # enter draining state self._state = 1 self._cond.notify_all() except: #an exception during the _action handler. Break and reraise self._break() raise # Wait in the barrier until we are relased. Raise an exception # if the barrier is reset or broken. def _wait(self, timeout): if not self._cond.wait_for(lambda : self._state != 0, timeout): #timed out. Break the barrier self._break() raise BrokenBarrierError if self._state < 0: raise BrokenBarrierError assert self._state == 1 # If we are the last thread to exit the barrier, signal any threads # waiting for the barrier to drain. def _exit(self): if self._count == 0: if self._state in (-1, 1): #resetting or draining self._state = 0 self._cond.notify_all() def reset(self): """Reset the barrier to the initial state. Any threads currently waiting will get the BrokenBarrier exception raised. """ with self._cond: if self._count > 0: if self._state == 0: #reset the barrier, waking up threads self._state = -1 elif self._state == -2: #was broken, set it to reset state #which clears when the last thread exits self._state = -1 else: self._state = 0 self._cond.notify_all() def abort(self): """Place the barrier into a 'broken' state. Useful in case of error. Any currently waiting threads and threads attempting to 'wait()' will have BrokenBarrierError raised. """ with self._cond: self._break() def _break(self): # An internal error was detected. The barrier is set to # a broken state all parties awakened. self._state = -2 self._cond.notify_all() @property def parties(self): """Return the number of threads required to trip the barrier.""" return self._parties @property def n_waiting(self): """Return the number of threads currently waiting at the barrier.""" # We don't need synchronization here since this is an ephemeral result # anyway. It returns the correct value in the steady state. if self._state == 0: return self._count return 0 @property def broken(self): """Return True if the barrier is in a broken state.""" return self._state == -2 # exception raised by the Barrier class class BrokenBarrierError(RuntimeError): pass # Helper to generate new thread names _counter = 0 def _newname(template="Thread-%d"): global _counter _counter = _counter + 1 return template % _counter # Active thread administration _active_limbo_lock = _allocate_lock() _active = {} # maps thread id to Thread object _limbo = {} # For debug and leak testing _dangling = WeakSet() # Main class for threads class Thread: """A class that represents a thread of control. This class can be safely subclassed in a limited fashion. There are two ways to specify the activity: by passing a callable object to the constructor, or by overriding the run() method in a subclass. 
""" __initialized = False # Need to store a reference to sys.exc_info for printing # out exceptions when a thread tries to use a global var. during interp. # shutdown and thus raises an exception about trying to perform some # operation on/with a NoneType __exc_info = _sys.exc_info # Keep sys.exc_clear too to clear the exception just before # allowing .join() to return. #XXX __exc_clear = _sys.exc_clear def __init__(self, group=None, target=None, name=None, args=(), kwargs=None, *, daemon=None): """This constructor should always be called with keyword arguments. Arguments are: *group* should be None; reserved for future extension when a ThreadGroup class is implemented. *target* is the callable object to be invoked by the run() method. Defaults to None, meaning nothing is called. *name* is the thread name. By default, a unique name is constructed of the form "Thread-N" where N is a small decimal number. *args* is the argument tuple for the target invocation. Defaults to (). *kwargs* is a dictionary of keyword arguments for the target invocation. Defaults to {}. If a subclass overrides the constructor, it must make sure to invoke the base class constructor (Thread.__init__()) before doing anything else to the thread. """ assert group is None, "group argument must be None for now" if kwargs is None: kwargs = {} self._target = target self._name = str(name or _newname()) self._args = args self._kwargs = kwargs if daemon is not None: self._daemonic = daemon else: self._daemonic = current_thread().daemon self._ident = None self._started = Event() self._stopped = False self._block = Condition(Lock()) self._initialized = True # sys.stderr is not stored in the class like # sys.exc_info since it can be changed between instances self._stderr = _sys.stderr _dangling.add(self) def _reset_internal_locks(self): # private! Called by _after_fork() to reset our internal locks as # they may be in an invalid state leading to a deadlock or crash. if hasattr(self, '_block'): # DummyThread deletes _block self._block.__init__() self._started._reset_internal_locks() def __repr__(self): assert self._initialized, "Thread.__init__() was not called" status = "initial" if self._started.is_set(): status = "started" if self._stopped: status = "stopped" if self._daemonic: status += " daemon" if self._ident is not None: status += " %s" % self._ident return "<%s(%s, %s)>" % (self.__class__.__name__, self._name, status) def start(self): """Start the thread's activity. It must be called at most once per thread object. It arranges for the object's run() method to be invoked in a separate thread of control. This method will raise a RuntimeError if called more than once on the same thread object. """ if not self._initialized: raise RuntimeError("thread.__init__() not called") if self._started.is_set(): raise RuntimeError("threads can only be started once") with _active_limbo_lock: _limbo[self] = self try: _start_new_thread(self._bootstrap, ()) except Exception: with _active_limbo_lock: del _limbo[self] raise self._started.wait() def run(self): """Method representing the thread's activity. You may override this method in a subclass. The standard run() method invokes the callable object passed to the object's constructor as the target argument, if any, with sequential and keyword arguments taken from the args and kwargs arguments, respectively. 
""" try: if self._target: self._target(*self._args, **self._kwargs) finally: # Avoid a refcycle if the thread is running a function with # an argument that has a member that points to the thread. del self._target, self._args, self._kwargs def _bootstrap(self): # Wrapper around the real bootstrap code that ignores # exceptions during interpreter cleanup. Those typically # happen when a daemon thread wakes up at an unfortunate # moment, finds the world around it destroyed, and raises some # random exception *** while trying to report the exception in # _bootstrap_inner() below ***. Those random exceptions # don't help anybody, and they confuse users, so we suppress # them. We suppress them only when it appears that the world # indeed has already been destroyed, so that exceptions in # _bootstrap_inner() during normal business hours are properly # reported. Also, we only suppress them for daemonic threads; # if a non-daemonic encounters this, something else is wrong. try: self._bootstrap_inner() except: if self._daemonic and _sys is None: return raise def _set_ident(self): self._ident = get_ident() def _bootstrap_inner(self): try: self._set_ident() self._started.set() with _active_limbo_lock: _active[self._ident] = self del _limbo[self] if _trace_hook: _sys.settrace(_trace_hook) if _profile_hook: _sys.setprofile(_profile_hook) try: self.run() except SystemExit: pass except: # If sys.stderr is no more (most likely from interpreter # shutdown) use self._stderr. Otherwise still use sys (as in # _sys) in case sys.stderr was redefined since the creation of # self. if _sys: _sys.stderr.write("Exception in thread %s:\n%s\n" % (self.name, _format_exc())) else: # Do the best job possible w/o a huge amt. of code to # approximate a traceback (code ideas from # Lib/traceback.py) exc_type, exc_value, exc_tb = self._exc_info() try: print(( "Exception in thread " + self.name + " (most likely raised during interpreter shutdown):"), file=self._stderr) print(( "Traceback (most recent call last):"), file=self._stderr) while exc_tb: print(( ' File "%s", line %s, in %s' % (exc_tb.tb_frame.f_code.co_filename, exc_tb.tb_lineno, exc_tb.tb_frame.f_code.co_name)), file=self._stderr) exc_tb = exc_tb.tb_next print(("%s: %s" % (exc_type, exc_value)), file=self._stderr) # Make sure that exc_tb gets deleted since it is a memory # hog; deleting everything else is just for thoroughness finally: del exc_type, exc_value, exc_tb finally: # Prevent a race in # test_threading.test_no_refcycle_through_target when # the exception keeps the target alive past when we # assert that it's dead. #XXX self.__exc_clear() pass finally: with _active_limbo_lock: self._stop() try: # We don't call self._delete() because it also # grabs _active_limbo_lock. del _active[get_ident()] except: pass def _stop(self): self._block.acquire() self._stopped = True self._block.notify_all() self._block.release() def _delete(self): "Remove current thread from the dict of currently running threads." # Notes about running with _dummy_thread: # # Must take care to not raise an exception if _dummy_thread is being # used (and thus this module is being used as an instance of # dummy_threading). _dummy_thread.get_ident() always returns -1 since # there is only one thread if _dummy_thread is being used. Thus # len(_active) is always <= 1 here, and any Thread instance created # overwrites the (if any) thread currently registered in _active. # # An instance of _MainThread is always created by 'threading'. 
This # gets overwritten the instant an instance of Thread is created; both # threads return -1 from _dummy_thread.get_ident() and thus have the # same key in the dict. So when the _MainThread instance created by # 'threading' tries to clean itself up when atexit calls this method # it gets a KeyError if another Thread instance was created. # # This all means that KeyError from trying to delete something from # _active if dummy_threading is being used is a red herring. But # since it isn't if dummy_threading is *not* being used then don't # hide the exception. try: with _active_limbo_lock: del _active[get_ident()] # There must not be any python code between the previous line # and after the lock is released. Otherwise a tracing function # could try to acquire the lock again in the same thread, (in # current_thread()), and would block. except KeyError: if 'dummy_threading' not in _sys.modules: raise def join(self, timeout=None): """Wait until the thread terminates. This blocks the calling thread until the thread whose join() method is called terminates -- either normally or through an unhandled exception or until the optional timeout occurs. When the timeout argument is present and not None, it should be a floating point number specifying a timeout for the operation in seconds (or fractions thereof). As join() always returns None, you must call isAlive() after join() to decide whether a timeout happened -- if the thread is still alive, the join() call timed out. When the timeout argument is not present or None, the operation will block until the thread terminates. A thread can be join()ed many times. join() raises a RuntimeError if an attempt is made to join the current thread as that would cause a deadlock. It is also an error to join() a thread before it has been started and attempts to do so raises the same exception. """ if not self._initialized: raise RuntimeError("Thread.__init__() not called") if not self._started.is_set(): raise RuntimeError("cannot join thread before it is started") if self is current_thread(): raise RuntimeError("cannot join current thread") self._block.acquire() try: if timeout is None: while not self._stopped: self._block.wait() else: deadline = _time() + timeout while not self._stopped: delay = deadline - _time() if delay <= 0: break self._block.wait(delay) finally: self._block.release() @property def name(self): """A string used for identification purposes only. It has no semantics. Multiple threads may be given the same name. The initial name is set by the constructor. """ assert self._initialized, "Thread.__init__() not called" return self._name @name.setter def name(self, name): assert self._initialized, "Thread.__init__() not called" self._name = str(name) @property def ident(self): """Thread identifier of this thread or None if it has not been started. This is a nonzero integer. See the thread.get_ident() function. Thread identifiers may be recycled when a thread exits and another thread is created. The identifier is available even after the thread has exited. """ assert self._initialized, "Thread.__init__() not called" return self._ident def is_alive(self): """Return whether the thread is alive. This method returns True just before the run() method starts until just after the run() method terminates. The module function enumerate() returns a list of all alive threads. 
""" assert self._initialized, "Thread.__init__() not called" return self._started.is_set() and not self._stopped isAlive = is_alive @property def daemon(self): """A boolean value indicating whether this thread is a daemon thread. This must be set before start() is called, otherwise RuntimeError is raised. Its initial value is inherited from the creating thread; the main thread is not a daemon thread and therefore all threads created in the main thread default to daemon = False. The entire Python program exits when no alive non-daemon threads are left. """ assert self._initialized, "Thread.__init__() not called" return self._daemonic @daemon.setter def daemon(self, daemonic): if not self._initialized: raise RuntimeError("Thread.__init__() not called") if self._started.is_set(): raise RuntimeError("cannot set daemon status of active thread"); self._daemonic = daemonic def isDaemon(self): return self.daemon def setDaemon(self, daemonic): self.daemon = daemonic def getName(self): return self.name def setName(self, name): self.name = name # The timer class was contributed by Itamar Shtull-Trauring class Timer(Thread): """Call a function after a specified number of seconds: t = Timer(30.0, f, args=None, kwargs=None) t.start() t.cancel() # stop the timer's action if it's still waiting """ def __init__(self, interval, function, args=None, kwargs=None): Thread.__init__(self) self.interval = interval self.function = function self.args = args if args is not None else [] self.kwargs = kwargs if kwargs is not None else {} self.finished = Event() def cancel(self): """Stop the timer if it hasn't finished yet.""" self.finished.set() def run(self): self.finished.wait(self.interval) if not self.finished.is_set(): self.function(*self.args, **self.kwargs) self.finished.set() # Special thread class to represent the main thread # This is garbage collected through an exit handler class _MainThread(Thread): def __init__(self): Thread.__init__(self, name="MainThread", daemon=False) self._started.set() self._set_ident() with _active_limbo_lock: _active[self._ident] = self def _exitfunc(self): self._stop() t = _pickSomeNonDaemonThread() while t: t.join() t = _pickSomeNonDaemonThread() self._delete() def _pickSomeNonDaemonThread(): for t in enumerate(): if not t.daemon and t.is_alive(): return t return None # Dummy thread class to represent threads not started here. # These aren't garbage collected when they die, nor can they be waited for. # If they invoke anything in threading.py that calls current_thread(), they # leave an entry in the _active dict forever after. # Their purpose is to return *something* from current_thread(). # They are marked as daemon threads so we won't wait for them # when we exit (conform previous semantics). class _DummyThread(Thread): def __init__(self): Thread.__init__(self, name=_newname("Dummy-%d"), daemon=True) # Thread._block consumes an OS-level locking primitive, which # can never be used by a _DummyThread. Since a _DummyThread # instance is immortal, that's bad, so release this resource. del self._block self._started.set() self._set_ident() with _active_limbo_lock: _active[self._ident] = self def _stop(self): pass def join(self, timeout=None): assert False, "cannot join a dummy thread" # Global API functions def current_thread(): """Return the current Thread object, corresponding to the caller's thread of control. If the caller's thread of control was not created through the threading module, a dummy thread object with limited functionality is returned. 
""" try: return _active[get_ident()] except KeyError: return _DummyThread() currentThread = current_thread def active_count(): """Return the number of Thread objects currently alive. The returned count is equal to the length of the list returned by enumerate(). """ with _active_limbo_lock: return len(_active) + len(_limbo) activeCount = active_count def _enumerate(): # Same as enumerate(), but without the lock. Internal use only. return list(_active.values()) + list(_limbo.values()) def enumerate(): """Return a list of all Thread objects currently alive. The list includes daemonic threads, dummy thread objects created by current_thread(), and the main thread. It excludes terminated threads and threads that have not yet been started. """ with _active_limbo_lock: return list(_active.values()) + list(_limbo.values()) from _thread import stack_size # Create the main thread object, # and make it available for the interpreter # (Py_Main) as threading._shutdown. _shutdown = _MainThread()._exitfunc # get thread-local implementation, either from the thread # module, or from the python fallback try: from _thread import _local as local except ImportError: from _threading_local import local def _after_fork(): # This function is called by Python/ceval.c:PyEval_ReInitThreads which # is called from PyOS_AfterFork. Here we cleanup threading module state # that should not exist after a fork. # Reset _active_limbo_lock, in case we forked while the lock was held # by another (non-forked) thread. http://bugs.python.org/issue874900 global _active_limbo_lock _active_limbo_lock = _allocate_lock() # fork() only copied the current thread; clear references to others. new_active = {} current = current_thread() with _active_limbo_lock: for thread in _enumerate(): # Any lock/condition variable may be currently locked or in an # invalid state, so we reinitialize them. thread._reset_internal_locks() if thread is current: # There is only one active thread. We reset the ident to # its new value since it can have changed. ident = get_ident() thread._ident = ident new_active[ident] = thread else: # All the others are already stopped. thread._stop() _limbo.clear() _active.clear() _active.update(new_active) assert len(_active) == 1
agpl-3.0
AOSPA-L/android_kernel_oppo_msm8974
tools/perf/scripts/python/Perf-Trace-Util/lib/Perf/Trace/Core.py
11088
3246
# Core.py - Python extension for perf script, core functions
#
# Copyright (C) 2010 by Tom Zanussi <tzanussi@gmail.com>
#
# This software may be distributed under the terms of the GNU General
# Public License ("GPL") version 2 as published by the Free Software
# Foundation.

from collections import defaultdict

def autodict():
    return defaultdict(autodict)

flag_fields = autodict()
symbolic_fields = autodict()

def define_flag_field(event_name, field_name, delim):
    flag_fields[event_name][field_name]['delim'] = delim

def define_flag_value(event_name, field_name, value, field_str):
    flag_fields[event_name][field_name]['values'][value] = field_str

def define_symbolic_field(event_name, field_name):
    # nothing to do, really
    pass

def define_symbolic_value(event_name, field_name, value, field_str):
    symbolic_fields[event_name][field_name]['values'][value] = field_str

def flag_str(event_name, field_name, value):
    string = ""

    if flag_fields[event_name][field_name]:
        print_delim = 0
        keys = flag_fields[event_name][field_name]['values'].keys()
        keys.sort()
        for idx in keys:
            if not value and not idx:
                string += flag_fields[event_name][field_name]['values'][idx]
                break
            if idx and (value & idx) == idx:
                if print_delim and flag_fields[event_name][field_name]['delim']:
                    string += " " + flag_fields[event_name][field_name]['delim'] + " "
                string += flag_fields[event_name][field_name]['values'][idx]
                print_delim = 1
                value &= ~idx

    return string

def symbol_str(event_name, field_name, value):
    string = ""

    if symbolic_fields[event_name][field_name]:
        keys = symbolic_fields[event_name][field_name]['values'].keys()
        keys.sort()
        for idx in keys:
            if not value and not idx:
                string = symbolic_fields[event_name][field_name]['values'][idx]
                break
            if (value == idx):
                string = symbolic_fields[event_name][field_name]['values'][idx]
                break

    return string

trace_flags = { 0x00: "NONE", \
                0x01: "IRQS_OFF", \
                0x02: "IRQS_NOSUPPORT", \
                0x04: "NEED_RESCHED", \
                0x08: "HARDIRQ", \
                0x10: "SOFTIRQ" }

def trace_flag_str(value):
    string = ""
    print_delim = 0

    keys = trace_flags.keys()

    for idx in keys:
        if not value and not idx:
            string += "NONE"
            break

        if idx and (value & idx) == idx:
            if print_delim:
                string += " | "
            string += trace_flags[idx]
            print_delim = 1
            value &= ~idx

    return string


def taskState(state):
    states = {
        0 : "R",
        1 : "S",
        2 : "D",
        64: "DEAD"
    }

    if state not in states:
        return "Unknown"

    return states[state]


class EventHeaders:
    def __init__(self, common_cpu, common_secs, common_nsecs,
                 common_pid, common_comm):
        self.cpu = common_cpu
        self.secs = common_secs
        self.nsecs = common_nsecs
        self.pid = common_pid
        self.comm = common_comm

    def ts(self):
        return (self.secs * (10 ** 9)) + self.nsecs

    def ts_format(self):
        return "%d.%d" % (self.secs, int(self.nsecs / 1000))
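
# --- Editor's addition (usage sketch, not part of the original file): how a
# perf script typically registers flag values and renders them with
# flag_str() and trace_flag_str(). The event and field names here are
# illustrative only.
if __name__ == "__main__":
    define_flag_field("demo::event", "flags", "|")
    define_flag_value("demo::event", "flags", 0x1, "READ")
    define_flag_value("demo::event", "flags", 0x2, "WRITE")
    # both bits of 0x3 are set, so the names are joined by the delimiter
    print flag_str("demo::event", "flags", 0x3)   # READ | WRITE
    print trace_flag_str(0x05)                    # IRQS_OFF | NEED_RESCHED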
gpl-2.0
azurestandard/django
tests/regressiontests/localflavor/tests.py
21
1815
from __future__ import absolute_import

from .ar.tests import ARLocalFlavorTests
from .at.tests import ATLocalFlavorTests
from .au.tests import AULocalflavorTests
from .be.tests import BELocalFlavorTests
from .br.tests import BRLocalFlavorTests
from .ca.tests import CALocalFlavorTests
from .ch.tests import CHLocalFlavorTests
from .cl.tests import CLLocalFlavorTests
from .cn.tests import CNLocalFlavorTests
from .co.tests import COLocalFlavorTests
from .cz.tests import CZLocalFlavorTests
from .de.tests import DELocalFlavorTests
from .ec.tests import ECLocalFlavorTests
from .es.tests import ESLocalFlavorTests
from .fi.tests import FILocalFlavorTests
from .fr.tests import FRLocalFlavorTests
from .gb.tests import GBLocalFlavorTests
from .generic.tests import GenericLocalFlavorTests
from .hk.tests import HKLocalFlavorTests
from .hr.tests import HRLocalFlavorTests
from .id.tests import IDLocalFlavorTests
from .ie.tests import IELocalFlavorTests
from .il.tests import ILLocalFlavorTests
from .in_.tests import INLocalFlavorTests
from .is_.tests import ISLocalFlavorTests
from .it.tests import ITLocalFlavorTests
from .jp.tests import JPLocalFlavorTests
from .kw.tests import KWLocalFlavorTests
from .mk.tests import MKLocalFlavorTests
from .mx.tests import MXLocalFlavorTests
from .nl.tests import NLLocalFlavorTests
from .pl.tests import PLLocalFlavorTests
from .pt.tests import PTLocalFlavorTests
from .py.tests import PYLocalFlavorTests
from .ro.tests import ROLocalFlavorTests
from .ru.tests import RULocalFlavorTests
from .se.tests import SELocalFlavorTests
from .si.tests import SILocalFlavorTests
from .sk.tests import SKLocalFlavorTests
from .tr.tests import TRLocalFlavorTests
from .us.tests import USLocalFlavorTests
from .uy.tests import UYLocalFlavorTests
from .za.tests import ZALocalFlavorTests
bsd-3-clause
spektom/incubator-airflow
airflow/utils/decorators.py
5
3422
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements.  See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership.  The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License.  You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied.  See the License for the
# specific language governing permissions and limitations
# under the License.
#
import inspect
import os
from copy import copy
from functools import wraps

from airflow.exceptions import AirflowException

signature = inspect.signature


def apply_defaults(func):
    """
    Function decorator that looks for an argument named "default_args", and
    fills the unspecified arguments from it.

    Since Python 2.* isn't clear about which arguments are missing when
    calling a function, and this can be quite confusing with multi-level
    inheritance and argument defaults, this decorator also alerts with
    specific information about the missing arguments.
    """
    # Cache inspect.signature for the wrapper closure to avoid calling it
    # at every decorated invocation. This is separate sig_cache created
    # per decoration, i.e. each function decorated using apply_defaults will
    # have a different sig_cache.
    sig_cache = signature(func)
    non_optional_args = {
        name for (name, param) in sig_cache.parameters.items()
        if param.default == param.empty and
        param.name != 'self' and
        param.kind not in (param.VAR_POSITIONAL, param.VAR_KEYWORD)}

    @wraps(func)
    def wrapper(*args, **kwargs):
        from airflow.models.dag import DagContext
        if len(args) > 1:
            raise AirflowException(
                "Use keyword arguments when initializing operators")
        dag_args = {}
        dag_params = {}

        dag = kwargs.get('dag', None) or DagContext.get_current_dag()
        if dag:
            dag_args = copy(dag.default_args) or {}
            dag_params = copy(dag.params) or {}

        params = kwargs.get('params', {}) or {}
        dag_params.update(params)

        default_args = {}
        if 'default_args' in kwargs:
            default_args = kwargs['default_args']
            if 'params' in default_args:
                dag_params.update(default_args['params'])
                del default_args['params']

        dag_args.update(default_args)
        default_args = dag_args

        for arg in sig_cache.parameters:
            if arg not in kwargs and arg in default_args:
                kwargs[arg] = default_args[arg]

        missing_args = list(non_optional_args - set(kwargs))
        if missing_args:
            msg = "Argument {0} is required".format(missing_args)
            raise AirflowException(msg)

        kwargs['params'] = dag_params

        result = func(*args, **kwargs)
        return result
    return wrapper


if 'BUILDING_AIRFLOW_DOCS' in os.environ:
    # flake8: noqa: F811
    # Monkey patch hook to get good function headers while building docs
    apply_defaults = lambda x: x
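
# --- Editor's addition (usage sketch, not part of the original file): what
# apply_defaults does for an operator-like class. "DemoOperator" and its
# arguments are illustrative; in Airflow the decorator wraps operator
# constructors such as BaseOperator.__init__. Running this requires the
# airflow package on the path, since the wrapper imports DagContext lazily.
if __name__ == '__main__':
    class DemoOperator:
        @apply_defaults
        def __init__(self, task_id, owner, retries=0, **kwargs):
            self.task_id = task_id
            self.owner = owner
            self.retries = retries

    # 'owner' and 'retries' are filled in from default_args; omitting
    # 'task_id' as well would raise AirflowException naming it.
    op = DemoOperator(task_id='t1',
                      default_args={'owner': 'airflow', 'retries': 2})
    print(op.owner, op.retries)  # -> airflow 2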
apache-2.0
LegitSavage/namebench
libnamebench/better_webbrowser.py
175
4191
#!/usr/bin/env python
# Copyright 2009 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""Wrapper for webbrowser library, to invoke the http handler on win32."""

__author__ = 'tstromberg@google.com (Thomas Stromberg)'

import os.path
import subprocess
import sys
import traceback
import webbrowser

import util


def output(string):
  print string


def create_win32_http_cmd(url):
  """Create a command-line tuple to launch a web browser for a given URL.

  Args:
    url: string

  Returns:
    tuple of: (executable, arg1, arg2, ...)

  At the moment, this ignores all default arguments to the browser.
  TODO(tstromberg): Properly parse the command-line arguments.
  """
  browser_type = None
  try:
    key = _winreg.OpenKey(_winreg.HKEY_CURRENT_USER,
                          'Software\Classes\http\shell\open\command')
    browser_type = 'user'
  except WindowsError:
    key = _winreg.OpenKey(_winreg.HKEY_LOCAL_MACHINE,
                          'Software\Classes\http\shell\open\command')
    browser_type = 'machine'
  except:
    return False

  cmd = _winreg.EnumValue(key, 0)[1]
  # "C:\blah blah\iexplore.exe" -nohome
  # "C:\blah blah\firefox.exe" -requestPending -osint -url "%1"
  if '"' in cmd:
    executable = cmd.split('"')[1]
  else:
    executable = cmd.split(' ')[0]

  if not os.path.exists(executable):
    output('$ Default HTTP browser does not exist: %s' % executable)
    return False
  else:
    output('$ %s HTTP handler: %s' % (browser_type, executable))
  return (executable, url)


def open(url):
  """Opens a URL, overriding the normal webbrowser.open methods for sanity."""

  try:
    webbrowser.open(url, new=1, autoraise=True)
  # If the user is missing the osascript binary - see
  # http://code.google.com/p/namebench/issues/detail?id=88
  except:
    output('Failed to open: [%s]: %s' % (url, util.GetLastExceptionString()))
    if os.path.exists('/usr/bin/open'):
      try:
        output('trying open: %s' % url)
        p = subprocess.Popen(('open', url))
        p.wait()
      except:
        output('open did not seem to work: %s' % util.GetLastExceptionString())
    elif sys.platform[:3] == 'win':
      try:
        output('trying default Windows controller: %s' % url)
        controller = webbrowser.get('windows-default')
        controller.open_new(url)
      except:
        output('WindowsController did not work: %s'
               % util.GetLastExceptionString())


# *NOTE*: EVIL IMPORT SIDE EFFECTS AHEAD!
#
# If we are running on Windows, register the WindowsHttpDefault class.
if sys.platform[:3] == 'win':
  import _winreg

  # We don't want to load this class by default, because Python 2.4 doesn't
  # have BaseBrowser.
  class WindowsHttpDefault(webbrowser.BaseBrowser):
    """Provide an alternate open class for Windows user, using the http handler."""

    def open(self, url, new=0, autoraise=1):
      command_args = create_win32_http_cmd(url)
      if not command_args:
        output('$ Could not find HTTP handler')
        return False

      output('command_args:')
      output(command_args)
      # Avoid some unicode path issues by moving our current directory
      old_pwd = os.getcwd()
      os.chdir('C:\\')
      try:
        _unused = subprocess.Popen(command_args)
        os.chdir(old_pwd)
        return True
      except:
        traceback.print_exc()
        output('$ Failed to run HTTP handler, trying next browser.')
        os.chdir(old_pwd)
        return False

  webbrowser.register('windows-http', WindowsHttpDefault, update_tryorder=-1)
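
# --- Editor's addition (usage sketch, not part of the original file): the
# rest of namebench calls this module's open() in place of webbrowser.open;
# on win32 the registered WindowsHttpDefault handler above is preferred.
# The URL is illustrative, and running this requires namebench's util module.
if __name__ == '__main__':
  open('http://code.google.com/p/namebench/')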
apache-2.0
garretlh/nimbus
nimbus-main/src/main/python/nimbuscfg/config.py
2
3424
#
#    Copyright (c) 2009-2015 Tom Keffer <tkeffer@gmail.com> and
#                            Matthew Wall
#
#    See the file LICENSE.txt for your full rights.
#
"""Utilities for managing the config file"""

import logging
import sys
import os
import stat
import platform

from pkg_resources import resource_string

import configobj

import nimbuscfg
from nimbuscfg import *
from nimbuscfg.prompts import *
from nimbuscfg.stanzas import *


class ConfigEngine(object):

    def run(self, options):
        print "Configuring nimbus for %s - %s" % (platform.system(), platform.release())
        try:
            config_dict = read_config()
        except SyntaxError, e:
            sys.exit("Syntax error in configuration file: %s" % e)
        except IOError, e:
            sys.exit("Unable to open configuration file: %s" % e)

        self.modify_config(config_dict)
        reorder_to_ref(config_dict)
        self.save_config(config_dict)

        if options.debian:
            print "Configuring for debian startup"
            self.configure_debian_startup()

    def configure_debian_startup(self):
        init_script = resource_string(__name__, 'init.d/nimbus.debian')
        with open("/etc/init.d/nimbus", 'w') as _file:
            _file.write(init_script)
        os.chmod("/etc/init.d/nimbus",
                 stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR |
                 stat.S_IRGRP | stat.S_IXGRP | stat.S_IXOTH)

    def modify_config(self, config_dict):
        """Modify the configuration dictionary according to any command
        line options. Give the user a chance too.
        """
        if 'Station' not in config_dict:
            config_dict['Station'] = {}
            config_dict.comments['Station'] = " "

        info = prompt_for_info()
        driver_dict = prompt_for_driver()
        driver_name = driver_dict.get('driver_name')
        driver_settings = prompt_for_driver_settings(driver_name)
        weather_services = prompt_for_weather_services()

        config_dict.update(weather_services)
        config_dict['Station'].update(info)
        config_dict['Station']['station_type'] = driver_name

        orig_stanza_text = None

        # if a previous stanza exists for this driver, grab it
        if driver_name in config_dict:
            orig_stanza = configobj.ConfigObj(interpolation=False)
            orig_stanza[driver_name] = config_dict[driver_name]
            orig_stanza_text = '\n'.join(orig_stanza.write())

        if orig_stanza_text:
            stanza_text = orig_stanza_text
        else:
            stanza_text = config_stanzas[driver_name]

        stanza = configobj.ConfigObj(stanza_text.splitlines())

        # Insert the stanza in the configuration dictionary:
        config_dict[driver_name] = stanza[driver_name]
        config_dict.comments[driver_name] = " "
        reorder_sections(config_dict, driver_name, 'Station', after=True)

        for k in driver_settings:
            config_dict[driver_name][k] = driver_settings[k]

    def save_config(self, config_dict):
        """Save the config file, backing up as necessary."""
        config_path = os.path.join(nimbuscfg._config_dir, nimbuscfg._config_file)
        if not os.path.exists(nimbuscfg._config_dir):
            os.makedirs(nimbuscfg._config_dir)
        with open(config_path, 'w') as fd:
            config_dict.write(fd)
        logging.info("Saved configuration to %s" % config_path)
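
# --- Editor's addition (usage sketch, not part of the original file): how
# the engine might be driven from a command-line entry point. The choice of
# optparse and the "--debian" flag name are assumptions, based only on the
# options.debian attribute read in run() above.
if __name__ == '__main__':
    from optparse import OptionParser
    parser = OptionParser()
    parser.add_option('--debian', action='store_true', default=False,
                      help='also install the debian init script')
    (options, _args) = parser.parse_args()
    ConfigEngine().run(options)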
gpl-3.0
simkarlier/autorad.io
py/mysql/connector/network.py
5
17410
# MySQL Connector/Python - MySQL driver written in Python.
# Copyright (c) 2012, 2014, Oracle and/or its affiliates. All rights reserved.

# MySQL Connector/Python is licensed under the terms of the GPLv2
# <http://www.gnu.org/licenses/old-licenses/gpl-2.0.html>, like most
# MySQL Connectors. There are special exceptions to the terms and
# conditions of the GPLv2 as it is applied to this software, see the
# FOSS License Exception
# <http://www.mysql.com/about/legal/licensing/foss-exception.html>.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA

"""Module implementing low-level socket communication with MySQL servers.
"""

from collections import deque
import socket
import struct
import sys
import zlib

try:
    import ssl
except:
    # If import fails, we don't have SSL support.
    pass

from . import constants, errors
from .catch23 import PY2, init_bytearray, struct_unpack


def _strioerror(err):
    """Reformat the IOError error message

    This function reformats the IOError error message.
    """
    if not err.errno:
        return str(err)
    return '{errno} {strerr}'.format(errno=err.errno, strerr=err.strerror)


def _prepare_packets(buf, pktnr):
    """Prepare a packet for sending to the MySQL server"""
    pkts = []
    pllen = len(buf)
    maxpktlen = constants.MAX_PACKET_LENGTH
    while pllen > maxpktlen:
        pkts.append(b'\xff\xff\xff' + struct.pack('<B', pktnr)
                    + buf[:maxpktlen])
        buf = buf[maxpktlen:]
        pllen = len(buf)
        pktnr = pktnr + 1
    pkts.append(struct.pack('<I', pllen)[0:3]
                + struct.pack('<B', pktnr) + buf)
    return pkts


class BaseMySQLSocket(object):
    """Base class for MySQL socket communication

    This class should not be used directly but overloaded, changing at
    least the open_connection()-method.
    Examples of subclasses are
      mysql.connector.network.MySQLTCPSocket
      mysql.connector.network.MySQLUnixSocket
    """

    def __init__(self):
        self.sock = None  # holds the socket connection
        self._connection_timeout = None
        self._packet_number = -1
        self._packet_queue = deque()
        self.recvsize = 8192

    @property
    def next_packet_number(self):
        """Increments the packet number"""
        self._packet_number = self._packet_number + 1
        if self._packet_number > 255:
            self._packet_number = 0
        return self._packet_number

    def open_connection(self):
        """Open the socket"""
        raise NotImplementedError

    def get_address(self):
        """Get the location of the socket"""
        raise NotImplementedError

    def shutdown(self):
        """Shut down the socket before closing it"""
        try:
            self.sock.shutdown(socket.SHUT_RDWR)
            self.sock.close()
            del self._packet_queue
        except (socket.error, AttributeError):
            pass

    def close_connection(self):
        """Close the socket"""
        try:
            self.sock.close()
            del self._packet_queue
        except (socket.error, AttributeError):
            pass

    def send_plain(self, buf, packet_number=None):
        """Send packets to the MySQL server"""
        if packet_number is None:
            self.next_packet_number  # pylint: disable=W0104
        else:
            self._packet_number = packet_number
        packets = _prepare_packets(buf, self._packet_number)
        for packet in packets:
            try:
                if PY2:
                    self.sock.sendall(buffer(packet))  # pylint: disable=E0602
                else:
                    self.sock.sendall(packet)
            except IOError as err:
                raise errors.OperationalError(
                    errno=2055, values=(self.get_address(), _strioerror(err)))
            except AttributeError:
                raise errors.OperationalError(errno=2006)

    send = send_plain

    def send_compressed(self, buf, packet_number=None):
        """Send compressed packets to the MySQL server"""
        if packet_number is None:
            self.next_packet_number  # pylint: disable=W0104
        else:
            self._packet_number = packet_number
        pktnr = self._packet_number
        pllen = len(buf)
        zpkts = []
        maxpktlen = constants.MAX_PACKET_LENGTH
        if pllen > maxpktlen:
            pkts = _prepare_packets(buf, pktnr)
            if PY2:
                tmpbuf = bytearray()
                for pkt in pkts:
                    tmpbuf += pkt
                tmpbuf = buffer(tmpbuf)  # pylint: disable=E0602
            else:
                tmpbuf = b''.join(pkts)
            del pkts
            seqid = 0
            zbuf = zlib.compress(tmpbuf[:16384])
            header = (struct.pack('<I', len(zbuf))[0:3]
                      + struct.pack('<B', seqid)
                      + b'\x00\x40\x00')
            if PY2:
                header = buffer(header)  # pylint: disable=E0602
            zpkts.append(header + zbuf)
            tmpbuf = tmpbuf[16384:]
            pllen = len(tmpbuf)
            seqid = seqid + 1
            while pllen > maxpktlen:
                zbuf = zlib.compress(tmpbuf[:maxpktlen])
                header = (struct.pack('<I', len(zbuf))[0:3]
                          + struct.pack('<B', seqid)
                          + b'\xff\xff\xff')
                if PY2:
                    header = buffer(header)  # pylint: disable=E0602
                zpkts.append(header + zbuf)
                tmpbuf = tmpbuf[maxpktlen:]
                pllen = len(tmpbuf)
                seqid = seqid + 1
            if tmpbuf:
                zbuf = zlib.compress(tmpbuf)
                header = (struct.pack('<I', len(zbuf))[0:3]
                          + struct.pack('<B', seqid)
                          + struct.pack('<I', pllen)[0:3])
                if PY2:
                    header = buffer(header)  # pylint: disable=E0602
                zpkts.append(header + zbuf)
            del tmpbuf
        else:
            pkt = (struct.pack('<I', pllen)[0:3] +
                   struct.pack('<B', pktnr) + buf)
            if PY2:
                pkt = buffer(pkt)  # pylint: disable=E0602
            pllen = len(pkt)
            if pllen > 50:
                zbuf = zlib.compress(pkt)
                zpkts.append(struct.pack('<I', len(zbuf))[0:3]
                             + struct.pack('<B', 0)
                             + struct.pack('<I', pllen)[0:3]
                             + zbuf)
            else:
                header = (struct.pack('<I', pllen)[0:3]
                          + struct.pack('<B', 0)
                          + struct.pack('<I', 0)[0:3])
                if PY2:
                    header = buffer(header)  # pylint: disable=E0602
                zpkts.append(header + pkt)
        for zip_packet in zpkts:
            try:
                self.sock.sendall(zip_packet)
            except IOError as err:
                raise errors.OperationalError(
                    errno=2055, values=(self.get_address(), _strioerror(err)))
            except AttributeError:
                raise errors.OperationalError(errno=2006)

    def recv_plain(self):
        """Receive packets from the MySQL server"""
        try:
            # Read the header of the MySQL packet, 4 bytes
            packet = bytearray(b'')
            while len(packet) < 4:
                chunk = self.sock.recv(4)
                if not chunk:
                    raise errors.InterfaceError(errno=2013)
                packet += chunk

            # Save the packet number and payload length
            self._packet_number = packet[3]
            if PY2:
                payload_len = struct.unpack_from(
                    "<I",
                    buffer(packet[0:3] + b'\x00'))[0]  # pylint: disable=E0602
            else:
                payload_len = struct.unpack("<I", packet[0:3] + b'\x00')[0]

            # Read the payload
            rest = payload_len
            packet.extend(bytearray(payload_len))
            packet_view = memoryview(packet)  # pylint: disable=E0602
            packet_view = packet_view[4:]
            while rest:
                read = self.sock.recv_into(packet_view, rest)
                if read == 0 and rest > 0:
                    raise errors.InterfaceError(errno=2013)
                packet_view = packet_view[read:]
                rest -= read
            return packet
        except IOError as err:
            raise errors.OperationalError(
                errno=2055, values=(self.get_address(), _strioerror(err)))

    def recv_py26_plain(self):
        """Receive packets from the MySQL server"""
        try:
            # Read the header of the MySQL packet, 4 bytes
            header = bytearray(b'')
            while len(header) < 4:
                chunk = self.sock.recv(4)
                if not chunk:
                    raise errors.InterfaceError(errno=2013)
                header += chunk

            # Save the packet number and payload length
            self._packet_number = header[3]
            payload_len = struct_unpack("<I", header[0:3] + b'\x00')[0]

            # Read the payload
            rest = payload_len
            payload = init_bytearray(b'')
            while rest > 0:
                chunk = self.sock.recv(rest)
                if not chunk:
                    raise errors.InterfaceError(errno=2013)
                payload += chunk
                rest = payload_len - len(payload)
            return header + payload
        except IOError as err:
            raise errors.OperationalError(
                errno=2055, values=(self.get_address(), _strioerror(err)))

    if sys.version_info[0:2] == (2, 6):
        recv = recv_py26_plain
        recv_plain = recv_py26_plain
    else:
        recv = recv_plain

    def _split_zipped_payload(self, packet_bunch):
        """Split compressed payload"""
        while packet_bunch:
            payload_length = struct_unpack("<I",
                                           packet_bunch[0:3] + b'\x00')[0]
            self._packet_queue.append(packet_bunch[0:payload_length + 4])
            packet_bunch = packet_bunch[payload_length + 4:]

    def recv_compressed(self):
        """Receive compressed packets from the MySQL server"""
        try:
            return self._packet_queue.popleft()
        except IndexError:
            pass

        header = bytearray(b'')
        packets = []
        try:
            abyte = self.sock.recv(1)
            while abyte and len(header) < 7:
                header += abyte
                abyte = self.sock.recv(1)

            while header:
                if len(header) < 7:
                    raise errors.InterfaceError(errno=2013)
                zip_payload_length = struct_unpack("<I",
                                                   header[0:3] + b'\x00')[0]
                payload_length = struct_unpack("<I", header[4:7] + b'\x00')[0]
                zip_payload = init_bytearray(abyte)
                while len(zip_payload) < zip_payload_length:
                    chunk = self.sock.recv(zip_payload_length
                                           - len(zip_payload))
                    if len(chunk) == 0:
                        raise errors.InterfaceError(errno=2013)
                    zip_payload = zip_payload + chunk
                if payload_length == 0:
                    self._split_zipped_payload(zip_payload)
                    return self._packet_queue.popleft()
                packets.append(header + zip_payload)
                if payload_length != 16384:
                    break
                header = init_bytearray(b'')
                abyte = self.sock.recv(1)
                while abyte and len(header) < 7:
                    header += abyte
                    abyte = self.sock.recv(1)
        except IOError as err:
            raise errors.OperationalError(
                errno=2055, values=(self.get_address(), _strioerror(err)))

        tmp = init_bytearray(b'')
        for packet in packets:
            payload_length = struct_unpack("<I", header[4:7] + b'\x00')[0]
            if payload_length == 0:
                # extend with the raw bytes; bytearray.append() only
                # accepts a single integer
                tmp += packet[7:]
            else:
                if PY2:
                    tmp += zlib.decompress(
                        buffer(packet[7:]))  # pylint: disable=E0602
                else:
                    tmp += zlib.decompress(packet[7:])

        self._split_zipped_payload(tmp)
        del tmp

        try:
            return self._packet_queue.popleft()
        except IndexError:
            pass

    def set_connection_timeout(self, timeout):
        """Set the connection timeout"""
        self._connection_timeout = timeout

    # pylint: disable=C0103
    def switch_to_ssl(self, ca, cert, key, verify_cert=False):
        """Switch the socket to use SSL"""
        if not self.sock:
            raise errors.InterfaceError(errno=2048)

        try:
            if verify_cert:
                cert_reqs = ssl.CERT_REQUIRED
            else:
                cert_reqs = ssl.CERT_NONE

            self.sock = ssl.wrap_socket(
                self.sock, keyfile=key, certfile=cert, ca_certs=ca,
                cert_reqs=cert_reqs, do_handshake_on_connect=False,
                ssl_version=ssl.PROTOCOL_TLSv1)
            self.sock.do_handshake()
        except NameError:
            raise errors.NotSupportedError(
                "Python installation has no SSL support")
        except (ssl.SSLError, IOError) as err:
            raise errors.InterfaceError(
                errno=2055, values=(self.get_address(), _strioerror(err)))
        except NotImplementedError as err:
            raise errors.InterfaceError(str(err))
    # pylint: enable=C0103


class MySQLUnixSocket(BaseMySQLSocket):
    """MySQL socket class using UNIX sockets

    Opens a connection through the UNIX socket of the MySQL Server.
    """

    def __init__(self, unix_socket='/tmp/mysql.sock'):
        super(MySQLUnixSocket, self).__init__()
        self.unix_socket = unix_socket

    def get_address(self):
        return self.unix_socket

    def open_connection(self):
        try:
            self.sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
            self.sock.settimeout(self._connection_timeout)
            self.sock.connect(self.unix_socket)
        except IOError as err:
            raise errors.InterfaceError(
                errno=2002, values=(self.get_address(), _strioerror(err)))
        except Exception as err:
            raise errors.InterfaceError(str(err))


class MySQLTCPSocket(BaseMySQLSocket):
    """MySQL socket class using TCP/IP

    Opens a TCP/IP connection to the MySQL Server.
    """

    def __init__(self, host='127.0.0.1', port=3306, force_ipv6=False):
        super(MySQLTCPSocket, self).__init__()
        self.server_host = host
        self.server_port = port
        self.force_ipv6 = force_ipv6
        self._family = 0

    def get_address(self):
        return "{0}:{1}".format(self.server_host, self.server_port)

    def open_connection(self):
        """Open the TCP/IP connection to the MySQL server"""
        # Get address information
        addrinfo = [None] * 5
        try:
            addrinfos = socket.getaddrinfo(self.server_host,
                                           self.server_port, 0,
                                           socket.SOCK_STREAM,
                                           socket.SOL_TCP)
            # If multiple results we favor IPv4, unless IPv6 was forced.
            for info in addrinfos:
                if self.force_ipv6 and info[0] == socket.AF_INET6:
                    addrinfo = info
                    break
                elif info[0] == socket.AF_INET:
                    addrinfo = info
                    break
            if self.force_ipv6 and addrinfo[0] is None:
                raise errors.InterfaceError(
                    "No IPv6 address found for {0}".format(self.server_host))
            if addrinfo[0] is None:
                addrinfo = addrinfos[0]
        except IOError as err:
            raise errors.InterfaceError(
                errno=2003, values=(self.get_address(), _strioerror(err)))
        else:
            (self._family,
             socktype, proto, _, sockaddr) = addrinfo

        # Instantiate the socket and connect
        try:
            self.sock = socket.socket(self._family, socktype, proto)
            self.sock.settimeout(self._connection_timeout)
            self.sock.connect(sockaddr)
        except IOError as err:
            raise errors.InterfaceError(
                errno=2003, values=(self.get_address(), _strioerror(err)))
        except Exception as err:
            raise errors.OperationalError(str(err))
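
# --- Editor's addition (usage sketch, not part of the original module): the
# 4-byte MySQL packet header built by _prepare_packets() is three bytes of
# little-endian payload length plus one sequence-number byte. Because this
# module uses relative imports it cannot run standalone, so the example is
# shown as an interpreter session; the payload bytes are illustrative.
#
#   >>> from mysql.connector.network import _prepare_packets
#   >>> pkts = _prepare_packets(b'\x03SELECT 1', 0)
#   >>> pkts[0][:4]               # length 9 (little-endian), sequence id 0
#   b'\t\x00\x00\x00'
#   >>> pkts[0][4:]
#   b'\x03SELECT 1'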
gpl-3.0